BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

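# oe_import() runs once at parse time (via the immediate expansion of
# OE_IMPORTED below): it adds each layer's lib/ directory to sys.path and
# injects the modules listed in OE_IMPORTS so python functions in the
# metadata can use them directly.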
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
	bbfatal_log "$*"
}

oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
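
# Typical usage from a recipe's shell task (illustrative only, not part of
# this class):
#   do_compile() {
#       oe_runmake -C ${S} all
#   }
# oe_runmake logs the make invocation with bbnote and aborts the task via
# die() if make fails.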


def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

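# Collect the absolute-path entries of LIC_FILES_CHKSUM (anything outside
# TMPDIR/S/B/WORKDIR) as "path:exists" strings; the result is appended to
# do_fetch[file-checksums] below so do_fetch reruns when those files change.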
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

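# Populate 'dest' (normally ${HOSTTOOLS_DIR}) with symlinks to the host tools
# named in 'toolsvar', resolved from the PATH of the original environment.
# Missing tools are fatal unless fatal=False (used for HOSTTOOLS_NONFATAL).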
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    # Force "python" -> "python2"
    desttool = os.path.join(dest, "python")
    if not os.path.exists(desttool):
        srctool = "python2"
        os.symlink(srctool, desttool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

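# Wipe ${S} before unpacking unless ${S} and ${WORKDIR} are the same directory,
# in which case only ${S}/patches is cleaned.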
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

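# Build the 'layer = "branch:revision"' lines shown in the build configuration
# banner; consecutive layers sharing an identical branch:revision are collapsed
# so the value is only printed once per run of identical entries.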
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


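# Functions listed in BUILDCFG_FUNCS are called from base_eventhandler() at
# BuildStarted to produce the lines of the build configuration banner.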
BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

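# CONFIGURESTAMPFILE records the do_configure task hash; base_do_configure()
# compares it against the current hash and runs "make clean" when the
# configuration has changed. A recipe whose clean target is unreliable can set
# CLEANBROKEN = "1" to skip that clean.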
CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# -ignore_readdir_race does not work correctly with -delete;
			# use xargs to avoid spurious build failures
			find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
	:
}

base_do_package() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}

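# Compute PKGTRIPLETS and PKGMLTRIPLETS: the <arch><vendor>-<os> triplets for
# the base configuration and for each MULTILIB_VARIANTS override.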
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500395 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
396 if pkgconfigflags:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500397 pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
398 pn = d.getVar("PN")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500399
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500400 mlprefix = d.getVar("MLPREFIX")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500401
402 def expandFilter(appends, extension, prefix):
403 appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
404 newappends = []
405 for a in appends:
406 if a.endswith("-native") or ("-cross-" in a):
407 newappends.append(a)
408 elif a.startswith("virtual/"):
409 subs = a.split("/", 1)[1]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500410 if subs.startswith(prefix):
411 newappends.append(a + extension)
412 else:
413 newappends.append("virtual/" + prefix + subs + extension)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500414 else:
415 if a.startswith(prefix):
416 newappends.append(a + extension)
417 else:
418 newappends.append(prefix + a + extension)
419 return newappends
420
421 def appendVar(varname, appends):
422 if not appends:
423 return
424 if varname.find("DEPENDS") != -1:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500425 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500426 appends = expandFilter(appends, "", "nativesdk-")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500427 elif bb.data.inherits_class('native', d):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500428 appends = expandFilter(appends, "-native", "")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500429 elif mlprefix:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500430 appends = expandFilter(appends, "", mlprefix)
431 varname = d.expand(varname)
432 d.appendVar(varname, " " + " ".join(appends))
433
434 extradeps = []
435 extrardeps = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500436 extrarrecs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500437 extraconf = []
438 for flag, flagval in sorted(pkgconfigflags.items()):
439 items = flagval.split(",")
440 num = len(items)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500441 if num > 5:
442 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500443 % (d.getVar('PN'), flag))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500444
445 if flag in pkgconfig:
446 if num >= 3 and items[2]:
447 extradeps.append(items[2])
448 if num >= 4 and items[3]:
449 extrardeps.append(items[3])
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500450 if num >= 5 and items[4]:
451 extrarrecs.append(items[4])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500452 if num >= 1 and items[0]:
453 extraconf.append(items[0])
454 elif num >= 2 and items[1]:
455 extraconf.append(items[1])
456 appendVar('DEPENDS', extradeps)
457 appendVar('RDEPENDS_${PN}', extrardeps)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500458 appendVar('RRECOMMENDS_${PN}', extrarrecs)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500459 appendVar('PACKAGECONFIG_CONFARGS', extraconf)
460
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500461 pn = d.getVar('PN')
462 license = d.getVar('LICENSE')
Brad Bishop316dfdd2018-06-25 12:45:53 -0400463 if license == "INVALID" and pn != "defaultpkgname":
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500464 bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
465
466 if bb.data.inherits_class('license', d):
467 check_license_format(d)
Brad Bishop19323692019-04-05 15:28:33 -0400468 unmatched_license_flags = check_license_flags(d)
469 if unmatched_license_flags:
470 if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}', which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}, which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                incompatwl.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    incompatwl.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if not pn in whitelist:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
        sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"

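# Promote the base_do_* implementations above to the default do_* task
# implementations; recipes and classes inheriting this class may override them.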
EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package