BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license"
OE_IMPORTS[type] = "list"

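# Make the modules listed in OE_IMPORTS (and the lib/ directories from BBPATH)
# available to python code run from the metadata.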
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module namespace early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

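# Return the host distribution identifier, optionally adjusted by the function
# named in LSB_DISTRO_ADJUST.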
def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

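# Shell helpers: die() aborts the task with a fatal log message; oe_runmake
# wraps ${MAKE} ${EXTRA_OEMAKE} and dies if the make invocation fails.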
die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}


def base_dep_prepend(d):
    #
    # Ideally this would check a flag so we would operate properly in
    # the case where host == build == target; for now we don't handle
    # that case.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"

DEPENDS_prepend = "${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

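# Prepend a PATH fragment built from the EXTRANATIVEPATH entries under
# ${STAGING_BINDIR_NATIVE}.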
def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

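# Collect the absolute paths named in LIC_FILES_CHKSUM (each tagged with
# whether it exists) so do_fetch can include them in its file checksums.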
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

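# Create symlinks in dest for each tool listed in toolsvar, resolved from the
# PATH of the original (pre-bitbake) environment; optionally fatal if any of
# the tools cannot be found.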
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

python () {
    if d.getVar('S') != d.getVar('WORKDIR'):
        d.setVarFlag('do_unpack', 'cleandirs', '${S}')
    else:
        d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
}
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
            d.setVar("TUNE_PKGARCH", "armv7a")

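# Produce "layer = branch:revision" lines for each layer in BBLAYERS, blanking
# out a layer's branch:revision when it matches that of the following entry.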
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

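# Build-banner helpers: buildcfg_vars yields the current values of
# BUILDCFG_VARS; buildcfg_neededvars aborts if any BUILDCFG_NEEDEDVARS is unset.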
def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

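# Event handler: sets NATIVELSBSTRING, backfills features and populates the
# host tools directory at ConfigParsed, prints the build configuration banner
# at BuildStarted, drops duplicate SDK PREFERRED_PROVIDERs, runs sstate
# completion commands, and skips non-preferred virtual/* providers once a
# recipe has been parsed.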
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not e.data.getVar("NATIVELSBSTRING", False):
            e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(e.data)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = e.data.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            e.data.setVar("completion_function", "\n".join(cmds))
            e.data.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", e.data)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
        # skip parsing for all the other providers, which means they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
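# Re-run "make clean" and remove stale .la files when the task hash recorded
# in CONFIGURESTAMPFILE no longer matches, then record the current hash.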
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            find ${B} -ignore_readdir_race -name \*.la -delete
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

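# Compute PKGTRIPLETS and PKGMLTRIPLETS (arch + vendor + os triplets) for the
# base configuration and for each MULTILIB_VARIANTS entry.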
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

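# Anonymous python: expand PACKAGECONFIG, enforce LICENSE, COMPATIBLE_MACHINE,
# COMPATIBLE_HOST and INCOMPATIBLE_LICENSE policies, add fetcher/unpacker
# dependencies derived from SRC_URI, and adjust PACKAGE_ARCH for recipes with
# machine-specific content.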
python () {
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 4:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
                    whitelist.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        whitelist.extend((d.getVar(w + spdx_license) or "").split())
                    '''
                    We need to track what we are whitelisting and why. If pn is
                    incompatible we need to be able to note that the image that
                    is created may in fact contain incompatible licenses despite
                    INCOMPATIBLE_LICENSE being set.
                    '''
                    incompatwl.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        incompatwl.extend((d.getVar(w + spdx_license) or "").split())

            if not pn in whitelist:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipPackage("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _, path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # file is needed by rpm2cpio.sh
        elif path.endswith('.src.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' file-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package