blob: 024fe4331a854ef015e0c4e43ed3f433ea43ec19 [file] [log] [blame]
# Task run when none is given on the bitbake command line.
BB_DEFAULT_TASK ?= "build"
# Default override class; variant classes may set a different value.
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

# Python modules injected into the metadata namespace by oe_import() below.
OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath"
OE_IMPORTS[type] = "list"
15
def oe_import(d):
    """Prepend each BBPATH entry's lib/ directory to sys.path, then import
    every module listed in OE_IMPORTS and expose its top-level package to
    metadata code.  Returns "" so it can be used in a variable expansion."""
    import sys

    search_roots = d.getVar("BBPATH", True).split(":")
    sys.path[0:0] = [os.path.join(root, "lib") for root in search_roots]

    def inject(name, value):
        """Make a python object accessible from the metadata."""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for modname in oe.data.typed_value("OE_IMPORTS", d):
        # __import__("a.b") returns package "a"; expose that top-level name.
        inject(modname.split(".", 1)[0], __import__(modname))

    return ""
35
# We need the oe module name space early (before INHERITs get added).
# ":=" forces immediate expansion so oe_import() runs at parse time.
OE_IMPORTED := "${@oe_import(d)}"
38
def lsb_distro_identifier(d):
    """Return the host distro identifier string.

    If LSB_DISTRO_ADJUST names a function in the global namespace, that
    function is passed to oe.lsb.distro_identifier() as a post-processing
    hook; an unset or unknown name means no adjustment."""
    adjust_name = d.getVar('LSB_DISTRO_ADJUST', True)
    adjust_func = globals().get(adjust_name) if adjust_name else None
    return oe.lsb.distro_identifier(adjust_func)
48
# Log the arguments as a fatal error and abort the current task.
die() {
	bbfatal_log "$*"
}

# Run make with EXTRA_OEMAKE and the given arguments, logging the command.
oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

# Like oe_runmake_call but any failure is fatal.
oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
61
62
def base_dep_prepend(d):
    """Return the default toolchain dependencies for a recipe, or "" when
    they are inhibited or when building for the build host itself.

    Ideally this would check a flag to also behave properly when
    host == build == target; that case is not handled yet.

    INHIBIT_DEFAULT_DEPS doesn't apply to the patch command: whether the
    patch tooling is built is the responsibility of the patch class, not
    the recipe."""
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    if d.getVar('HOST_SYS', True) == d.getVar('BUILD_SYS', True):
        return ""
    return " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
78
# Default toolchain dependencies computed above; prepended to DEPENDS.
BASEDEPENDS = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
87
def extra_path_elements(d):
    """Build a colon-terminated PATH fragment with one
    ${STAGING_BINDIR_NATIVE}/<entry> element per word of EXTRANATIVEPATH.
    Returns "" when EXTRANATIVEPATH is unset or empty."""
    entries = (d.getVar('EXTRANATIVEPATH', True) or "").split()
    return "".join("${STAGING_BINDIR_NATIVE}/%s:" % entry for entry in entries)
94
# Put any EXTRANATIVEPATH staging bindirs ahead of the regular PATH.
PATH_prepend = "${@extra_path_elements(d)}"
96
def get_lic_checksum_file_list(d):
    """Return a space-separated list of "path:exists" entries for every
    absolute path in LIC_FILES_CHKSUM, skipping paths under TMPDIR.

    Non-absolute paths are ignored (they are covered by SRC_URI checksums);
    a malformed URL is fatal."""
    entries = []
    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
    tmpdir = d.getVar("TMPDIR", True)

    for url in lic_files.split():
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith(tmpdir):
                    continue
                entries.append("%s:%s" % (path, os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(entries)
118
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
# Re-run do_fetch when any fetched file or license checksum file changes.
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
# Download everything listed in SRC_URI; any fetch error is fatal.
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
136
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600139
# Choose which directory do_unpack wipes first: the whole of ${S} when it
# is separate from WORKDIR, otherwise only ${S}/patches so WORKDIR survives.
python () {
    if d.getVar('S', True) != d.getVar('WORKDIR', True):
        d.setVarFlag('do_unpack', 'cleandirs', '${S}')
    else:
        d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
}
# Unpack all fetched SRC_URI entries into WORKDIR; fetch errors are fatal.
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR', True))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
157
def pkgarch_mapping(d):
    """Apply the opt-in compatibility mapping of TUNE_PKGARCH:
    when PKGARCHCOMPAT_ARMV7A is set, "armv7a-vfp-neon" becomes "armv7a"."""
    if not d.getVar("PKGARCHCOMPAT_ARMV7A", True):
        return
    if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
        d.setVar("TUNE_PKGARCH", "armv7a")
163
def get_layers_branch_rev(d):
    """Return a list of '<layer>           = "branch:revision"' status lines,
    one per layer in BBLAYERS.

    Consecutive entries sharing an identical '= "branch:revision"' suffix are
    collapsed: only the last entry of such a run keeps the value, earlier ones
    are truncated to just the layer name.

    Fix: previously an empty BBLAYERS raised IndexError (indexing [-1] of an
    empty list); now an empty list is returned instead."""
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    if not layers_branch_rev:
        # No layers configured: nothing to collapse, and the loop below
        # would index an empty list.
        return layers_branch_rev
    # Walk backwards, blanking the suffix of any entry equal to its successor.
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev
184
185
# Functions whose output lines form the build-configuration banner.
BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"
188
def buildcfg_vars(d):
    """Yield one 'VAR               = "value"' banner line for each variable
    in BUILDCFG_VARS that has a value set."""
    for name in oe.data.typed_value('BUILDCFG_VARS', d):
        current = d.getVar(name, True)
        if current is None:
            continue
        yield '%-17s = "%s"' % (name, current)
195
def buildcfg_neededvars(d):
    """Abort the build when any variable listed in BUILDCFG_NEEDEDVARS is
    unset or still has the placeholder value 'INVALID'."""
    missing = []
    for name in oe.data.typed_value("BUILDCFG_NEEDEDVARS", d):
        current = d.getVar(name, True)
        if not current or current == 'INVALID':
            missing.append(name)

    if missing:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(missing))
206
addhandler base_eventhandler
# Events this handler reacts to; see the isinstance() dispatch in the handler.
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
# Central event handler: dispatches on the event type ('e' is provided by
# bitbake) and performs per-event housekeeping described inline below.
python base_eventhandler() {
    import bb.runqueue

    # Configuration parsed: record host/bitbake identity and backfill features.
    if isinstance(e, bb.event.ConfigParsed):
        if not e.data.getVar("NATIVELSBSTRING", False):
            e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)

    # Build started: print the configuration banner built from BUILDCFG_FUNCS.
    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(e.data)
        bb.data.update_data(localdata)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    # Scene queue finished: run any queued completion commands collected in
    # ${STAGING_DIR}/sstatecompletions, then remove the file.
    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = e.data.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            e.data.setVar("completion_function", "\n".join(cmds))
            e.data.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", e.data)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN', True)
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES", True) or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
278
# Stamp recording the task hash of the last successful configure.
CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
# "0" means the recipe's "make clean" works; see base_do_configure.
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
do_configure[deptask] = "do_populate_sysroot"
# If the configure task hash changed since the recorded stamp, clean the
# build tree (unless CLEANBROKEN says "make clean" is unreliable), then
# record the new hash in the stamp file.
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# Stale libtool archives would poison the re-configure.
			find ${B} -ignore_readdir_race -name \*.la -delete
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}
300
addtask compile after do_configure
do_compile[dirs] = "${B}"
# Run make if any flavour of Makefile exists, otherwise note there is
# nothing to compile.
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}
310
addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

# Default no-op install; recipes/classes override this.
base_do_install() {
	:
}
319
# Default no-op package step; packaging classes override this.
base_do_package() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}
330
def set_packagetriplet(d):
    """Set PKGTRIPLETS from PACKAGE_ARCHS/TARGET_VENDOR/TARGET_OS, and
    PKGMLTRIPLETS additionally covering every MULTILIB_VARIANTS override.

    Triplet form is '<arch><vendor>-<os>', emitted in reverse PACKAGE_ARCHS
    order (most specific first)."""
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
    tos.append(d.getVar("TARGET_OS", True))
    tvs.append(d.getVar("TARGET_VENDOR", True))

    def settriplet(d, varname, archs, tos, tvs):
        # Join each arch list with its matching vendor/os, newest last,
        # then reverse so the most specific arch comes first.
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        # Re-evaluate the arch/vendor/os under each multilib override.
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)

        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
        tos.append(localdata.getVar("TARGET_OS", True))
        tvs.append(localdata.getVar("TARGET_VENDOR", True))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
362
# Anonymous function run at recipe parse time.  In order: expand
# PACKAGECONFIG flags into DEPENDS/RDEPENDS/PACKAGECONFIG_CONFARGS, validate
# LICENSE and license flags, enable fakeroot for target recipes, enforce
# COMPATIBLE_MACHINE/COMPATIBLE_HOST, apply INCOMPATIBLE_LICENSE handling,
# add fetcher/unpacker tool dependencies per SRC_URI scheme, and decide
# whether PACKAGE_ARCH must become machine-specific.
python () {
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
        pn = d.getVar("PN", True)

        mlprefix = d.getVar("MLPREFIX", True)

        def expandFilter(appends, extension, prefix):
            # Rewrite each dependency so it targets the right recipe variant
            # (native/nativesdk/multilib), leaving -native and -cross- items
            # untouched.
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            # Append the (variant-filtered, for *DEPENDS) items to varname.
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 4:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
                    % (d.getVar('PN', True), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN', True)
    license = d.getVar('LICENSE', True)
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    # Skip the recipe if MACHINEOVERRIDES matches none of the
    # COMPATIBLE_MACHINE regex alternatives.
    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST', True)
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS', True)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()

    # License enforcement is skipped for SDK/toolchain recipe variants.
    check_license = False if pn.startswith("nativesdk-") else True
    for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
          "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
          "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
        if pn.endswith(d.expand(t)):
            check_license = False
    if pn.startswith("gcc-source-"):
        check_license = False

    if check_license and bad_licenses:
        bad_licenses = expand_wildcard_licenses(d, bad_licenses)

        whitelist = []
        incompatwl = []
        for lic in bad_licenses:
            spdx_license = return_spdx(d, lic)
            for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
                whitelist.extend((d.getVar(w + lic, True) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may infact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                incompatwl.extend((d.getVar(w + lic, True) or "").split())
                if spdx_license:
                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())

        if not pn in whitelist:
            pkgs = d.getVar('PACKAGES', True).split()
            skipped_pkgs = []
            unskipped_pkgs = []
            for pkg in pkgs:
                if incompatible_license(d, bad_licenses, pkg):
                    skipped_pkgs.append(pkg)
                else:
                    unskipped_pkgs.append(pkg)
            all_skipped = skipped_pkgs and not unskipped_pkgs
            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                    mlprefix = d.getVar('MLPREFIX', True)
                    d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                for pkg in unskipped_pkgs:
                    bb.debug(1, "INCLUDING the package " + pkg)
            elif all_skipped or incompatible_license(d, bad_licenses):
                bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                raise bb.parse.SkipPackage("incompatible with license %s" % license)
        elif pn in whitelist:
            if pn in incompatwl:
                bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES', True).split():
        pkg_license = d.getVar('LICENSE_' + pkg, True)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    # Add fetch/unpack tool dependencies based on each SRC_URI entry's
    # scheme and file suffix; track whether SRCPV support is needed.
    needsrcrev = False
    srcuri = d.getVar('SRC_URI', True)
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # file is needed by rpm2cpio.sh
        elif path.endswith('.src.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' file-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH', True)
    pkg_arch = d.getVar('PACKAGE_ARCH', True)

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
        machine = d.getVar('MACHINE', True)
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
}
657
addtask cleansstate after do_clean
# Remove this recipe's shared-state cache files.
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"
664
# Remove every downloaded source for this recipe's SRC_URI entries.
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"
677
678
# Make the base_do_* implementations above the default task implementations.
EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package