BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.data oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath"
OE_IMPORTS[type] = "list"

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH", True).split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
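
# Example usage from a recipe (illustrative only):
#   do_compile() {
#       oe_runmake
#   }
# Any arguments are passed through to ${MAKE} along with ${EXTRA_OEMAKE}.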

def base_dep_prepend(d):
    #
    # Ideally this would check a flag so that we operate properly when
    # host == build == target; for now, that case is not handled.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"

DEPENDS_prepend = "${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
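# e.g. for a hypothetical recipe foo_1.0.bb the assignment above searches
# foo-1.0/, foo/ and files/ (in that order) next to the recipe file.
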
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"
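# e.g. EXTRANATIVEPATH = "chrpath-native" would prepend
# ${STAGING_BINDIR_NATIVE}/chrpath-native to PATH.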

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
    tmpdir = d.getVar("TMPDIR", True)

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if path[0] == '/':
                if path.startswith(tmpdir):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            raise bb.build.FuncFailed(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    rootdir = d.getVar('WORKDIR', True)

    # Ensure that we clean up ${S}/patches
    # TODO: Investigate if we can remove
    # the entire ${S} in this case.
    s_dir = d.getVar('S', True)
    p_dir = os.path.join(s_dir, 'patches')
    bb.utils.remove(p_dir, True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
            d.setVar("TUNE_PKGARCH", "armv7a")

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
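    # Scan backwards and blank out the '= "branch:revision"' part of any entry
    # that matches the entry after it, so a run of layers at the same
    # branch:revision only prints the value once (on the last layer of the run).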
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not e.data.getVar("NATIVELSBSTRING", False):
            e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(e.data)
        bb.data.update_data(localdata)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

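    # After the setscene (sstate) queue completes, execute any shell commands
    # queued in ${STAGING_DIR}/sstatecompletions, then remove that file.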
    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = e.data.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            e.data.setVar("completion_function", "\n".join(cmds))
            e.data.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", e.data)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is
        # set, skip parsing for all the other providers, which will mean they get uninstalled
        # from the sysroot since they're now "unreachable". This makes switching
        # virtual/kernel work in particular.
        #
        pn = d.getVar('PN', True)
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES", True) or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
do_configure[deptask] = "do_populate_sysroot"
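# ${CONFIGURESTAMPFILE} records the task hash of the last do_configure run.
# If the hash has changed, the build tree is cleaned first (skipped when
# CLEANBROKEN = "1", for sources whose "make clean" is broken).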
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            find ${B} -ignore_readdir_race -name \*.la -delete
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${D} ${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
    tos.append(d.getVar("TARGET_OS", True))
    tvs.append(d.getVar("TARGET_VENDOR", True))

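    # e.g. a PACKAGE_ARCHS entry of "armv7a" with TARGET_VENDOR "-poky" and
    # TARGET_OS "linux-gnueabi" (hypothetical values) yields the triplet
    # "armv7a-poky-linux-gnueabi".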
    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)

        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
        tos.append(localdata.getVar("TARGET_OS", True))
        tvs.append(localdata.getVar("TARGET_VENDOR", True))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
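    #
    # i.e. enable option, disable option, build dependencies, runtime
    # dependencies, comma-separated. A hypothetical recipe might set:
    #
    # PACKAGECONFIG ??= "gnutls"
    # PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"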
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
        pn = d.getVar("PN", True)

        mlprefix = d.getVar("MLPREFIX", True)

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 4:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
                    % (d.getVar('PN', True), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

        # TODO: once all recipes/classes abusing EXTRA_OECONF
        # to get PACKAGECONFIG options are fixed to use PACKAGECONFIG_CONFARGS
        # move this appendVar to autotools.bbclass.
        if not bb.data.inherits_class('cmake', d):
            appendVar('EXTRA_OECONF', extraconf)

    pn = d.getVar('PN', True)
    license = d.getVar('LICENSE', True)
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST', True)
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS', True)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()

    check_license = False if pn.startswith("nativesdk-") else True
    for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
          "-crosssdk-${SDK_ARCH}", "-crosssdk-initial-${SDK_ARCH}",
          "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
        if pn.endswith(d.expand(t)):
            check_license = False
    if pn.startswith("gcc-source-"):
        check_license = False

    if check_license and bad_licenses:
        bad_licenses = expand_wildcard_licenses(d, bad_licenses)

        whitelist = []
        incompatwl = []
        for lic in bad_licenses:
            spdx_license = return_spdx(d, lic)
            for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
                whitelist.extend((d.getVar(w + lic, True) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                incompatwl.extend((d.getVar(w + lic, True) or "").split())
                if spdx_license:
                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())

        if pn not in whitelist:
            pkgs = d.getVar('PACKAGES', True).split()
            skipped_pkgs = []
            unskipped_pkgs = []
            for pkg in pkgs:
                if incompatible_license(d, bad_licenses, pkg):
                    skipped_pkgs.append(pkg)
                else:
                    unskipped_pkgs.append(pkg)
            all_skipped = skipped_pkgs and not unskipped_pkgs
            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                    mlprefix = d.getVar('MLPREFIX', True)
                    d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                for pkg in unskipped_pkgs:
                    bb.debug(1, "INCLUDING the package " + pkg)
            elif all_skipped or incompatible_license(d, bad_licenses):
                bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                raise bb.parse.SkipPackage("incompatible with license %s" % license)
        elif pn in whitelist:
            if pn in incompatwl:
                bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    needsrcrev = False
    srcuri = d.getVar('SRC_URI', True)
    for uri in srcuri.split():
        (scheme, _, path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # file is needed by rpm2cpio.sh
        elif path.endswith('.src.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' file-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH', True)
    pkg_arch = d.getVar('PACKAGE_ARCH', True)

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
        machine = d.getVar('MACHINE', True)
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package