BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license"
OE_IMPORTS[type] = "list"

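# Expose the Python modules listed in OE_IMPORTS (the oe.* namespace plus a few
# stdlib modules) to all metadata: add each layer's lib/ directory to sys.path
# and inject the top-level module names into the metadata context.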
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

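# Shell helpers: die() aborts the task with a logged fatal error, and
# oe_runmake wraps ${MAKE} ${EXTRA_OEMAKE} so recipes report make failures
# consistently.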
die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}


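# Default build-time dependencies (cross toolchain and C library) are added for
# target and nativesdk recipes unless INHIBIT_DEFAULT_DEPS is set.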
def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

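# Prepend ${STAGING_BINDIR_NATIVE}/<entry> for each entry in EXTRANATIVEPATH to
# PATH so additional native tools are visible to tasks.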
def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

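# Collect the absolute paths referenced by LIC_FILES_CHKSUM (those not covered
# by SRC_URI) so do_fetch can include them in its file checksums.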
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

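# Populate the destination directory with symlinks to the host tools named in
# the given variable, erroring out (when fatal) if a required tool cannot be
# found in the original PATH.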
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))

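# Fetch/unpack tasks: download everything listed in SRC_URI into DL_DIR and
# then unpack it into WORKDIR using the bb.fetch2 fetcher.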
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if d.getVar('S') != d.getVar('WORKDIR') else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

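# Build the 'layer = "branch:revision"' lines shown in the build banner,
# collapsing adjacent layers that share the same branch and revision.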
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2= layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1= layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

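# Global event handler: reacts to configuration parsing, multiconfig parsing,
# build start, recipe pre-finalise, scenequeue completion and recipe parsed
# events (see the eventmask below).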
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = d.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            d.setVar("completion_function", "\n".join(cmds))
            d.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", d)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

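# do_configure records the task hash in CONFIGURESTAMPFILE; if the hash has
# changed and CLEANBROKEN is not set, the build tree is cleaned (and stale .la
# files removed) before reconfiguring.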
addtask configure after do_patch
do_configure[dirs] = "${B}"
do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            find ${B} -ignore_readdir_race -name \*.la -delete
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

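# Compute PKGTRIPLETS and PKGMLTRIPLETS (arch-vendor-os triplets) for the base
# configuration and for each MULTILIB_VARIANTS override.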
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

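# Anonymous function run for every recipe at parse time: feature backfilling,
# PACKAGECONFIG expansion, license and compatibility checks, fetcher tool
# dependencies and PACKAGE_ARCH adjustments.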
python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
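    #
    # For example (hypothetical recipe snippet, not set here):
    #   PACKAGECONFIG ??= "gnutls"
    #   PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
    # would add --with-gnutls to PACKAGECONFIG_CONFARGS and gnutls to DEPENDS.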
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 5:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipRecipe("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

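    # Skip recipes whose COMPATIBLE_MACHINE regex matches none of the current
    # MACHINEOVERRIDES.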
    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
                    whitelist.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        whitelist.extend((d.getVar(w + spdx_license) or "").split())
                    '''
                    We need to track what we are whitelisting and why. If pn is
                    incompatible we need to be able to note that the image that
                    is created may in fact contain incompatible licenses despite
                    INCOMPATIBLE_LICENSE being set.
                    '''
                    incompatwl.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        incompatwl.extend((d.getVar(w + spdx_license) or "").split())

            if not pn in whitelist:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

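    # Add scheme- and archive-specific native tool dependencies for do_fetch /
    # do_unpack, and note whether SRCPV support is needed for SRCREV-based
    # fetchers.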
    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # file is needed by rpm2cpio.sh
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

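# Clean tasks: do_cleansstate removes the recipe's shared state files and
# do_cleanall additionally clears its downloads via the fetcher.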
683addtask cleansstate after do_clean
684python do_cleansstate() {
685 sstate_clean_cachefiles(d)
686}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500687addtask cleanall after do_cleansstate
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500688do_cleansstate[nostamp] = "1"
689
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500690python do_cleanall() {
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500691 src_uri = (d.getVar('SRC_URI') or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500692 if len(src_uri) == 0:
693 return
694
695 try:
696 fetcher = bb.fetch2.Fetch(src_uri, d)
697 fetcher.clean()
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600698 except bb.fetch2.BBFetchException as e:
699 bb.fatal(str(e))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500700}
701do_cleanall[nostamp] = "1"
702
703
704EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package