BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license"
OE_IMPORTS[type] = "list"

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
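# Illustrative usage (not part of this class): recipe tasks normally call make
# via oe_runmake so that ${MAKE} and ${EXTRA_OEMAKE} are honoured and a failure
# aborts the task, e.g.
#   do_compile() {
#       oe_runmake
#   }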


def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it is used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
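# Illustrative usage (not part of this class): because THISDIR relies on
# immediate expansion, bbappends typically extend the file search path with
# := as well, e.g.
#   FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"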

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
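# Illustrative usage (not part of this class): LIC_FILES_CHKSUM entries are
# file:// URLs with a checksum parameter; absolute paths outside TMPDIR, S, B
# and WORKDIR are added to do_fetch's file-checksums below, e.g.
#   LIC_FILES_CHKSUM = "file://COPYING;md5=<checksum of the license text>"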

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    # Force "python" -> "python2"
    desttool = os.path.join(dest, "python")
    if not os.path.exists(desttool):
        srctool = "python2"
        os.symlink(srctool, desttool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))
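# Illustrative usage (not part of this class): configuration can extend the set
# of host commands exposed through HOSTTOOLS_DIR, e.g.
#   HOSTTOOLS += "rsync"
#   HOSTTOOLS_NONFATAL += "ccache"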

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

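# If S points somewhere other than WORKDIR, wipe S before unpacking; otherwise
# only clear ${S}/patches so the rest of WORKDIR is preserved.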
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = d.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            d.setVar("completion_function", "\n".join(cmds))
            d.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", d)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"
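# Recipes whose build system cannot survive "make clean" between reconfigures
# can set CLEANBROKEN = "1" to skip the clean step in base_do_configure below.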

addtask configure after do_patch
do_configure[dirs] = "${B}"
do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
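    #
    # Illustrative example (not part of this class): a recipe might define
    #   PACKAGECONFIG ??= "gnutls"
    #   PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
    # and a user could enable an extra option from local.conf with
    #   PACKAGECONFIG_append_pn-<recipename> = " foo"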
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 5:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipRecipe("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
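    # Illustrative example (not part of this class): COMPATIBLE_MACHINE is a
    # regular expression matched against each MACHINEOVERRIDES entry, e.g.
    #   COMPATIBLE_MACHINE = "(qemux86|qemux86-64)"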

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                incompatwl.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    incompatwl.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if not pn in whitelist:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

        # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
        # superset of all per-package licenses. We do not do advanced (pattern)
        # matching of license expressions - just check that all license strings
        # in LICENSE_<pkg> are found in LICENSE.
        license_set = oe.license.list_licenses(license)
        for pkg in d.getVar('PACKAGES').split():
            pkg_license = d.getVar('LICENSE_' + pkg)
            if pkg_license:
                unlisted = oe.license.list_licenses(pkg_license) - license_set
                if unlisted:
                    bb.warn("LICENSE_%s includes licenses (%s) that are not "
                            "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
        sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package