blob: bd0d6e3ca60c561006431d04384e464b47e8dae7 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001BB_DEFAULT_TASK ?= "build"
2CLASSOVERRIDE ?= "class-target"
3
4inherit patch
5inherit staging
6
7inherit mirrors
8inherit utils
9inherit utility-tasks
10inherit metadata_scm
11inherit logging
12
Brad Bishop6e60e8b2018-02-01 10:27:11 -050013OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050014OE_IMPORTS[type] = "list"
15
def oe_import(d):
    """Import the modules listed in OE_IMPORTS and expose them to the metadata.

    Prepends each BBPATH entry's lib/ directory to sys.path so the oe.*
    modules can be found, then injects each imported top-level package into
    the namespace that python metadata fragments execute in.  Returns ""
    so it can be invoked from an inline ${@...} expansion.
    """
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        # __import__("a.b") returns the top-level package "a"; injecting that
        # name is enough for dotted lookups (e.g. oe.path) to resolve.
        inject(toimport.split(".", 1)[0], imported)

    return ""
35
36# We need the oe module name space early (before INHERITs get added)
37OE_IMPORTED := "${@oe_import(d)}"
38
def lsb_distro_identifier(d):
    """Return the host distro identifier string.

    If LSB_DISTRO_ADJUST names a function in this namespace, it is passed
    to oe.lsb.distro_identifier() as the adjustment hook; unknown or unset
    names simply mean no adjustment.
    """
    adjust_name = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = globals().get(adjust_name) if adjust_name else None
    return oe.lsb.distro_identifier(adjust_func)
48
die() {
    # Abort the current task, recording the message in the task log too.
    bbfatal_log "$*"
}
52
oe_runmake_call() {
    # Echo the make invocation to the log, then run it with the
    # recipe-provided EXTRA_OEMAKE flags and any extra arguments.
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}
57
oe_runmake() {
    # Run make, turning any failure into a fatal task error.
    oe_runmake_call "$@" || die "oe_runmake failed"
}
61
62
def base_dep_prepend(d):
    """Return the implicit toolchain/libc dependencies for a recipe.

    Recipes that set INHIBIT_DEFAULT_DEPS opt out and get an empty string;
    everyone else depends on ${BASE_DEFAULT_DEPS}.
    """
    return "" if d.getVar('INHIBIT_DEFAULT_DEPS', False) else "${BASE_DEFAULT_DEPS}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050067
Brad Bishopd7bf8c12018-02-25 22:55:05 -050068BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050069
Brad Bishopd7bf8c12018-02-25 22:55:05 -050070BASEDEPENDS = ""
71BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
72BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050073
74DEPENDS_prepend="${BASEDEPENDS} "
75
76FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
77# THISDIR only works properly with imediate expansion as it has to run
78# in the context of the location its used (:=)
Brad Bishop6e60e8b2018-02-01 10:27:11 -050079THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050080
def extra_path_elements(d):
    """Build a PATH prefix from the EXTRANATIVEPATH entries.

    Each entry becomes "${STAGING_BINDIR_NATIVE}/<entry>:" so the result
    can be prepended to PATH directly; empty when EXTRANATIVEPATH is unset.
    """
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    return "".join("${STAGING_BINDIR_NATIVE}/%s:" % e for e in elements)
87
88PATH_prepend = "${@extra_path_elements(d)}"
89
def get_lic_checksum_file_list(d):
    """Return a space-separated list of absolute LIC_FILES_CHKSUM paths.

    Each entry is "<path>:<exists>" so the do_fetch file-checksums task
    signature changes when a license file appears or disappears.  Paths
    under TMPDIR, S, B or WORKDIR are skipped (covered by other task
    dependencies); relative paths are skipped as SRC_URI covers them.
    A malformed URL in LIC_FILES_CHKSUM is fatal.
    """
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
114
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    """Populate dest with symlinks to the host tools named in toolsvar.

    Each tool is looked up in the PATH of the original (pre-bitbake)
    environment (BB_ORIGENV) and symlinked into dest.  A tool resolving to
    a ccache wrapper is re-resolved from the far end of PATH to reach the
    real binary.  Missing tools are fatal unless fatal=False.
    """
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            if "ccache" in srctool:
                # skip the ccache symlink farm: search PATH from the other
                # direction to find the underlying tool
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
133
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500134addtask fetch
135do_fetch[dirs] = "${DL_DIR}"
136do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
137do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
138do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
    # Download every SRC_URI entry via the fetch2 framework; recipes
    # without a SRC_URI have nothing to do.  Fetch errors are fatal.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
151
152addtask unpack after do_fetch
153do_unpack[dirs] = "${WORKDIR}"
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600154
python () {
    # Decide what do_unpack wipes before running: the whole ${S} when it is
    # a separate directory, otherwise (S == WORKDIR) only the quilt
    # "patches" directory so the rest of WORKDIR survives.
    if d.getVar('S') != d.getVar('WORKDIR'):
        d.setVarFlag('do_unpack', 'cleandirs', '${S}')
    else:
        d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
}
python base_do_unpack() {
    # Unpack every SRC_URI entry into WORKDIR via the fetch2 framework;
    # recipes without a SRC_URI have nothing to do.  Unpack errors are fatal.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
172
def pkgarch_mapping(d):
    """Apply the opt-in TUNE_PKGARCH compatibility mapping.

    When PKGARCHCOMPAT_ARMV7A is set, the legacy "armv7a-vfp-neon" package
    architecture is renamed to plain "armv7a".
    """
    if d.getVar("PKGARCHCOMPAT_ARMV7A") and d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
        d.setVar("TUNE_PKGARCH", "armv7a")
178
def get_layers_branch_rev(d):
    """Return '<layer> = "<branch>:<rev>"' summary lines for BBLAYERS.

    Consecutive layers that share the same branch:rev are collapsed,
    scanning from the end of the list: only the last layer of a run keeps
    the value, earlier entries show just the padded layer name.
    NOTE(review): assumes BBLAYERS is non-empty — an empty list would index
    layers_branch_rev[-1]; callers appear to guarantee this.
    """
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            # same branch:rev as the following layer - drop the duplicate value
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            # run ends here; restart comparison from this entry
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev
199
200
201BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
202BUILDCFG_FUNCS[type] = "list"
203
def buildcfg_vars(d):
    """Yield padded 'NAME = "value"' lines for each set BUILDCFG_VARS entry."""
    for var in oe.data.typed_value('BUILDCFG_VARS', d):
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500210
def buildcfg_neededvars(d):
    """Abort the build when any BUILDCFG_NEEDEDVARS variable is unset or INVALID."""
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    # unset/empty values and the literal placeholder 'INVALID' both count as missing
    pesteruser = [v for v in needed_vars if (d.getVar(v) or 'INVALID') == 'INVALID']

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
221
222addhandler base_eventhandler
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500223base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
python base_eventhandler() {
    # Central event handler: reacts to config parse, multiconfig parse,
    # build start, recipe pre-finalise, scenequeue completion and recipe
    # parsed events (see base_eventhandler[eventmask]).
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        # One-time global setup after the configuration is parsed.
        if not e.data.getVar("NATIVELSBSTRING", False):
            e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        # Print the build-configuration banner assembled from BUILDCFG_FUNCS.
        localdata = bb.data.createCopy(e.data)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.runqueue.sceneQueueComplete):
        # Run any deferred post-sstate "completion" commands collected in
        # ${STAGING_DIR}/sstatecompletions, then remove the file.
        completions = e.data.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            e.data.setVar("completion_function", "\n".join(cmds))
            e.data.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", e.data)
            os.remove(completions)

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
305
306CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
307CLEANBROKEN = "0"
308
309addtask configure after do_patch
310do_configure[dirs] = "${B}"
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500311do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
base_do_configure() {
    # If the task hash changed since the last configure, clean stale build
    # artefacts first (unless CLEANBROKEN says "make clean" does not work)
    # and drop libtool .la files, then record the new hash in the stamp.
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race: files may vanish while find walks ${B}
            find ${B} -ignore_readdir_race -name \*.la -delete
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}
327
328addtask compile after do_configure
329do_compile[dirs] = "${B}"
base_do_compile() {
    # Default compile: run make when any makefile exists, otherwise no-op.
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}
337
338addtask install after do_compile
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600339do_install[dirs] = "${B}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500340# Remove and re-create ${D} so that is it guaranteed to be empty
341do_install[cleandirs] = "${D}"
342
base_do_install() {
    # Default install is a no-op; recipes override do_install as needed.
    :
}
346
base_do_package() {
    # Default package step is a no-op; packaging classes override this.
    :
}
350
351addtask build after do_populate_sysroot
352do_build[noexec] = "1"
353do_build[recrdeptask] += "do_deploy"
do_build () {
    # Marker task only (do_build[noexec] = "1"); the real work happens in
    # its dependency chain.
    :
}
357
def set_packagetriplet(d):
    """Set PKGTRIPLETS and PKGMLTRIPLETS.

    PKGTRIPLETS holds "<arch><vendor>-<os>" for every PACKAGE_ARCHS entry
    (most specific arch first); PKGMLTRIPLETS additionally includes the
    triplets recomputed in each MULTILIB_VARIANTS override context.
    """
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        # Build triplets per (archs, vendor, os) group, then reverse so the
        # most specific architecture ends up first.
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        # Re-evaluate the target variables inside each multilib override
        # context using a private copy of the datastore.
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
388
python () {
    # Anonymous function run at recipe parse time.  In order it handles:
    # PACKAGECONFIG expansion, license validation, fakeroot task flags,
    # COMPATIBLE_MACHINE/HOST checks, per-scheme fetch/unpack dependencies,
    # SRCPV setup and 'multimachine' PACKAGE_ARCH promotion.
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    #    PACKAGECONFIG ??= "<default options>"
    #    PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            # Rewrite dependency names for the current recipe class:
            # add a suffix (e.g. -native) and/or a prefix (e.g. nativesdk-,
            # multilib prefix), leaving already-classed names alone.
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            # Append collected values to varname, mapping *DEPENDS entries
            # through expandFilter for nativesdk/native/multilib recipes.
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 5:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                # flag enabled: collect enable arg, DEPENDS, RDEPENDS, RRECOMMENDS
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                # flag disabled: collect the disable argument only
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    # Skip the recipe when COMPATIBLE_MACHINE matches no MACHINEOVERRIDES entry.
    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    # Host-compatibility and incompatible-license checks are skipped when
    # populating a source mirror (we still want the sources fetched).
    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        # Toolchain/SDK helper recipes are exempt from the license check.
        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
                    whitelist.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        whitelist.extend((d.getVar(w + spdx_license) or "").split())
                    '''
                    We need to track what we are whitelisting and why. If pn is
                    incompatible we need to be able to note that the image that
                    is created may infact contain incompatible licenses despite
                    INCOMPATIBLE_LICENSE being set.
                    '''
                    incompatwl.extend((d.getVar(w + lic) or "").split())
                    if spdx_license:
                        incompatwl.extend((d.getVar(w + spdx_license) or "").split())

            if not pn in whitelist:
                # Exclude individual incompatible packages; skip the whole
                # recipe only when every package (or the recipe) is bad.
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipPackage("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    # Add fetch/unpack tool dependencies implied by each SRC_URI entry.
    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # file is needed by rpm2cpio.sh
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            # A file:// source resolving into a machine-named directory
            # makes the whole recipe machine specific.
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}
687
688addtask cleansstate after do_clean
python do_cleansstate() {
    # Remove this recipe's shared-state (sstate) cache files.
    sstate_clean_cachefiles(d)
}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500692addtask cleanall after do_cleansstate
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500693do_cleansstate[nostamp] = "1"
694
python do_cleanall() {
    # Remove all downloaded sources for this recipe via the fetch2
    # framework; recipes without a SRC_URI have nothing to do.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
706do_cleanall[nostamp] = "1"
707
708
709EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package