BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.data oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath"
OE_IMPORTS[type] = "list"

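# oe_import() runs once at parse time (via the immediate expansion of OE_IMPORTED
# below): it puts each BBPATH layer's lib/ directory on sys.path and injects the
# modules listed in OE_IMPORTS into the metadata context so ${@...} expressions
# can use them.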
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH", True).split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

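# Shell helpers: die() aborts the current task with a fatal log message, and
# oe_runmake() invokes ${MAKE} with EXTRA_OEMAKE, failing the task if make fails.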
die() {
	bbfatal_log "$*"
}

oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}


def base_dep_prepend(d):
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it is used in (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

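# Absolute paths in LIC_FILES_CHKSUM are not fetched via SRC_URI, so record them
# (and whether they exist) as file-checksums on do_fetch so that changes to those
# license files re-trigger the task.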
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
    tmpdir = d.getVar("TMPDIR", True)

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            path = bb.fetch.decodeurl(url)[2]
            if path[0] == '/':
                if path.startswith(tmpdir):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            raise bb.build.FuncFailed(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    rootdir = d.getVar('WORKDIR', True)

    # Ensure that we cleanup ${S}/patches
    # TODO: Investigate if we can remove
    # the entire ${S} in this case.
    s_dir = d.getVar('S', True)
    p_dir = os.path.join(s_dir, 'patches')
    bb.utils.remove(p_dir, True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
            d.setVar("TUNE_PKGARCH", "armv7a")

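# Produce the 'layer            = "branch:revision"' lines shown in the build
# banner. When consecutive layers share the same branch and revision, the repeated
# suffix is stripped from the earlier entries to keep the banner compact.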
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

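# base_eventhandler reacts to global bitbake events: on ConfigParsed it fills in
# NATIVELSBSTRING/BB_VERSION and backfills DISTRO_FEATURES/MACHINE_FEATURES, on
# BuildStarted it prints the build configuration banner, on RecipePreFinalise it
# drops PREFERRED_PROVIDER settings that would collide when TARGET_PREFIX equals
# SDK_PREFIX, and on sceneQueueComplete it runs any queued sstate completion
# commands from ${STAGING_DIR}/sstatecompletions.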
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not e.data.getVar("NATIVELSBSTRING", False):
            e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(e.data)
        bb.data.update_data(localdata)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.runqueue.sceneQueueComplete):
        completions = e.data.expand("${STAGING_DIR}/sstatecompletions")
        if os.path.exists(completions):
            cmds = set()
            with open(completions, "r") as f:
                cmds = set(f)
            e.data.setVar("completion_function", "\n".join(cmds))
            e.data.setVarFlag("completion_function", "func", "1")
            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
            bb.build.exec_func("completion_function", e.data)
            os.remove(completions)
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

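# CONFIGURESTAMPFILE records the do_configure task hash. When the hash changes,
# base_do_configure runs "oe_runmake clean" in ${B} first (unless CLEANBROKEN is
# set to "1") and removes stale libtool .la files before writing the new hash.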
addtask configure after do_patch
do_configure[dirs] = "${B}"
do_configure[deptask] = "do_populate_sysroot"
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			find ${B} -ignore_readdir_race -name \*.la -delete
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}

addtask install after do_compile
do_install[dirs] = "${D} ${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
	:
}

base_do_package() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}

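# Build the list of "arch-vendor-os" package triplets: PKGTRIPLETS for the main
# configuration, and PKGMLTRIPLETS extended with every MULTILIB_VARIANTS override.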
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
    tos.append(d.getVar("TARGET_OS", True))
    tvs.append(d.getVar("TARGET_VENDOR", True))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)

        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
        tos.append(localdata.getVar("TARGET_OS", True))
        tvs.append(localdata.getVar("TARGET_VENDOR", True))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

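# Anonymous function, run every time a recipe is parsed: expands PACKAGECONFIG
# options into DEPENDS/RDEPENDS/configure arguments, enforces LICENSE and
# INCOMPATIBLE_LICENSE handling, enables fakeroot for target packaging tasks,
# checks COMPATIBLE_HOST/COMPATIBLE_MACHINE, adds fetch/unpack tool dependencies
# based on SRC_URI, and adjusts PACKAGE_ARCH for machine-specific recipes.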
python () {
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
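    #
    # For example (purely illustrative; the option name and values are hypothetical):
    #
    # PACKAGECONFIG ??= "ssl"
    # PACKAGECONFIG[ssl] = "--with-ssl,--without-ssl,openssl,"
    #
    # With "ssl" enabled, "--with-ssl" is appended to EXTRA_OECONF (EXTRA_OECMAKE
    # for cmake recipes) and "openssl" is added to DEPENDS; otherwise
    # "--without-ssl" is appended instead.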
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
        pn = d.getVar("PN", True)

        mlprefix = d.getVar("MLPREFIX", True)

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if pn.startswith("nativesdk-"):
                    appends = expandFilter(appends, "", "nativesdk-")
                if pn.endswith("-native"):
                    appends = expandFilter(appends, "-native", "")
                if mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 4:
                bb.error("Only enable,disable,depend,rdepend can be specified!")

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        if bb.data.inherits_class('cmake', d):
            appendVar('EXTRA_OECMAKE', extraconf)
        else:
            appendVar('EXTRA_OECONF', extraconf)

    pn = d.getVar('PN', True)
    license = d.getVar('LICENSE', True)
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")
    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', 1)
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', 1)
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', 1)
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', 1)
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST', True)
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS', True)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        need_machine = d.getVar('COMPATIBLE_MACHINE', True)
        if need_machine:
            import re
            compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
            for m in compat_machines:
                if re.match(need_machine, m):
                    break
            else:
                raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))


        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_ARCH}", "-crosssdk-initial-${SDK_ARCH}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            htincompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                for w in ["HOSTTOOLS_WHITELIST_", "LGPLv2_WHITELIST_", "WHITELIST_"]:
                    whitelist.extend((d.getVar(w + lic, True) or "").split())
                    if spdx_license:
                        whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
                    '''
                    We need to track what we are whitelisting and why. If pn is
                    incompatible and is not in HOSTTOOLS_WHITELIST_ we need to be
                    able to note that the image that is created may in fact
                    contain incompatible licenses despite INCOMPATIBLE_LICENSE
                    being set.
                    '''
                    if "HOSTTOOLS" in w:
                        htincompatwl.extend((d.getVar(w + lic, True) or "").split())
                        if spdx_license:
                            htincompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
                    else:
                        incompatwl.extend((d.getVar(w + lic, True) or "").split())
                        if spdx_license:
                            incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())

            if pn not in whitelist:
                recipe_license = d.getVar('LICENSE', True)
                pkgs = d.getVar('PACKAGES', True).split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + recipe_license)
                        d.setVar('LICENSE_EXCLUSION-' + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, recipe_license))
                    raise bb.parse.SkipPackage("incompatible with license %s" % recipe_license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")
                elif pn in htincompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted for HOSTTOOLS")

    srcuri = d.getVar('SRC_URI', True)
    # Svn packages should DEPEND on subversion-native
    if "svn://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

    # Git packages should DEPEND on git-native
    if "git://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

    # Mercurial packages should DEPEND on mercurial-native
    elif "hg://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

    # OSC packages should DEPEND on osc-native
    elif "osc://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

    # *.lz4 should DEPEND on lz4-native for unpacking
    # Not endswith because of "*.patch.lz4;patch=1". Need bb.fetch.decodeurl in future
    if '.lz4' in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

    # *.xz should DEPEND on xz-native for unpacking
    # Not endswith because of "*.patch.xz;patch=1". Need bb.fetch.decodeurl in future
    if '.xz' in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    # unzip-native should already be staged before unpacking ZIP recipes
    if ".zip" in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

    # file is needed by rpm2cpio.sh
    if ".src.rpm" in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' file-native:do_populate_sysroot')

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH', True)
    pkg_arch = d.getVar('PACKAGE_ARCH', True)

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
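    # i.e. if any file:// entry in SRC_URI resolves to a file under a ${MACHINE}
    # subdirectory of FILESPATH, the recipe is treated as machine specific and
    # PACKAGE_ARCH is forced to ${MACHINE_ARCH}.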
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
        machine = d.getVar('MACHINE', True)
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}

addtask cleanall after do_cleansstate
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package