BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"
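# Note (illustrative): a layer that ships Python modules under its lib/
# directory can make them importable from metadata by appending the module
# names to OE_EXTRA_IMPORTS; the module name below is hypothetical:
#   OE_EXTRA_IMPORTS_append = " mylayer.utils"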

PACKAGECONFIG_CONFARGS ??= ""

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            imported = __import__(toimport)
            inject(toimport.split(".", 1)[0], imported)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
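
# Usage sketch (illustrative): recipes call oe_runmake from their task
# functions so that ${MAKE} and ${EXTRA_OEMAKE} are honoured and any failure
# aborts the task, e.g.:
#   do_compile() {
#       oe_runmake
#   }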


def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "
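# Note: a recipe that must not pull in the default toolchain/libc dependencies
# (for example because it provides them itself) can opt out of BASEDEPENDS by
# setting INHIBIT_DEFAULT_DEPS = "1".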

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it is used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

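# Note (illustrative): LIC_FILES_CHKSUM entries normally look like
#   LIC_FILES_CHKSUM = "file://COPYING;md5=<checksum>"
# The helper below only returns entries that are absolute file:// paths
# outside TMPDIR/S/B/WORKDIR, so do_fetch re-runs if such an external
# license file changes or disappears.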
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

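# Populate the given destination directory (normally ${HOSTTOOLS_DIR}) with
# symlinks to the host tools named in 'toolsvar' (HOSTTOOLS for required
# tools, HOSTTOOLS_NONFATAL for optional ones); layer.conf puts that
# directory on PATH.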
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may be symlinked to ccache on some hosts, e.g.
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, so which(gcc)
            # would return /usr/local/bin/ccache/gcc when what we need is
            # /usr/bin/gcc; detect and fix that here.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

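# Wipe ${S} before unpacking, unless S equals WORKDIR, in which case only
# ${S}/patches is cleaned so the rest of WORKDIR is preserved.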
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"
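# Functions listed in BUILDCFG_FUNCS supply (or sanity-check) the lines of the
# build configuration summary printed under BUILDCFG_HEADER when a build
# starts (see base_eventhandler below).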

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
        # skip parsing for all the other providers, which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"
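# Note: a recipe whose "make clean" does not work can set CLEANBROKEN = "1";
# base_do_configure will then skip the clean step when the task hash changes.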

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
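    #
    # Illustrative recipe usage (the "foo" option and the package names here
    # are hypothetical):
    #   PACKAGECONFIG ??= "foo"
    #   PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,libfoo,foo-daemon"
    # Enabled options feed PACKAGECONFIG_CONFARGS, DEPENDS, RDEPENDS_${PN}
    # and RRECOMMENDS_${PN} via the code below.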
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}', which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}, which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if pn in whitelist:
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                bb.note("Including %s as buildable despite it having an incompatible license because it has been whitelisted" % pn)
            else:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = {}
                unskipped_pkgs = []
                for pkg in pkgs:
                    incompatible_lic = incompatible_license(d, bad_licenses, pkg)
                    if incompatible_lic:
                        skipped_pkgs[pkg] = incompatible_lic
                    else:
                        unskipped_pkgs.append(pkg)
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, ' '.join(skipped_pkgs[pkg]))
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "Including the package %s" % pkg)
                else:
                    incompatible_lic = incompatible_license(d, bad_licenses)
                    for pkg in skipped_pkgs:
                        incompatible_lic += skipped_pkgs[pkg]
                    incompatible_lic = sorted(list(set(incompatible_lic)))

                    if incompatible_lic:
                        bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                        raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _, path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
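        # Illustrative example (the URL and name below are hypothetical): an
        # entry such as SRC_URI = "git://example.com/repo.git;name=meta" uses
        # SRCREV_meta, which the loop below adds to do_fetch[vardeps].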
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"

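# Export the base_do_* implementations above as the default do_* task
# functions; recipes and other classes may override them.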
EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package