#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit logging

PACKAGECONFIG_CONFARGS ??= ""

inherit metadata_scm

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
	bbfatal_log "$*"
}

oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
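# Illustrative usage (not part of this class): a recipe's do_compile can
# delegate to oe_runmake, with EXTRA_OEMAKE carrying any extra make
# arguments, e.g.:
#   EXTRA_OEMAKE = "'CC=${CC}' 'CFLAGS=${CFLAGS}'"
#   do_compile() {
#       oe_runmake all
#   }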


def get_base_dep(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS:class-target = "${@get_base_dep(d)}"
BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"

DEPENDS:prepend = "${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion, as it has to run
# in the context of the location it is used in (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
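# Illustrative example: for a recipe foo_1.2.bb, BP expands to "foo-1.2" and
# BPN to "foo", so file:// URIs are searched for (in order) in foo-1.2/,
# foo/ and files/ alongside the recipe, plus the override-specific
# subdirectories that base_set_filespath adds.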

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH:prepend = "${@extra_path_elements(d)}"

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
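# Illustrative input for the function above (recipe-side; the md5 value is a
# placeholder):
#   LIC_FILES_CHKSUM = "file://COPYING;md5=<md5sum of COPYING>"
# Only absolute file:// paths outside TMPDIR/S/B/WORKDIR end up in the
# do_fetch file-checksums list; relative ones are already tracked via SRC_URI.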

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    # Need to ignore our own scripts directories to avoid circular links
    for p in path.split(":"):
        if p.endswith("/scripts"):
            path = path.replace(p, "/ignoreme")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may be a link to ccache on some hosts, e.g.
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache; which(gcc) would
            # then return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc. This code detects and fixes that case.
            if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))
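# Illustrative: HOSTTOOLS and HOSTTOOLS_NONFATAL list host binaries to be
# symlinked into ${HOSTTOOLS_DIR}; they can be extended, e.g. in local.conf:
#   HOSTTOOLS += "rsync"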

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
do_fetch[network] = "1"
python base_do_fetch() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

SSTATETASKS += "do_deploy_source_date_epoch"

do_deploy_source_date_epoch () {
    mkdir -p ${SDE_DEPLOYDIR}
    if [ -e ${SDE_FILE} ]; then
        echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    else
        echo "${SDE_FILE} not found!"
    fi
}

python do_deploy_source_date_epoch_setscene () {
    sstate_setscene(d)
    bb.utils.mkdirhier(d.getVar('SDE_DIR'))
    sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
    if os.path.exists(sde_file):
        target = d.getVar('SDE_FILE')
        bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
        bb.utils.rename(sde_file, target)
    else:
        bb.debug(1, "%s not found!" % sde_file)
}

do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
addtask do_deploy_source_date_epoch_setscene
addtask do_deploy_source_date_epoch before do_configure after do_patch

python create_source_date_epoch_stamp() {
    # Version: 1
    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
    oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
do_unpack[postfuncs] += "create_source_date_epoch_stamp"

def get_source_date_epoch_value(d):
    return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)

def get_layers_branch_rev(d):
    revisions = oe.buildcfg.get_layer_revisions(d)
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (r[1], r[2], r[3]) for r in revisions]
    # Collapse consecutive entries sharing the same branch:revision suffix so
    # the build banner only prints it once per run of identical values.
    i = len(layers_branch_rev) - 1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i - 1].find("=")
        s2 = layers_branch_rev[i - 1][p2:]
        if s1 == s2:
            layers_branch_rev[i - 1] = layers_branch_rev[i - 1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"
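# These functions produce the status banner printed at BuildStarted (see
# base_eventhandler below); the output is a series of lines shaped like
# (values illustrative):
#   BB_VERSION           = "2.0.0"
#   MACHINE              = "qemux86-64"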

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if the bitbake server is
    # already running, so check bb.event.BuildStarted too to make sure
    # ${HOSTTOOLS_DIR} exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the
        # multiconfig data stores' own contexts so the variables get expanded
        # correctly for that arch, then inject the result back into the main
        # data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code silences warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
        # skip parsing for all the other providers, which means they get uninstalled from the
        # sysroot since they are now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiprovidersallowed:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"
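# Recipes whose "make clean" is known to be broken can opt out of the
# "oe_runmake clean" invocation in base_do_configure below by setting:
#   CLEANBROKEN = "1"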

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# -ignore_readdir_race does not work correctly with -delete;
			# use xargs to avoid spurious build failures
			find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    if d.getVar("S")[-1] == '/':
        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
    if d.getVar("B")[-1] == '/':
        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))

    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")

    # To add a recipe to the skip list, set:
    #   SKIP_RECIPE[pn] = "message"
    pn = d.getVar('PN')
    skip_msg = d.getVarFlag('SKIP_RECIPE', pn)
    if skip_msg:
        bb.debug(1, "Skipping %s %s" % (pn, skip_msg))
        raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg))

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
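    #
    # Illustrative recipe-side example (not set by this class): with
    #   PACKAGECONFIG ??= "gtk"
    #   PACKAGECONFIG[gtk] = "--enable-gtk,--disable-gtk,gtk+3"
    # enabling "gtk" adds --enable-gtk to PACKAGECONFIG_CONFARGS and gtk+3 to
    # DEPENDS; leaving it disabled adds --disable-gtk instead.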
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if "DEPENDS" in varname:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS:${PN}', extrardeps)
        appendVar('RRECOMMENDS:${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            for unmatched in unmatched_license_flags:
                message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
                details = d.getVarFlag("LICENSE_FLAGS_DETAILS", unmatched)
                if details:
                    message += details
            bb.debug(1, "Skipping %s: %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
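    # Illustrative recipe-side usage: COMPATIBLE_MACHINE is a regular
    # expression matched against each MACHINEOVERRIDES entry above, e.g.:
    #   COMPATIBLE_MACHINE = "^(qemux86|qemux86-64)$"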

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = not pn.startswith("nativesdk-")
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()

            for lic_exception in exceptions:
                if ":" in lic_exception:
                    lic_exception = lic_exception.split(":")[1]
                if lic_exception in oe.license.obsolete_license_list():
                    bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)

            pkgs = d.getVar('PACKAGES').split()
            skipped_pkgs = {}
            unskipped_pkgs = []
            for pkg in pkgs:
                remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions)

                incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
                if incompatible_lic:
                    skipped_pkgs[pkg] = incompatible_lic
                else:
                    unskipped_pkgs.append(pkg)

            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                    d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg]))
                for pkg in unskipped_pkgs:
                    bb.debug(1, "Including the package %s" % pkg)
            else:
                incompatible_lic = incompatible_license(d, bad_licenses)
                for pkg in skipped_pkgs:
                    incompatible_lic += skipped_pkgs[pkg]
                incompatible_lic = sorted(list(set(incompatible_lic)))

                if incompatible_lic:
                    bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                    raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)
        # Also check downloadfilename as the URL path might not be useful for sniffing
        path = uri.params.get("downloadfilename", uri.path)

        # HTTP/FTP use the wget fetcher
        if uri.scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif uri.scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif uri.scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        elif uri.scheme == "repo":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.zst should DEPEND on zstd-native for unpacking
        elif path.endswith('.zst'):
            d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz and *.txz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.zip and *.jar should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
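        # Illustrative recipe-side usage: recipes fetching from such SCMs
        # typically embed the revision in their version, e.g.
        #   PV = "1.0+git${SRCPV}"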

        # Gather all named SRCREVs to add to the sstate hash calculation.
        # This anonymous python snippet is called multiple times, so we
        # need to be careful not to double up the appends here and cause
        # the base hash to mismatch the task hash.
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if pkg_arch == mach_arch:
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if paths:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install