#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit logging

PACKAGECONFIG_CONFARGS ??= ""

inherit metadata_scm
def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)
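
# If set, LSB_DISTRO_ADJUST names a function (looked up in the global
# namespace above) that oe.lsb.distro_identifier() uses to post-process the
# detected host distribution string; when unset, no adjustment is applied.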

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
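
# Illustrative usage from a recipe (the install target here is an example):
#   do_install() {
#       oe_runmake DESTDIR=${D} install
#   }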

def get_base_dep(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS:class-target = "${@get_base_dep(d)}"
BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"

DEPENDS:prepend="${BASEDEPENDS} "
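
# Recipes that need no toolchain or libc (e.g. firmware- or data-only
# recipes) can opt out of these default dependencies with:
#   INHIBIT_DEFAULT_DEPS = "1"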

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location where it is used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH:prepend = "${@extra_path_elements(d)}"
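
# For example, the mercurial fetcher handling later in this file appends
# "python3-native" to EXTRANATIVEPATH, which prepends
# ${STAGING_BINDIR_NATIVE}/python3-native: to PATH.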

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
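
# Illustrative LIC_FILES_CHKSUM usage; relative file:// paths are tracked via
# SRC_URI, absolute paths via the function above:
#   LIC_FILES_CHKSUM = "file://COPYING;md5=<md5sum>"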

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    # Need to ignore our own scripts directories to avoid circular links
    for p in path.split(":"):
        if p.endswith("/scripts"):
            path = path.replace(p, "/ignoreme")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache; which(gcc) would
            # then return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc. This code checks for and fixes that case.
            if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))

# We can't use vardepvalue against do_fetch directly since that would overwrite
# the other task dependencies, so we use an indirect function.
python fetcher_hashes_dummyfunc() {
    return
}
fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
do_fetch[network] = "1"
python base_do_fetch() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
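# When S and WORKDIR are the same directory, S cannot be wiped wholesale, so
# only ${S}/patches is cleaned in that case.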

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

SSTATETASKS += "do_deploy_source_date_epoch"

do_deploy_source_date_epoch () {
    mkdir -p ${SDE_DEPLOYDIR}
    if [ -e ${SDE_FILE} ]; then
        echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    else
        echo "${SDE_FILE} not found!"
    fi
}

python do_deploy_source_date_epoch_setscene () {
    sstate_setscene(d)
    bb.utils.mkdirhier(d.getVar('SDE_DIR'))
    sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
    if os.path.exists(sde_file):
        target = d.getVar('SDE_FILE')
        bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
        bb.utils.rename(sde_file, target)
    else:
        bb.debug(1, "%s not found!" % sde_file)
}

do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
addtask do_deploy_source_date_epoch_setscene
addtask do_deploy_source_date_epoch before do_configure after do_patch

python create_source_date_epoch_stamp() {
    # Version: 1
    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
    oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
do_unpack[postfuncs] += "create_source_date_epoch_stamp"

def get_source_date_epoch_value(d):
    return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)
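
# The recorded epoch is consumed elsewhere in the metadata, typically
# (illustrative) as:
#   export SOURCE_DATE_EPOCH = "${@get_source_date_epoch_value(d)}"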

def get_layers_branch_rev(d):
    revisions = oe.buildcfg.get_layer_revisions(d)
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (r[1], r[2], r[3]) for r in revisions]
    # Collapse runs of adjacent layers that share the same "branch:rev" value
    # so each value is only printed once, on the last layer of the run.
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev

BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if the bitbake server is
    # already running, so check bb.event.BuildStarted too to make sure
    # ${HOSTTOOLS_DIR} exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the
        # multiconfig data stores' own contexts so the variables get expanded
        # correctly for that arch, then inject the result back into the main
        # data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code silences warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a
        # PREFERRED_PROVIDER_virtual/X is set, skip parsing for all the other
        # providers, which means they get uninstalled from the sysroot since
        # they're now "unreachable". This makes switching virtual/kernel work
        # in particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiprovidersallowed:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    if d.getVar("S")[-1] == '/':
        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
    if d.getVar("B")[-1] == '/':
        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))

    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")

    # To add a recipe to the skip list, set:
    #   SKIP_RECIPE[pn] = "message"
    pn = d.getVar('PN')
    skip_msg = d.getVarFlag('SKIP_RECIPE', pn)
    if skip_msg:
        bb.debug(1, "Skipping %s %s" % (pn, skip_msg))
        raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg))

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
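    #
    # A concrete example (illustrative):
    #   PACKAGECONFIG ??= "ssl"
    #   PACKAGECONFIG[ssl] = "--with-ssl,--without-ssl,openssl"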
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS:${PN}', extrardeps)
        appendVar('RRECOMMENDS:${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            for unmatched in unmatched_license_flags:
                message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
                details = d.getVarFlag("LICENSE_FLAGS_DETAILS", unmatched)
                if details:
                    message += "\n" + details
                bb.debug(1, "Skipping %s: %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)
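
    # Illustrative: a recipe carrying LICENSE_FLAGS = "commercial" is only
    # built once the flag is accepted, e.g. in local.conf:
    #   LICENSE_FLAGS_ACCEPTED += "commercial"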

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
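    # COMPATIBLE_MACHINE is a regular expression matched against each
    # MACHINEOVERRIDES entry, e.g. (illustrative):
    #   COMPATIBLE_MACHINE = "^(qemux86-64|qemuarm)$"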

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()

            for lic_exception in exceptions:
                if ":" in lic_exception:
                    lic_exception = lic_exception.split(":")[1]
                if lic_exception in oe.license.obsolete_license_list():
                    bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)

            pkgs = d.getVar('PACKAGES').split()
            skipped_pkgs = {}
            unskipped_pkgs = []
            for pkg in pkgs:
                remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions)

                incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
                if incompatible_lic:
                    skipped_pkgs[pkg] = incompatible_lic
                else:
                    unskipped_pkgs.append(pkg)

            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                    d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg]))
                for pkg in unskipped_pkgs:
                    bb.debug(1, "Including the package %s" % pkg)
            else:
                incompatible_lic = incompatible_license(d, bad_licenses)
                for pkg in skipped_pkgs:
                    incompatible_lic += skipped_pkgs[pkg]
                incompatible_lic = sorted(list(set(incompatible_lic)))

                if incompatible_lic:
                    bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                    raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)
        # Also check downloadfilename as the URL path might not be useful for sniffing
        path = uri.params.get("downloadfilename", uri.path)

        # HTTP/FTP use the wget fetcher
        if uri.scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif uri.scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        elif uri.scheme == "repo":
            d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.zst should DEPEND on zstd-native for unpacking
        elif path.endswith('.zst'):
            d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.zip and *.jar should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if paths:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"

EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install
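
# EXPORT_FUNCTIONS publishes the base_do_* implementations above under the
# plain do_* task names for anything inheriting this class, so a recipe that
# defines no do_compile of its own runs base_do_compile by default.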