# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
#  1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
#  2) patched source: ARCHIVER_MODE[src] = "patched" (default)
#  3) configured source: ARCHIVER_MODE[src] = "configured"
#  4) source mirror: ARCHIVER_MODE[src] = "mirror"
#  5) The patches between do_unpack and do_patch:
#     ARCHIVER_MODE[diff] = "1"
#     And you can set the paths that you'd like to exclude from the diff:
#     ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
#  6) The environment data, similar to 'bitbake -e recipe':
#     ARCHIVER_MODE[dumpdata] = "1"
#  7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
#  8) Whether to output the .src.rpm package:
#     ARCHIVER_MODE[srpm] = "1"
#  9) Filter by license: recipes whose license matches
#     COPYLEFT_LICENSE_INCLUDE will be included, and those matching
#     COPYLEFT_LICENSE_EXCLUDE will be excluded.
#     COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#     COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 10) The recipe types that will be archived:
#     COPYLEFT_RECIPE_TYPES = 'target'
# 11) The source mirror mode:
#     ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
#     per-recipe directories in a similar way to other archiver modes.
#     Post-processing may be required to produce a single mirror directory.
#     This does however allow inspection of duplicate sources and more
#     intelligent handling.
#     ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
#     directory suitable for direct use as a mirror. Duplicate sources are
#     ignored.
# 12) Source mirror exclusions:
#     ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
#     This may be used for sources which you are already publishing yourself
#     (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
#     going to be published to the same site). It may also be used to exclude
#     local files (with the prefix 'file://') if these will be provided as part
#     of an archive of the layers themselves.
#
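# A typical way to enable this class is via local.conf; the values below are
# an illustrative sketch, not requirements:
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#
# Note that ARCHIVER_MODE[src] = "mirror" additionally requires
# BB_GENERATE_MIRROR_TARBALLS = "1" (enforced in do_ar_mirror below).
#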

# Create archive for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
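# Archives are staged per recipe under ARCHIVER_TOPDIR and then published via
# the do_deploy_archives sstate task, ending up under
# ${DEPLOY_DIR_SRC}/${TARGET_SYS}/${PF}/ (see the sstate-inputdirs and
# sstate-outputdirs settings below).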

# When producing a combined mirror directory, allow duplicates for the case
# where multiple recipes use the same SRC_URI.
ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

# This is a convenience for shell scripts to use.

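# The anonymous function below wires the archiver tasks into each recipe's
# task graph according to ARCHIVER_MODE, skipping recipes that the copyleft
# filter excludes or that are covered elsewhere (e.g. gcc-source).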
python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

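    # A task is only usable if its 'task' flag is set and it has not been
    # marked noexec.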
    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched and do_ar_configured, respectively, depend on it,
        # but for 'original' we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # would cause the deptask of do_populate_sysroot to run no matter which
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Clean up SRC_URI before calling bb.fetch2.Fetch() since SRC_URI is now
    # in the variable "urls"; otherwise there might be errors like:
    #   The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
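    # Each series line follows the quilt convention: "<patch file> -p<striplevel>".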
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; instead, we archive the already-configured ${S}.
        elif pn != 'libtool-native':
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

python do_ar_mirror() {
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if len(ud.mirrortarballs) and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. ' \
                    'Falling back to original download.\n' \
                    'SRC_URI = %s' % (ud.url))

        # Check original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n' \
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}

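# Filter callback for tarfile.TarFile.add(): drop build-time debris
# (temp/, patches/ and .pc/ directories) from the archived source tree.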
def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
            return None
        elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
            return None
    return tarinfo

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if d.getVar('SRC_URI') == "":
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
    tar.close()

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude option can't exclude files by path, so we copy
    # the patched source and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

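# Kernel and gcc-source recipes keep their source in a shared (work-shared)
# directory rather than a per-recipe WORKDIR, so the archiver must not
# relocate or re-patch it.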
def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be in work-shared, so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR so that do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory,
        # since following tasks may require it to exist (e.g. some recipes'
        # do_patch requires 'B' to exist).
        bb.utils.mkdirhier(d.getVar('B'))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
    # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives
do_build[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"

python () {
    # Add tasks in the correct order, specifically for linux-yocto to avoid a race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}