| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1 | # ex:ts=4:sw=4:sts=4:et | 
|  | 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | 
|  | 3 | # | 
|  | 4 | # This bbclass is used for creating archive for: | 
|  | 5 | # 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original" | 
|  | 6 | # 2) patched source: ARCHIVER_MODE[src] = "patched" (default) | 
|  | 7 | # 3) configured source: ARCHIVER_MODE[src] = "configured" | 
|  | 8 | # 4) The patches between do_unpack and do_patch: | 
|  | 9 | #    ARCHIVER_MODE[diff] = "1" | 
|  | 10 | #    And you can set the one that you'd like to exclude from the diff: | 
|  | 11 | #    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches" | 
|  | 12 | # 5) The environment data, similar to 'bitbake -e recipe': | 
|  | 13 | #    ARCHIVER_MODE[dumpdata] = "1" | 
|  | 14 | # 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1" | 
# 7) Whether to output the .src.rpm package:
|  | 16 | #    ARCHIVER_MODE[srpm] = "1" | 
|  | 17 | # 8) Filter the license, the recipe whose license in | 
|  | 18 | #    COPYLEFT_LICENSE_INCLUDE will be included, and in | 
|  | 19 | #    COPYLEFT_LICENSE_EXCLUDE will be excluded. | 
|  | 20 | #    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*' | 
|  | 21 | #    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary' | 
|  | 22 | # 9) The recipe type that will be archived: | 
|  | 23 | #    COPYLEFT_RECIPE_TYPES = 'target' | 
|  | 24 | # | 
|  | 25 |  | 
# Create archive for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

# Default archiver behaviour; see the header of this file for what each
# flag means.  All defaults are weak (?=) so distro/local configuration
# can override them.
ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

# Final deploy location and per-recipe staging directories.
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
# Scratch area used when archiver tasks re-run unpack/patch/configure
# under a redirected WORKDIR.
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

# Directories required by each task ([dirs] varflag).
do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 46 |  | 
|  | 47 | # This is a convenience for the shell script to use it | 
|  | 48 |  | 
|  | 49 |  | 
python () {
    # Parse-time hook: decide whether this recipe is archived at all and,
    # if so, wire the archiver tasks into do_deploy_archives (and, for
    # SRPM output, into do_package_write_rpm) via [depends] varflags.
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        # The recipe is ASSUME_PROVIDED; hang the dependencies off one of
        # its real PROVIDES names instead.
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    # Honour the COPYLEFT_LICENSE_INCLUDE/EXCLUDE filter (copyleft_filter).
    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))


    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes, they don't have
        # the "do_configure" task, so we need to use "do_preconfigure"
        def hasTask(task):
            # A task exists and will actually run only if it carries the
            # "task" flag and is not marked noexec.
            return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" in d.getVar('PACKAGE_CLASSES'):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            # The source archives must exist before the SRPM is written.
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
        else:
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")
}
|  | 133 |  | 
# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
|  | 135 | # Files in SRC_URI are copied directly, anything that's a directory | 
|  | 136 | # (e.g. git repositories) is "unpacked" and then put into a tarball. | 
python do_ar_original() {
    """
    Archive the original (unpacked, unpatched) source.

    File URLs from SRC_URI are copied verbatim into ARCHIVER_OUTDIR;
    directory sources (e.g. git checkouts) are unpacked into a fresh
    temporary directory and tarred up.  A patch "series" file is written
    alongside so the patched source can be reconstructed.
    """
    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        urls[i] = bb.fetch2.encodeurl(decoded)
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            # Decode once instead of three separate decodeurl() calls.
            scheme, _, location, _, _, params = bb.fetch2.decodeurl(url)
            name = params.get('name', '')
            if scheme.lower() == 'file':
                # Local directories: derive the suffix from the path itself.
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        # Fixed: the format string has three %s placeholders,
                        # so three arguments are required (the original passed
                        # only two, raising TypeError instead of this message).
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            # Keep per-patchdir series files distinguishable.
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}
|  | 208 |  | 
python do_ar_patched() {
    # Archive ${S} after patching, as the "patched" source tarball.
    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Capture the output directory before WORKDIR is redirected below:
    # ARCHIVER_OUTDIR expands via ARCHIVER_TOPDIR from WORKDIR.
    outdir = d.getVar('ARCHIVER_OUTDIR')
    workdir = d.getVar('ARCHIVER_WORKDIR')
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', workdir)
    create_tarball(d, d.getVar('S'), 'patched', outdir)
}
|  | 221 |  | 
python do_ar_configured() {
    # Archive ${S} after running configure (or preconfigure) in a
    # redirected WORKDIR, so the normal build tree is left untouched.
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # The libtool-native's do_configure will remove the
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the
        # do_configure, we archive the already configured ${S} to
        # instead of.
        elif pn != 'libtool-native':
            def runTask(task):
                # Execute a task with its prefuncs/postfuncs, skipping
                # sysroot_cleansstate and do_qa_configure.
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            # Re-run everything configure depends on, except the tasks
            # that already ran in the real WORKDIR (patch, sysroot prep).
            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        # For out-of-tree builds, fold the build dir into the source tree
        # so a single tarball captures both.
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}
|  | 274 |  | 
def exclude_useless_paths(tarinfo):
    """tarfile add() filter: skip 'temp', 'patches' and '.pc' directories.

    Returns None (omit the entry) when tarinfo is a directory whose
    basename is one of the unwanted names; otherwise returns tarinfo
    unchanged.
    """
    unwanted = ('temp', 'patches', '.pc')
    if tarinfo.isdir():
        if tarinfo.name in unwanted:
            return None
        if tarinfo.name.endswith(tuple('/' + n for n in unwanted)):
            return None
    return tarinfo
|  | 282 |  | 
def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create a gzipped tarball of srcdir in ar_outdir.

    The archive is named ${PF}-<suffix>.tar.gz (or ${PF}.tar.gz when
    suffix is empty).  'temp', 'patches' and '.pc' directories are
    excluded via exclude_useless_paths().
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    # Use a context manager so the tarball is closed even if add() raises
    # (the original leaked the handle on error).
    with tarfile.open(tarname, 'w:gz') as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
|  | 309 |  | 
# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
    """Write ${PF}-diff.gz into ar_outdir with the delta between the
    pristine tree (src_orig) and the patched tree (src).

    Both arguments must be existing directories; otherwise this is a
    no-op.  Entries listed in ARCHIVER_MODE[diff-exclude] are removed
    from both trees before diffing.
    """
    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # The diff --exclude can't exclude the file with path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        # diff is run from the parent dir so the archive contains
        # relative paths.
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        # NOTE: the pipeline's exit status is gzip's, so diff exiting 1
        # (differences found) does not trip check_call.
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        # Always restore the original working directory.
        os.chdir(cwd)
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 339 |  | 
def is_work_shared(d):
    """Return True for recipes whose source lives in the shared work area
    (kernel recipes and gcc-source*)."""
    if bb.data.inherits_class('kernel', d):
        return True
    return d.getVar('PN').startswith('gcc-source')
|  | 343 |  | 
# Run do_unpack and do_patch
python do_unpack_and_patch() {
    # Nothing to do unless a source archive or a diff was requested.
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    # Capture these before WORKDIR is redirected (they expand from it).
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
        # possibly requiring of the following tasks (such as some recipes's
        # do_patch required 'B' existed).
        bb.utils.mkdirhier(d.getVar('B'))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}
|  | 385 |  | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 386 | # BBINCLUDED is special (excluded from basehash signature | 
|  | 387 | # calculation). Using it in a task signature can cause "basehash | 
|  | 388 | # changed" errors. | 
|  | 389 | # | 
|  | 390 | # Depending on BBINCLUDED also causes do_ar_recipe to run again | 
|  | 391 | # for unrelated changes, like adding or removing buildhistory.bbclass. | 
|  | 392 | # | 
|  | 393 | # For these reasons we ignore the dependency completely. The versioning | 
|  | 394 | # of the output file ensures that we create it each time the recipe | 
|  | 395 | # gets rebuilt, at least as long as a PR server is used. We also rely | 
|  | 396 | # on that mechanism to catch changes in the file content, because the | 
# file content is not part of the task signature either.
|  | 398 | do_ar_recipe[vardepsexclude] += "BBINCLUDED" | 
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.

    Copies the recipe file, matching bbappends and any required/included
    files into a staging dir, tars it as ${PF}-recipe.tar.gz, then
    removes the staging dir.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
    # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for appendfile in bbappend_files:
        if bbappend_re.match(appendfile) or bbappend_re1.match(appendfile):
            shutil.copy(appendfile, outdir)

    dirname = os.path.dirname(bbfile)
    # Search the recipe's own directory first, then the normal BBPATH.
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    # Use a context manager: the original opened bbfile without ever
    # closing it, leaking the file handle.
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            # Match each line once instead of twice per pattern.
            m = require_re.match(line) or include_re.match(line)
            incfile = m.group(1) if m else None
            if incfile:
                incfile = d.expand(incfile)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}
|  | 442 |  | 
python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """
    dump_path = os.path.join(d.getVar('ARCHIVER_OUTDIR'),
                             '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dump_path)
    with open(dump_path, "w") as out:
        # Variables and shell functions first...
        bb.data.emit_env(out, d, True)
        # ...then the python functions, which are not valid shell.
        for key in d.keys():
            if d.getVarFlag(key, "python", False):
                out.write("\npython %s () {\n%s}\n" % (key, d.getVar(key, False)))
}
|  | 459 |  | 
# Deployment is sstate-backed: ARCHIVER_TOPDIR is the sstate input,
# DEPLOY_DIR_SRC the output, so archives can be restored from cache.
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    # The actual file movement is handled by the sstate machinery; this
    # body only logs what is being deployed.
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
# Overrides the earlier do_deploy_archives[dirs] = "${WORKDIR}" above.
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

# Task ordering; which of these actually run is decided by the
# [depends] wiring in the anonymous python function above.
addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build
|  | 479 |  | 
# Parse-time hook for kernel-yocto recipes only.
python () {
    # Add tasks in the correct order, specifically for linux-yocto to avoid race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        # Insert do_kernel_configme between do_unpack_and_patch and do_configure.
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}