# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) the original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) the patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) the configured source: ARCHIVER_MODE[src] = "configured"
# 4) a diff of the changes between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    Paths to exclude from the diff can be listed in:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) whether to output a .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE are archived, and those matching
#    COPYLEFT_LICENSE_EXCLUDE are skipped, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
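# For example, archiving could be enabled from a local.conf with something
# like the following (an illustrative fragment, not part of this class;
# adjust the values to taste):
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#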

# Don't filter by license by default
COPYLEFT_LICENSE_INCLUDE ?= ''
COPYLEFT_LICENSE_EXCLUDE ?= ''
# Create archives for all recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
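# With the defaults above, the deployed archives end up under
# ${DEPLOY_DIR}/sources/${TARGET_SYS}/${PF}/, e.g. (illustrative values
# only; the actual triplet and PF depend on the build):
#   tmp/deploy/sources/i586-poky-linux/bash-4.3-r1/bash-4.3-r1-patched.tar.gz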

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"
do_deploy_all_archives[dirs] = "${WORKDIR}"

# This is a convenience for the shell scripts to use


python () {
    pn = d.getVar('PN', True)

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since
        # that would cause the deptask of do_populate_sysroot to run no
        # matter which archives we need, so we add the dependency here.
        d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the srpm package
    ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
    if ar_srpm == "1":
        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)

    # The gcc stuff uses shared source
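    # (gcc and friends unpack/patch into a shared source tree marked with a
    # common stamp-base, so the archive is written once into a shared
    # work-shared output directory rather than once per ${PF})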
    flag = d.getVarFlag("do_unpack", "stamp-base", True)
    if flag:
        if ar_src in [ 'original', 'patched' ]:
            ar_outdir = os.path.join(d.getVar('ARCHIVER_TOPDIR', True), 'work-shared')
            d.setVar('ARCHIVER_OUTDIR', ar_outdir)
        d.setVarFlag('do_ar_original', 'stamp-base', flag)
        d.setVarFlag('do_ar_patched', 'stamp-base', flag)
        d.setVarFlag('do_unpack_and_patch', 'stamp-base', flag)
        d.setVarFlag('do_ar_original', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('do_unpack_and_patch', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('do_ar_patched', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('create_diff_gz', 'vardepsexclude', 'PF')
        d.setVarFlag('create_tarball', 'vardepsexclude', 'PF')

        flag_clean = d.getVarFlag('do_unpack', 'stamp-base-clean', True)
        if flag_clean:
            d.setVarFlag('do_ar_original', 'stamp-base-clean', flag_clean)
            d.setVarFlag('do_ar_patched', 'stamp-base-clean', flag_clean)
            d.setVarFlag('do_unpack_and_patch', 'stamp-base-clean', flag_clean)
}
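
# With the default ARCHIVER_MODE[src] = "patched", the task chain wired up
# above and by the addtask statements at the bottom of this file is roughly:
#   do_unpack_and_patch -> do_ar_patched -> do_deploy_archives -> do_build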

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tarfile, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the original source...')
    fetch = bb.fetch2.Fetch([], d)
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            basename = os.path.basename(local)

            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
            fetch.unpack(tmpdir, (url,))

            os.chdir(tmpdir)
            # We eliminate any AUTOINC+ in the revision.
            try:
                src_rev = bb.fetch2.get_srcrev(d).replace('AUTOINC+', '')
            except Exception:
                src_rev = 'NOREV'
            tarname = os.path.join(ar_outdir, basename + '.' + src_rev + '.tar.gz')
            tar = tarfile.open(tarname, 'w:gz')
            tar.add('.')
            tar.close()

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
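    # The series files follow quilt's format ("<patch> -p<striplevel>" per
    # line), so the original tarball plus these patches can in principle be
    # reapplied with quilt (a convention, not something enforced here).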
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
    # The gcc stuff uses shared source
    flag = d.getVarFlag('do_unpack', 'stamp-base', True)
    if flag:
        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir, 'gcc')
    else:
        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
        bb.note('Archiving the configured source...')
        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; archive the already-configured ${S} instead.
        if d.getVar('PN', True) != 'libtool-native':
            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            if bb.data.inherits_class('kernel-yocto', d):
                bb.build.exec_func('do_kernel_configme', d)
            if bb.data.inherits_class('cmake', d):
                bb.build.exec_func('do_generate_toolchain_file', d)
            prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
            for func in (prefuncs or '').split():
                if func != "sysroot_cleansstate":
                    bb.build.exec_func(func, d)
            bb.build.exec_func('do_configure', d)
            postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
            for func in (postfuncs or '').split():
                if func != "do_qa_configure":
                    bb.build.exec_func(func, d)
        srcdir = d.getVar('S', True)
        builddir = d.getVar('B', True)
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF', True)))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def create_tarball(d, srcdir, suffix, ar_outdir, pf=None):
    """
    Create the tarball from srcdir.
    """
    import tarfile

    bb.utils.mkdirhier(ar_outdir)
    if pf:
        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % (pf, suffix))
    else:
        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % \
            (d.getVar('PF', True), suffix))

    srcdir = srcdir.rstrip('/')
    dirname = os.path.dirname(srcdir)
    basename = os.path.basename(srcdir)
    os.chdir(dirname)
    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(basename)
    tar.close()
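
# For example, create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
# produces ${PF}-patched.tar.gz in ar_outdir (or gcc-patched.tar.gz when the
# 'gcc' pf override is passed for the shared-source case).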

# Create a .diff.gz between source.orig and source
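# (the output is named ${PF}-diff.gz and written into ar_outdir)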
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude only matches file names, not paths, so we copy the
    # patched source and remove the files that we'd like to exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    os.chdir(dirname)
    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
    subprocess.call(diff_cmd, shell=True)
    bb.utils.remove(src_patched, recurse=True)

# Run do_unpack and do_patch
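# in a scratch tree: WORKDIR is pointed at ARCHIVER_WORKDIR below, so the
# recipe's real WORKDIR is left untouched.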
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)

    # Change the WORKDIR to make do_unpack and do_patch run in another dir.
    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))

    # Changing 'WORKDIR' also changes 'B'; create 'B' since the following
    # tasks may require it (e.g. some recipes' do_patch requires 'B' to
    # exist).
    bb.utils.mkdirhier(d.getVar('B', True))

    # The kernel source is ready after do_validate_branches
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.exec_func('do_unpack', d)
        bb.build.exec_func('do_kernel_checkout', d)
        bb.build.exec_func('do_validate_branches', d)
    else:
        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        src = d.getVar('S', True).rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)
    bb.build.exec_func('do_patch', d)
    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

python do_ar_recipe () {
    """
    Archive the recipe, including .bb and .inc files.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
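    # Note: only require/include lines in the .bb file itself are scanned;
    # includes nested inside the copied .inc files are not followed.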
    bbfile = d.getVar('FILE', True)
    outdir = os.path.join(d.getVar('WORKDIR', True), \
            '%s-recipe' % d.getVar('PF', True))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = bb.data.expand(incfile, d)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    Dump environment data to ${PF}-showdata.dump.
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
            '%s-showdata.dump' % d.getVar('PF', True))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, 'w') as f:
        # Emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # Emit the metadata which isn't valid shell
        for e in d.keys():
            if bb.data.getVarFlag(e, 'python', d):
                f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, True)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files ..."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
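# The archives are written under ARCHIVER_TOPDIR during the build; the
# sstate machinery above then publishes them into DEPLOY_DIR_SRC
# (${DEPLOY_DIR}/sources by default).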

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

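# do_deploy_all_archives pulls in do_deploy_archives for every recipe in the
# recursive dependency tree (via the recrdeptask flag below), so running it
# on e.g. an image deploys the archives of everything the image needs.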
addtask do_deploy_all_archives after do_deploy_archives
do_deploy_all_archives[recrdeptask] = "do_deploy_archives"
do_deploy_all_archives[recideptask] = "do_${BB_DEFAULT_TASK}"
do_deploy_all_archives() {
    :
}