blob: 0710c1ec5e6e9a76b278ce360c506d7d90df3899 [file] [log] [blame]
Patrick Williams92b42cb2022-09-03 06:53:57 -05001#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
Patrick Williamsc124f4f2015-09-15 14:41:29 -05007#
8# This bbclass is used for creating archive for:
Andrew Geissler82c905d2020-04-13 13:39:40 -05009# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
10# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
11# 3) configured source: ARCHIVER_MODE[src] = "configured"
Andrew Geissler7e0e3c02022-02-25 20:34:39 +000012# 4) source mirror: ARCHIVER_MODE[src] = "mirror"
Andrew Geissler82c905d2020-04-13 13:39:40 -050013# 5) The patches between do_unpack and do_patch:
14# ARCHIVER_MODE[diff] = "1"
15# And you can set the one that you'd like to exclude from the diff:
16# ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
17# 6) The environment data, similar to 'bitbake -e recipe':
18# ARCHIVER_MODE[dumpdata] = "1"
19# 7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
20# 8) Whether to output the .src.rpm package:
21# ARCHIVER_MODE[srpm] = "1"
22# 9) Filter the license, the recipe whose license in
23# COPYLEFT_LICENSE_INCLUDE will be included, and in
24# COPYLEFT_LICENSE_EXCLUDE will be excluded.
25# COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
26# COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
27# 10) The recipe type that will be archived:
28# COPYLEFT_RECIPE_TYPES = 'target'
29# 11) The source mirror mode:
30# ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
31# per-recipe directories in a similar way to other archiver modes.
32# Post-processing may be required to produce a single mirror directory.
33# This does however allow inspection of duplicate sources and more
34# intelligent handling.
35# ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
36# directory suitable for direct use as a mirror. Duplicate sources are
37# ignored.
38# 12) Source mirror exclusions:
39# ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
40# This may be used for sources which you are already publishing yourself
41# (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
42# going to be published to the same site). It may also be used to exclude
43# local files (with the prefix 'file://') if these will be provided as part
44# of an archive of the layers themselves.
Patrick Williamsc124f4f2015-09-15 14:41:29 -050045#
46
# Create archive for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

# Default archiver behaviour: archive only the patched source; all optional
# outputs (srpm, diff, dumpdata, recipe archive) are disabled by default.
ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"
ARCHIVER_MODE[compression] ?= "xz"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
# ARCHIVER_ARCH is overridden to "allarch" for gcc-source recipes by the
# anonymous python function below, to keep multiconfig stamps stable.
ARCHIVER_ARCH = "${TARGET_SYS}"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

# When producing a combined mirror directory, allow duplicates for the case
# where multiple recipes use the same SRC_URI.
ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"

# This is a convenience for the shell script to use it
python () {
    # Decide, per recipe, which archiver tasks run and wire up the
    # inter-task 'depends' varflags accordingly.
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        # An ASSUME_PROVIDED recipe is not built; archive one of its other
        # providers instead.
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
    if pn.startswith('gcc-source'):
        d.setVar('ARCHIVER_ARCH', "allarch")

    def hasTask(task):
        # A task exists and is not marked noexec.
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes, they don't have
        # the "do_configure" task, so we need to use "do_preconfigure"
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        # Fix: append with a leading space, like every other branch.
        # appendVarFlag concatenates raw text, so a missing separator can
        # fuse this entry with an already-present 'depends' value.
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            # Redirect the rpm deploy dir so the SRPMs end up in
            # DEPLOY_DIR_SRC via the sstate machinery.
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}
174
# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Cleanup SRC_URI before call bb.fetch2.Fetch() since now SRC_URI is in the
    # variable "urls", otherwise there might be errors like:
    # The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        # Fix: the format string has three %s placeholders but
                        # only two arguments were supplied, which raised a
                        # TypeError instead of the intended fatal message.
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
                tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}
255
python do_ar_patched() {
    # Only applicable when ARCHIVER_MODE[src] selects patched sources.
    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Resolve the output directory first: re-pointing WORKDIR below would
    # otherwise change the expansion of ARCHIVER_OUTDIR.
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    # Work-shared recipes are archived in place; everything else was
    # unpacked and patched under the archiver's private WORKDIR.
    if not is_work_shared(d):
        d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}
269
python do_ar_configured() {
    # Archive the source tree after configuration has run.
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    # Resolve the output dir before WORKDIR is re-pointed below.
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # The libtool-native's do_configure will remove the
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already-configured ${S} instead.
        # The kernel class functions require it to be on work-shared, we
        # don't unpack, patch, configure again, just archive the already
        # configured ${S}
        elif not (pn == 'libtool-native' or is_work_shared(d)):
            def runTask(task):
                # Execute a task plus its pre/postfuncs, skipping
                # sysroot_cleansstate and do_qa_configure.
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            # Run everything do_configure normally runs after, except the
            # tasks already done (do_patch) or not wanted here.
            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        # For out-of-tree builds, copy ${B} into ${S} so the build output
        # is captured by the tarball as well.
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}
325
python do_ar_mirror() {
    # Archive the downloaded source artifacts in a layout usable as a
    # source mirror (either per-recipe "split" or a single "combined" dir).
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        # True when the URL starts with any ARCHIVER_MIRROR_EXCLUDE prefix.
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if len(ud.mirrortarballs) and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. ' \
                    'Falling back to original download.\n' \
                    'SRC_URI = %s' % (ud.url))

        # Check original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n' \
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}
397
def exclude_useless_paths(tarinfo):
    """Tarfile 'filter' callback: drop temp, patches and .pc directories.

    Returns None (i.e. exclude the member) for any directory whose last
    path component is 'temp', 'patches' or '.pc'; all other members are
    returned unchanged.
    """
    if tarinfo.isdir():
        leaf = tarinfo.name.rsplit('/', 1)[-1]
        if leaf in ('temp', 'patches', '.pc'):
            return None
    return tarinfo
405
def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create ${PF}[-suffix].tar.<compression> from srcdir in ar_outdir.

    The compression method is taken from ARCHIVER_MODE[compression].
    temp/, patches/ and .pc/ directories are filtered out via
    exclude_useless_paths.
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
    else:
        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    # Fix: use a context manager so the (possibly partially written)
    # tarball is closed even if tar.add() raises.
    with tarfile.open(tarname, 'w:%s' % compression_method) as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
433
# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
    # Write ${PF}-diff.gz into ar_outdir: the diff between the pristine
    # tree (src_orig) and the patched tree (src).

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # The diff --exclude can't exclude the file with path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        # Run diff from the parent directory so paths inside the .diff.gz
        # are relative (<basename>.orig vs <basename>.patched).
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)
def is_work_shared(d):
    """Return True when the recipe's sources live in the shared work area
    (gcc-source and kernel recipes), so they must not be re-unpacked,
    re-patched or moved into the archiver's private WORKDIR."""
    pn = d.getVar('PN')
    if pn.startswith('gcc-source'):
        return True
    if bb.data.inherits_class('kernel', d):
        return True
    return bb.data.inherits_class('kernelsrc', d) and d.getVar('S') == d.getVar('STAGING_KERNEL_DIR')
Brad Bishop316dfdd2018-06-25 12:45:53 -0400469
# Run do_unpack and do_patch
python do_unpack_and_patch() {
    # Only needed when archiving patched/configured sources or a diff.
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
        # possibly requiring of the following tasks (such as some recipes's
        # do_patch required 'B' existed).
        bb.utils.mkdirhier(d.getVar('B'))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    if bb.data.inherits_class('dos2unix', d):
        bb.build.exec_func('do_convert_crlf_to_lf', d)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}
514
# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
        '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
    # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    # Fix: the recipe file was opened without ever being closed; use a
    # context manager so the handle is released even on error.
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                # Resolve the include/require target against BBPATH.
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}
572
python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """
    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'),
                            '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as dump:
        # Variables and shell functions first...
        bb.data.emit_env(dump, d, True)
        # ...then python functions, which aren't valid shell and are
        # therefore emitted separately.
        for key in d.keys():
            if d.getVarFlag(key, "python", False):
                dump.write("\npython %s () {\n%s}\n" % (key, d.getVar(key, False)))
}
589
# Publish the per-recipe archiver output to DEPLOY_DIR_SRC via sstate.
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500601
# Wire the archiver tasks into the normal task graph.
addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch do_preconfigure
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives
# Make sure archives are deployed for every recipe pulled into a build,
# an image rootfs, or an SDK.
do_build[recrdeptask] += "do_deploy_archives"
do_rootfs[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500613
python () {
    # Add tasks in the correct order, specifically for linux-yocto to avoid race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}