# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives for:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) the patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can also set the files and directories to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) whether to output a .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) filtering by license: recipes whose license is in
#    COPYLEFT_LICENSE_INCLUDE are included, and those whose license is in
#    COPYLEFT_LICENSE_EXCLUDE are excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
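# For example, a local.conf snippet that archives the original sources,
# the unpack-to-patch diff and the recipe metadata could look like this
# (an illustrative configuration, not the defaults):
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MODE[recipe] = "1"
#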

# Don't filter the license by default
COPYLEFT_LICENSE_INCLUDE ?= ''
COPYLEFT_LICENSE_EXCLUDE ?= ''
# Create archives for all the recipe types by default
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"
do_deploy_all_archives[dirs] = "${WORKDIR}"

# This is a convenience for the shell script to use it


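# Wire up the archiver task dependencies at parse time according to the
# ARCHIVER_MODE and COPYLEFT_* settings above.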
python () {
    pn = d.getVar('PN', True)
    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES", True).split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if pn.startswith("gcc-source-"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        else:
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the srpm package
    ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
    if ar_srpm == "1":
        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI", True).split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            name = params.get('name', '')
            if name in tarball_suffix:
                if not name:
                    bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                else:
                    bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

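# Archive the patched source (${S} after do_unpack_and_patch has run) as
# ${PF}-patched.tar.gz in ARCHIVER_OUTDIR.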
python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', ar_workdir)
    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
}

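# Archive the configured source: run the configure step (do_preconfigure for
# gcc-source recipes, do_configure plus its pre/postfuncs otherwise) in the
# archiver's private WORKDIR, then tar up ${S} as ${PF}-configured.tar.gz.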
python do_ar_configured() {
    import shutil

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN', True)
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead.
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure for it; we archive the already configured ${S}
        # instead.
        elif pn != 'libtool-native':
            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            if bb.data.inherits_class('kernel-yocto', d):
                bb.build.exec_func('do_kernel_configme', d)
            if bb.data.inherits_class('cmake', d):
                bb.build.exec_func('do_generate_toolchain_file', d)
            prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
            for func in (prefuncs or '').split():
                if func != "sysroot_cleansstate":
                    bb.build.exec_func(func, d)
            bb.build.exec_func('do_configure', d)
            postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
            for func in (postfuncs or '').split():
                if func != "do_qa_configure":
                    bb.build.exec_func(func, d)
        srcdir = d.getVar('S', True)
        builddir = d.getVar('B', True)
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF', True)))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI', True) == ""):
        return

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF', True)
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir))
    tar.close()

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff --exclude can't exclude files by path, so we copy the patched
    # source and remove the files that we'd like to exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    os.chdir(dirname)
    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
    subprocess.call(diff_cmd, shell=True)
    bb.utils.remove(src_patched, recurse=True)

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
    pn = d.getVar('PN', True)

    # The kernel class functions require the source to be in work-shared,
    # so we don't change WORKDIR for those recipes.
    if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in
        # case later tasks need it (e.g. some recipes' do_patch requires 'B'
        # to exist).
        bb.utils.mkdirhier(d.getVar('B', True))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        src = d.getVar('S', True).rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE', True)
    outdir = os.path.join(d.getVar('WORKDIR', True), \
            '%s-recipe' % d.getVar('PF', True))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN', True)
    bbappend_files = d.getVar('BBINCLUDED', True).split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = bb.data.expand(incfile, d)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
        '%s-showdata.dump' % d.getVar('PF', True))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

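# Deployment goes through shared state: archives collected under
# ARCHIVER_TOPDIR are published into DEPLOY_DIR_SRC via the
# sstate-inputdirs/sstate-outputdirs mapping below.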
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

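# do_deploy_all_archives recursively depends on do_deploy_archives across the
# whole dependency tree, so running it deploys the archives for every recipe
# the target (recursively) depends on.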
addtask do_deploy_all_archives after do_deploy_archives
do_deploy_all_archives[recrdeptask] = "do_deploy_archives"
do_deploy_all_archives[recideptask] = "do_${BB_DEFAULT_TASK}"
do_deploy_all_archives() {
    :
}

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid
    # a race condition.
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}