# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives for:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) the patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can also list the paths you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE will be included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE will be excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#

# Don't filter by license by default
COPYLEFT_LICENSE_INCLUDE ?= ''
COPYLEFT_LICENSE_EXCLUDE ?= ''
# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
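# For reference, a minimal local.conf sketch that enables this class might
# look like the following (illustrative settings, not defaults):
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MODE[recipe] = "1"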

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"
do_deploy_all_archives[dirs] = "${WORKDIR}"

# This is a convenience for the shell script to use it


python () {
    pn = d.getVar('PN', True)
    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES", True).split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # We just archive gcc-source for all the gcc-related recipes
    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # would cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the dependency here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if pn.startswith("gcc-source-"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        else:
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the .src.rpm package
    ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
    if ar_srpm == "1":
        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tarfile, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI", True).split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
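    # As a hypothetical example, a SRC_URI entry such as
    #   "git://example.com/repo.git;name=repo;destsuffix=${S}/repo"
    # would be rewritten below to
    #   "git://example.com/repo.git;name=repo"
    # before fetching and unpacking.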
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as a suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net; in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            name = params.get('name', '')
            if name in tarball_suffix:
                if not name:
                    bb.fatal("Cannot determine archive names for original source because the 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                else:
                    bb.fatal("Cannot determine archive names for original source because the 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
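    # Each series line follows the quilt series-file convention, e.g. a
    # hypothetical "0001-fix-build.patch -p1".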
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', ar_workdir)
    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN', True)
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead.
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already configured ${S} instead.
        elif pn != 'libtool-native':
            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            if bb.data.inherits_class('kernel-yocto', d):
                bb.build.exec_func('do_kernel_configme', d)
            if bb.data.inherits_class('cmake', d):
                bb.build.exec_func('do_generate_toolchain_file', d)
            prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
            for func in (prefuncs or '').split():
                if func != "sysroot_cleansstate":
                    bb.build.exec_func(func, d)
            bb.build.exec_func('do_configure', d)
            postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
            for func in (postfuncs or '').split():
                if func != "do_qa_configure":
                    bb.build.exec_func(func, d)
        srcdir = d.getVar('S', True)
        builddir = d.getVar('B', True)
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF', True)))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create a tarball of srcdir in ar_outdir, named ${PF}-<suffix>.tar.gz
    (or ${PF}.tar.gz when no suffix is given).
    """
    import tarfile

    # Make sure we only create a single tarball for gcc sources
    if d.getVar('SRC_URI', True) == "":
        return

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF', True)
    tarname = os.path.join(ar_outdir, filename)

    srcdir = srcdir.rstrip('/')
    dirname = os.path.dirname(srcdir)
    basename = os.path.basename(srcdir)
    os.chdir(dirname)
    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(basename)
    tar.close()

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude option can't exclude files by path, so we copy
    # the patched source and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    os.chdir(dirname)
    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
    subprocess.call(diff_cmd, shell=True)
    bb.utils.remove(src_patched, recurse=True)

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
    pn = d.getVar('PN', True)

    # The kernel class functions require the source to stay in work-shared, so we don't change WORKDIR
    if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
        # for any following tasks that may need it (e.g. some recipes'
        # do_patch requires 'B' to exist).
        bb.utils.mkdirhier(d.getVar('B', True))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        src = d.getVar('S', True).rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

python do_ar_recipe () {
    """
    Archive the recipe, including the .bb file, applied .bbappend files,
    and any required/included files.
    """
    import re
    import shutil

    require_re = re.compile(r"require\s+(.+)")
    include_re = re.compile(r"include\s+(.+)")
    bbfile = d.getVar('FILE', True)
    outdir = os.path.join(d.getVar('WORKDIR', True), \
            '%s-recipe' % d.getVar('PF', True))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN', True)
    bbappend_files = d.getVar('BBINCLUDED', True).split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile(r".*/%s_[^/]*\.bbappend$" % pn)
    bbappend_re1 = re.compile(r".*/%s\.bbappend$" % pn)
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = bb.data.expand(incfile, d)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    Dump environment data to ${PF}-showdata.dump.
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
        '%s-showdata.dump' % d.getVar('PF', True))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # Emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # Emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

addtask do_deploy_all_archives after do_deploy_archives
do_deploy_all_archives[recrdeptask] = "do_deploy_archives"
do_deploy_all_archives[recideptask] = "do_${BB_DEFAULT_TASK}"
do_deploy_all_archives() {
    :
}

python () {
    # Add the tasks in the correct order, specifically for linux-yocto, to avoid a race condition
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}