blob: 454dd7a7a07844bb9cf87cbcd5e4b9d6c8864f56 [file] [log] [blame]
Patrick Williams7784c422022-11-17 07:29:11 -06001#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
# Where finished SPDX documents are published (per-machine deploy area).
DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${MACHINE}"

# The product name that the CVE database uses. Defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"

# Per-recipe scratch areas used while generating documents.
SPDXDIR ??= "${WORKDIR}/spdx"
SPDXDEPLOY = "${SPDXDIR}/deploy"
SPDXWORK = "${SPDXDIR}/work"
SPDXIMAGEWORK = "${SPDXDIR}/image-work"
SPDXSDKWORK = "${SPDXDIR}/sdk-work"

# Identification recorded in the "Tool:" annotator string of annotations.
SPDX_TOOL_NAME ??= "oe-spdx-creator"
SPDX_TOOL_VERSION ??= "1.0"

SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"

# "1" enables scanning patched sources / archiving sources / archiving
# packaged files alongside the SPDX documents.
SPDX_INCLUDE_SOURCES ??= "0"
SPDX_ARCHIVE_SOURCES ??= "0"
SPDX_ARCHIVE_PACKAGED ??= "0"

# Inputs to the uuid5-based document namespace (see get_doc_namespace).
SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
# "1" pretty-prints the JSON output (see get_json_indent).
SPDX_PRETTY ??= "0"

SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"

SPDX_CUSTOM_ANNOTATION_VARS ??= ""

SPDX_ORG ??= "OpenEmbedded ()"
SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
    this recipe. For SPDX documents create using this class during the build, this \
    is the contact information for the person or organization who is doing the \
    build."
43
def extract_licenses(filename):
    """Scan the head of *filename* for SPDX-License-Identifier tags.

    Returns the list of identifier strings found (ASCII-decoded), or None
    when the file contains no tags or cannot be read.
    """
    import re
    import os

    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)

    try:
        with open(filename, 'rb') as f:
            # License tags conventionally sit near the top of a file, so a
            # bounded read keeps this cheap even for huge sources.
            size = min(15000, os.stat(filename).st_size)
            txt = f.read(size)
            licenses = lic_regex.findall(txt)
            if licenses:
                ascii_licenses = [lic.decode('ascii') for lic in licenses]
                return ascii_licenses
    except Exception as e:
        # Name the offending file so failures are actionable (the previous
        # message printed a fixed "(unknown)" placeholder instead).
        bb.warn(f"Exception reading {filename}: {e}")
    return None
60
def get_doc_namespace(d, doc):
    """Build the unique SPDX documentNamespace URI for *doc*."""
    import uuid

    # Derive a stable namespace UUID from SPDX_UUID_NAMESPACE, then a
    # per-document UUID from the document name inside that namespace.
    base_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
    doc_uuid = uuid.uuid5(base_uuid, doc.name)
    prefix = d.getVar("SPDX_NAMESPACE_PREFIX")
    return "%s/%s-%s" % (prefix, doc.name, doc_uuid)
65
def create_annotation(d, comment):
    """Return an SPDXAnnotation carrying *comment*, stamped with the current
    UTC time and this tool's name/version as the annotator."""
    from datetime import datetime, timezone

    ann = oe.spdx.SPDXAnnotation()
    ann.annotationDate = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    ann.annotationType = "OTHER"
    ann.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
    ann.comment = comment
    return ann
76
def recipe_spdx_is_native(d, recipe):
    """True when *recipe* carries the "isNative" annotation written by this
    class (see create_annotation), i.e. it was built native/cross."""
    tool_annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
    for ann in recipe.annotations:
        if (ann.annotationType == "OTHER"
                and ann.annotator == tool_annotator
                and ann.comment == "isNative"):
            return True
    return False
81
def is_work_shared_spdx(d):
    """True when this recipe builds out of a shared source tree (kernel
    recipes, or any recipe whose WORKDIR lives under work-shared)."""
    if bb.data.inherits_class('kernel', d):
        return True
    return 'work-shared' in d.getVar('WORKDIR')
84
def get_json_indent(d):
    """JSON indent width for SPDX output: 2 when SPDX_PRETTY is "1",
    otherwise None (compact single-line JSON)."""
    return 2 if d.getVar("SPDX_PRETTY") == "1" else None
89
# Anonymous function: load the SPDX license list JSON once at parse time and
# cache it in SPDX_LICENSE_DATA, with the license array re-keyed by licenseId
# for O(1) lookups in convert_license_to_spdx.
python() {
    import json
    if d.getVar("SPDX_LICENSE_DATA"):
        return

    with open(d.getVar("SPDX_LICENSES"), "r") as f:
        data = json.load(f)
        # Transform the license array to a dictionary
        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
        d.setVar("SPDX_LICENSE_DATA", data)
}
101
def convert_license_to_spdx(lic, document, d, existing=None):
    """Convert an OE LICENSE expression *lic* to an SPDX license expression.

    Licenses not in the SPDX list become "LicenseRef-<name>" and their text
    is recorded in *document* as extracted licensing info. *existing* may map
    OE license names to LicenseRef IDs already present in a referenced
    document, in which case those IDs are reused instead of re-extracting.
    """
    # Avoid a shared mutable default argument; treat "not given" as empty.
    if existing is None:
        existing = {}

    license_data = d.getVar("SPDX_LICENSE_DATA")
    extracted = {}

    def add_extracted_license(ident, name):
        # Imported here (not at function top) so the common case of
        # all-known licenses never touches these modules.
        from pathlib import Path
        import oe.spdx

        nonlocal document

        if name in extracted:
            return

        extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
        extracted_info.name = name
        extracted_info.licenseId = ident
        extracted_info.extractedText = None

        if name == "PD":
            # Special-case this.
            extracted_info.extractedText = "Software released to the public domain"
        else:
            # Search for the license text in COMMON_LICENSE_DIR and LICENSE_PATH
            for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
                try:
                    with (Path(directory) / name).open(errors="replace") as f:
                        extracted_info.extractedText = f.read()
                        break
                except FileNotFoundError:
                    pass
            if extracted_info.extractedText is None:
                # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
                filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
                if filename:
                    filename = d.expand("${S}/" + filename)
                    with open(filename, errors="replace") as f:
                        extracted_info.extractedText = f.read()
                else:
                    bb.error("Cannot find any text for license %s" % name)

        extracted[name] = extracted_info
        document.hasExtractedLicensingInfos.append(extracted_info)

    def convert(l):
        # Pass parentheses through; map OE operators to SPDX keywords.
        if l == "(" or l == ")":
            return l

        if l == "&":
            return "AND"

        if l == "|":
            return "OR"

        if l == "CLOSED":
            return "NONE"

        # Known SPDX license (possibly after SPDXLICENSEMAP aliasing).
        spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
        if spdx_license in license_data["licenses"]:
            return spdx_license

        # Unknown: reuse an existing LicenseRef or extract a new one.
        try:
            spdx_license = existing[l]
        except KeyError:
            spdx_license = "LicenseRef-" + l
            add_extracted_license(spdx_license, l)

        return spdx_license

    # Tokenize on whitespace with parentheses split out as their own tokens.
    lic_split = lic.replace("(", " ( ").replace(")", " ) ").split()

    return ' '.join(convert(l) for l in lic_split)
173
def process_sources(d):
    """Decide whether this recipe's patched sources should be scanned/archived.

    Returns False for recipes whose source tasks are deleted or shared
    (glibc-locale, gcc family, a few cross helpers), True otherwise.
    """
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        # For an assume-provided recipe, report under its first alternate
        # PROVIDES name instead.
        for provide in d.getVar("PROVIDES").split():
            if provide != pn:
                pn = provide
                break

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return False
    if d.getVar('PN') in ("libtool-cross", "libgcc-initial", "shadow-sysroot"):
        return False

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc']:
        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
        return False

    return True
200
201
def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
    """Walk *topdir* and add an SPDXFile to *doc* for every regular file.

    get_spdxid(counter) and get_types(path) supply the SPDXID and fileTypes
    for each file. When *archive* (a tarfile object) is given, files are also
    appended to it with normalized ownership/mtime. Sets the package
    verification code on spdx_pkg and returns the list of SPDXFiles.

    NOTE(review): ignore_dirs/ignore_top_level_dirs use mutable list defaults;
    they are only read here, so this is safe but fragile if ever mutated.
    """
    from pathlib import Path
    import oe.spdx
    import hashlib

    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
    if source_date_epoch:
        source_date_epoch = int(source_date_epoch)

    sha1s = []
    spdx_files = []

    file_counter = 1
    for subdir, dirs, files in os.walk(topdir):
        # Prune ignored directories in place so os.walk skips them.
        dirs[:] = [d for d in dirs if d not in ignore_dirs]
        if subdir == str(topdir):
            dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]

        for file in files:
            filepath = Path(subdir) / file
            filename = str(filepath.relative_to(topdir))

            # Symlinks and special files are skipped entirely.
            if not filepath.is_symlink() and filepath.is_file():
                spdx_file = oe.spdx.SPDXFile()
                spdx_file.SPDXID = get_spdxid(file_counter)
                for t in get_types(filepath):
                    spdx_file.fileTypes.append(t)
                spdx_file.fileName = filename

                if archive is not None:
                    with filepath.open("rb") as f:
                        info = archive.gettarinfo(fileobj=f)
                        info.name = filename
                        # Normalize ownership and clamp mtime for
                        # reproducible archives.
                        info.uid = 0
                        info.gid = 0
                        info.uname = "root"
                        info.gname = "root"

                        if source_date_epoch is not None and info.mtime > source_date_epoch:
                            info.mtime = source_date_epoch

                        archive.addfile(info, f)

                sha1 = bb.utils.sha1_file(filepath)
                sha1s.append(sha1)
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA1",
                    checksumValue=sha1,
                ))
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA256",
                    checksumValue=bb.utils.sha256_file(filepath),
                ))

                # Only source files get per-file license identifier scanning.
                if "SOURCE" in spdx_file.fileTypes:
                    extracted_lics = extract_licenses(filepath)
                    if extracted_lics:
                        spdx_file.licenseInfoInFiles = extracted_lics

                doc.files.append(spdx_file)
                doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
                spdx_pkg.hasFiles.append(spdx_file.SPDXID)

                spdx_files.append(spdx_file)

                file_counter += 1

    # SPDX package verification code: SHA1 over the sorted per-file SHA1s.
    sha1s.sort()
    verifier = hashlib.sha1()
    for v in sha1s:
        verifier.update(v.encode("utf-8"))
    spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()

    return spdx_files
276
277
def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
    """Link each packaged file to the dependency source files it was built
    from, using the debugsrc info recorded in pkgdata.

    *sources* maps SHA256 -> DepSource (see collect_dep_sources). For every
    debug source that is found on disk and matches a dependency source file,
    a GENERATED_FROM relationship is added; otherwise NOASSERTION is used.
    """
    from pathlib import Path
    import hashlib
    import oe.packagedata
    import oe.spdx

    # Candidate roots where debugsrc paths may exist on the build host.
    debug_search_paths = [
        Path(d.getVar('PKGD')),
        Path(d.getVar('STAGING_DIR_TARGET')),
        Path(d.getVar('STAGING_DIR_NATIVE')),
        Path(d.getVar('STAGING_KERNEL_DIR')),
    ]

    pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)

    if pkg_data is None:
        return

    for file_path, file_data in pkg_data["files_info"].items():
        if not "debugsrc" in file_data:
            continue

        # Find the SPDXFile matching this packaged path (for-else: fatal if
        # the pkgdata entry has no corresponding SPDX file).
        for pkg_file in package_files:
            if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
                break
        else:
            bb.fatal("No package file found for %s" % str(file_path))
            continue

        for debugsrc in file_data["debugsrc"]:
            ref_id = "NOASSERTION"
            for search in debug_search_paths:
                if debugsrc.startswith("/usr/src/kernel"):
                    debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
                else:
                    debugsrc_path = search / debugsrc.lstrip("/")
                if not debugsrc_path.exists():
                    continue

                file_sha256 = bb.utils.sha256_file(debugsrc_path)

                if file_sha256 in sources:
                    source_file = sources[file_sha256]

                    # Reuse an existing external document ref when the same
                    # dependency document was already referenced.
                    doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
                    if doc_ref is None:
                        doc_ref = oe.spdx.SPDXExternalDocumentRef()
                        doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
                        doc_ref.spdxDocument = source_file.doc.documentNamespace
                        doc_ref.checksum.algorithm = "SHA1"
                        doc_ref.checksum.checksumValue = source_file.doc_sha1
                        package_doc.externalDocumentRefs.append(doc_ref)

                    ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
                else:
                    bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
                # Stop at the first search path where the file exists.
                break
            else:
                bb.debug(1, "Debug source %s not found" % debugsrc)

            package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
339
def collect_dep_recipes(d, doc, spdx_recipe):
    """Load the SPDX documents of all do_create_spdx build dependencies.

    Adds an external document ref plus a BUILD_DEPENDENCY_OF relationship to
    *doc* for each dependency, and returns the list of DepRecipe tuples.
    """
    from pathlib import Path
    import oe.sbom
    import oe.spdx

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))

    dep_recipes = []
    # BB_TASKDEPDATA gives (pn, taskname, ...) for every task dependency of
    # this task; keep only other recipes' do_create_spdx tasks.
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    deps = sorted(set(
        dep[0] for dep in taskdepdata.values() if
        dep[1] == "do_create_spdx" and dep[0] != d.getVar("PN")
    ))
    for dep_pn in deps:
        dep_recipe_path = deploy_dir_spdx / "recipes" / ("recipe-%s.spdx.json" % dep_pn)

        spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)

        # Locate the recipe package inside the dependency's document
        # (for-else: skip documents without a matching package).
        for pkg in spdx_dep_doc.packages:
            if pkg.name == dep_pn:
                spdx_dep_recipe = pkg
                break
        else:
            continue

        dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))

        dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
        dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
        dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
        dep_recipe_ref.checksum.algorithm = "SHA1"
        dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1

        doc.externalDocumentRefs.append(dep_recipe_ref)

        doc.add_relationship(
            "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
            "BUILD_DEPENDENCY_OF",
            spdx_recipe
        )

    return dep_recipes
382
# BB_TASKDEPDATA varies between builds; exclude it from the function's
# signature so it does not invalidate the task hash.
collect_dep_recipes[vardepsexclude] += "BB_TASKDEPDATA"
384
385
def collect_dep_sources(d, dep_recipes):
    """Index every SOURCE file of the dependency recipes by its SHA256.

    Native recipes are skipped because their files would also match
    non-native sources. Returns {sha256: oe.sbom.DepSource}.
    """
    import oe.sbom

    sources = {}
    for dep in dep_recipes:
        # Don't collect sources from native recipes as they
        # match non-native sources also.
        if recipe_spdx_is_native(d, dep.recipe):
            continue

        wanted_ids = set(dep.recipe.hasFiles)
        for dep_file in dep.doc.files:
            if dep_file.SPDXID not in wanted_ids:
                continue
            if "SOURCE" not in dep_file.fileTypes:
                continue
            # Key on the SHA256 checksum so debug sources can be matched
            # against these files later.
            for cksum in dep_file.checksums:
                if cksum.algorithm == "SHA256":
                    sources[cksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, dep_file)
                    break

    return sources
408
Andrew Geissler6aa7eec2023-03-03 12:41:14 -0600409def add_download_packages(d, doc, recipe):
410 import os.path
411 from bb.fetch2 import decodeurl, CHECKSUM_LIST
412 import bb.process
413 import oe.spdx
414 import oe.sbom
415
416 for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
417 f = bb.fetch2.FetchData(src_uri, d)
418
419 for name in f.names:
420 package = oe.spdx.SPDXPackage()
421 package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
422 package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)
423
424 if f.type == "file":
425 continue
426
427 uri = f.type
428 proto = getattr(f, "proto", None)
429 if proto is not None:
430 uri = uri + "+" + proto
431 uri = uri + "://" + f.host + f.path
432
433 if f.method.supports_srcrev():
434 uri = uri + "@" + f.revisions[name]
435
436 if f.method.supports_checksum(f):
437 for checksum_id in CHECKSUM_LIST:
438 if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
439 continue
440
441 expected_checksum = getattr(f, "%s_expected" % checksum_id)
442 if expected_checksum is None:
443 continue
444
445 c = oe.spdx.SPDXChecksum()
446 c.algorithm = checksum_id.upper()
447 c.checksumValue = expected_checksum
448 package.checksums.append(c)
449
450 package.downloadLocation = uri
451 doc.packages.append(package)
452 doc.add_relationship(doc, "DESCRIBES", package)
453 # In the future, we might be able to do more fancy dependencies,
454 # but this should be sufficient for now
455 doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
Patrick Williams7784c422022-11-17 07:29:11 -0600456
# Main task: build the recipe-level SPDX document, optionally scan/archive
# the patched sources, then emit one SPDX document per generated package.
python do_create_spdx() {
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import uuid
    from pathlib import Path
    from contextlib import contextmanager
    import oe.cve_check

    # Yields a zstd-compressed streaming tarfile at *name* when *guard* is
    # true, otherwise yields None so callers can skip archiving uniformly.
    @contextmanager
    def optional_tarfile(name, guard, mode="w"):
        import tarfile
        import bb.compress.zstd

        num_threads = int(d.getVar("BB_NUMBER_THREADS"))

        if guard:
            name.parent.mkdir(parents=True, exist_ok=True)
            with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
                with tarfile.open(fileobj=f, mode=mode + "|") as tf:
                    yield tf
        else:
            yield None


    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_workdir = Path(d.getVar("SPDXWORK"))
    include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
    archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
    archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    # Recipe-level document and its creation metadata.
    doc = oe.spdx.SPDXDocument()

    doc.name = "recipe-" + d.getVar("PN")
    doc.documentNamespace = get_doc_namespace(d, doc)
    doc.creationInfo.created = creation_time
    doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
    doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
    doc.creationInfo.creators.append("Person: N/A ()")

    recipe = oe.spdx.SPDXPackage()
    recipe.name = d.getVar("PN")
    recipe.versionInfo = d.getVar("PV")
    recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
    recipe.supplier = d.getVar("SPDX_SUPPLIER")
    # Mark native/cross recipes so downstream consumers can filter them
    # (see recipe_spdx_is_native).
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        recipe.annotations.append(create_annotation(d, "isNative"))

    homepage = d.getVar("HOMEPAGE")
    if homepage:
        recipe.homepage = homepage

    license = d.getVar("LICENSE")
    if license:
        recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)

    summary = d.getVar("SUMMARY")
    if summary:
        recipe.summary = summary

    description = d.getVar("DESCRIPTION")
    if description:
        recipe.description = description

    # User-requested extra variables are recorded as "VAR=value" annotations.
    if d.getVar("SPDX_CUSTOM_ANNOTATION_VARS"):
        for var in d.getVar('SPDX_CUSTOM_ANNOTATION_VARS').split():
            recipe.annotations.append(create_annotation(d, var + "=" + d.getVar(var)))

    # Some CVEs may be patched during the build process without incrementing the version number,
    # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
    # save the CVEs fixed by patches to source information field in the SPDX.
    patched_cves = oe.cve_check.get_patched_cves(d)
    patched_cves = list(patched_cves)
    patched_cves = ' '.join(patched_cves)
    if patched_cves:
        recipe.sourceInfo = "CVEs fixed: " + patched_cves

    cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
    if cpe_ids:
        for cpe_id in cpe_ids:
            cpe = oe.spdx.SPDXExternalReference()
            cpe.referenceCategory = "SECURITY"
            cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
            cpe.referenceLocator = cpe_id
            recipe.externalRefs.append(cpe)

    doc.packages.append(recipe)
    doc.add_relationship(doc, "DESCRIBES", recipe)

    add_download_packages(d, doc, recipe)

    # Optionally unpack/patch the sources into SPDXWORK and describe (and
    # possibly archive) every source file.
    if process_sources(d) and include_sources:
        recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
        with optional_tarfile(recipe_archive, archive_sources) as archive:
            spdx_get_src(d)

            add_package_files(
                d,
                doc,
                recipe,
                spdx_workdir,
                lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
                lambda filepath: ["SOURCE"],
                ignore_dirs=[".git"],
                ignore_top_level_dirs=["temp"],
                archive=archive,
            )

            if archive is not None:
                recipe.packageFileName = str(recipe_archive.name)

    dep_recipes = collect_dep_recipes(d, doc, recipe)

    # Write the recipe document; its SHA1 becomes the checksum used by all
    # external references to it.
    doc_sha1 = oe.sbom.write_doc(d, doc, "recipes", indent=get_json_indent(d))
    dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))

    recipe_ref = oe.spdx.SPDXExternalDocumentRef()
    recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
    recipe_ref.spdxDocument = doc.documentNamespace
    recipe_ref.checksum.algorithm = "SHA1"
    recipe_ref.checksum.checksumValue = doc_sha1

    sources = collect_dep_sources(d, dep_recipes)
    # Reuse LicenseRefs already extracted into the recipe document when
    # converting the per-package license expressions below.
    found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}

    # Per-package documents are only produced for target recipes.
    if not recipe_spdx_is_native(d, recipe):
        bb.build.exec_func("read_subpackage_metadata", d)

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            if not oe.packagedata.packaged(package, d):
                continue

            package_doc = oe.spdx.SPDXDocument()
            pkg_name = d.getVar("PKG:%s" % package) or package
            package_doc.name = pkg_name
            package_doc.documentNamespace = get_doc_namespace(d, package_doc)
            package_doc.creationInfo.created = creation_time
            package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
            package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
            package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
            package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
            package_doc.creationInfo.creators.append("Person: N/A ()")
            package_doc.externalDocumentRefs.append(recipe_ref)

            package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")

            spdx_package = oe.spdx.SPDXPackage()

            spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
            spdx_package.name = pkg_name
            spdx_package.versionInfo = d.getVar("PV")
            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
            spdx_package.supplier = d.getVar("SPDX_SUPPLIER")

            package_doc.packages.append(spdx_package)

            package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
            package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)

            # Describe (and optionally archive) the packaged binaries.
            package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
            with optional_tarfile(package_archive, archive_packaged) as archive:
                package_files = add_package_files(
                    d,
                    package_doc,
                    spdx_package,
                    pkgdest / package,
                    lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
                    lambda filepath: ["BINARY"],
                    ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
                    archive=archive,
                )

                if archive is not None:
                    spdx_package.packageFileName = str(package_archive.name)

            add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)

            oe.sbom.write_doc(d, package_doc, "packages", indent=get_json_indent(d))
}
# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies for archiving the source
addtask do_create_spdx after do_package do_packagedata do_unpack before do_populate_sdk do_build do_rm_work

# The task is shared-state cached: output staged in SPDXDEPLOY, published to
# DEPLOY_DIR_SPDX.
SSTATETASKS += "do_create_spdx"
do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"

python do_create_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_spdx_setscene

do_create_spdx[dirs] = "${SPDXWORK}"
do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
do_create_spdx[depends] += "${PATCHDEPENDENCY}"
# Run do_create_spdx for every build dependency first (their documents are
# read by collect_dep_recipes).
do_create_spdx[deptask] = "do_create_spdx"
657
def collect_package_providers(d):
    """Map every runtime-provided name (RPROVIDES entries plus the package
    names themselves) to the package that supplies it, across this recipe
    and all of its task dependencies."""
    from pathlib import Path
    import oe.sbom
    import oe.spdx
    import json

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))

    providers = {}

    pn = d.getVar("PN")
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    # All dependency recipes, then this recipe last so its own packages win.
    dep_pns = sorted({dep[0] for dep in taskdepdata.values() if dep[0] != pn})
    dep_pns.append(pn)

    for dep_pn in dep_pns:
        recipe_data = oe.packagedata.read_pkgdata(dep_pn, d)

        for pkg in recipe_data.get("PACKAGES", "").split():
            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, d)
            rprovides = {name for name, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items()}
            rprovides.add(pkg)

            for provide in rprovides:
                providers[provide] = pkg

    return providers
687
# BB_TASKDEPDATA varies between builds; exclude it from the function's
# signature so it does not invalidate the task hash.
collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
689
# Follow-up task: for each generated package, emit a "runtime-<pkg>" SPDX
# document that AMENDS the package document with RUNTIME_DEPENDENCY_OF
# relationships resolved through RDEPENDS/RPROVIDES.
python do_create_runtime_spdx() {
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import oe.packagedata
    from pathlib import Path

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
    is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    providers = collect_package_providers(d)

    # Native/cross recipes have no runtime packages; nothing to do for them.
    if not is_native:
        bb.build.exec_func("read_subpackage_metadata", d)

        dep_package_cache = {}

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            # Evaluate RDEPENDS etc. in the context of this sub-package.
            localdata = bb.data.createCopy(d)
            pkg_name = d.getVar("PKG:%s" % package) or package
            localdata.setVar("PKG", pkg_name)
            localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)

            if not oe.packagedata.packaged(package, localdata):
                continue

            pkg_spdx_path = deploy_dir_spdx / "packages" / (pkg_name + ".spdx.json")

            package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)

            # Locate the package inside its own document (for-else: fatal
            # if do_create_spdx did not record it).
            for p in package_doc.packages:
                if p.name == pkg_name:
                    spdx_package = p
                    break
            else:
                bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))

            runtime_doc = oe.spdx.SPDXDocument()
            runtime_doc.name = "runtime-" + pkg_name
            runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
            runtime_doc.creationInfo.created = creation_time
            runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
            runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
            runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
            runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
            runtime_doc.creationInfo.creators.append("Person: N/A ()")

            package_ref = oe.spdx.SPDXExternalDocumentRef()
            package_ref.externalDocumentId = "DocumentRef-package-" + package
            package_ref.spdxDocument = package_doc.documentNamespace
            package_ref.checksum.algorithm = "SHA1"
            package_ref.checksum.checksumValue = package_doc_sha1

            runtime_doc.externalDocumentRefs.append(package_ref)

            runtime_doc.add_relationship(
                runtime_doc.SPDXID,
                "AMENDS",
                "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
            )

            deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
            seen_deps = set()
            for dep, _ in deps.items():
                if dep in seen_deps:
                    continue

                if dep not in providers:
                    continue

                # Resolve the RPROVIDES name to the concrete package.
                dep = providers[dep]

                if not oe.packagedata.packaged(dep, localdata):
                    continue

                dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
                dep_pkg = dep_pkg_data["PKG"]

                # Cache dependency documents: many packages share deps.
                if dep in dep_package_cache:
                    (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
                else:
                    dep_path = deploy_dir_spdx / "packages" / ("%s.spdx.json" % dep_pkg)

                    spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)

                    for pkg in spdx_dep_doc.packages:
                        if pkg.name == dep_pkg:
                            dep_spdx_package = pkg
                            break
                    else:
                        bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))

                    dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
                    dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
                    dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
                    dep_package_ref.checksum.algorithm = "SHA1"
                    dep_package_ref.checksum.checksumValue = spdx_dep_sha1

                    dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)

                runtime_doc.externalDocumentRefs.append(dep_package_ref)

                runtime_doc.add_relationship(
                    "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
                    "RUNTIME_DEPENDENCY_OF",
                    "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
                )
                seen_deps.add(dep)

            oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy, indent=get_json_indent(d))
}
805
addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
# Shared-state cached like do_create_spdx: staged in SPDXRUNTIMEDEPLOY,
# published to DEPLOY_DIR_SPDX.
SSTATETASKS += "do_create_runtime_spdx"
do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"

python do_create_runtime_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_runtime_spdx_setscene

do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
# Runtime dependencies' package documents must exist before this task runs.
do_create_runtime_spdx[rdeptask] = "do_create_spdx"
819
def spdx_get_src(d):
    """
    Save the patched source of the recipe into SPDXWORK.

    Temporarily redirects WORKDIR so do_unpack/do_patch run in the SPDX
    scratch area; for work-shared recipes (e.g. the kernel) the already
    unpacked shared tree is copied instead. WORKDIR is always restored.
    """
    import shutil
    spdx_workdir = d.getVar('SPDXWORK')
    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    workdir = d.getVar("WORKDIR")

    try:
        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
        if not is_work_shared_spdx(d):
            # Change the WORKDIR to make do_unpack do_patch run in another dir.
            d.setVar('WORKDIR', spdx_workdir)
            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)

            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
            # possibly requiring of the following tasks (such as some recipes's
            # do_patch required 'B' existed).
            bb.utils.mkdirhier(d.getVar('B'))

            bb.build.exec_func('do_unpack', d)
        # Copy source of kernel to spdx_workdir
        if is_work_shared_spdx(d):
            share_src = d.getVar('WORKDIR')
            d.setVar('WORKDIR', spdx_workdir)
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
            src_dir = spdx_workdir + "/" + d.getVar('PN') + "-" + d.getVar('PV') + "-" + d.getVar('PR')
            bb.utils.mkdirhier(src_dir)
            if bb.data.inherits_class('kernel', d):
                share_src = d.getVar('STAGING_KERNEL_DIR')
            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)

            git_path = src_dir + "/.git"
            if os.path.exists(git_path):
                # Fixed: this previously called the nonexistent "shutils"
                # module, raising NameError whenever a .git dir was present.
                shutil.rmtree(git_path)

        # Make sure gcc and kernel sources are patched only once
        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
            bb.build.exec_func('do_patch', d)

        # Some userland has no source.
        if not os.path.exists(spdx_workdir):
            bb.utils.mkdirhier(spdx_workdir)
    finally:
        # Always restore the original WORKDIR for subsequent tasks.
        d.setVar("WORKDIR", workdir)
871
# Image and SDK builds need every installed package's SPDX documents.
do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"

ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx ; "

do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx; "
POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx; "
881
# Combine the SPDX documents of every package installed in the image into a
# single archive next to the image, and create the IMAGE_LINK_NAME symlink.
python image_combine_spdx() {
    import os
    import oe.sbom
    from pathlib import Path
    from oe.rootfs import image_list_installed_packages

    image_name = d.getVar("IMAGE_NAME")
    image_link_name = d.getVar("IMAGE_LINK_NAME")
    imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
    img_spdxid = oe.sbom.get_image_spdxid(image_name)
    packages = image_list_installed_packages(d)

    combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages, Path(d.getVar("SPDXIMAGEWORK")))

    # Create the stable-name symlink pointing at the versioned artifact.
    def make_image_link(target_path, suffix):
        if image_link_name:
            link = imgdeploydir / (image_link_name + suffix)
            if link != target_path:
                link.symlink_to(os.path.relpath(target_path, link.parent))

    spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
    make_image_link(spdx_tar_path, ".spdx.tar.zst")
}
905
# Combine SPDX for the host half of the SDK (POPULATE_SDK_POST_HOST_COMMAND).
python sdk_host_combine_spdx() {
    sdk_combine_spdx(d, "host")
}
909
# Task hook: assemble the combined SPDX archive for the SDK target half.
python sdk_target_combine_spdx() {
    sdk_combine_spdx(d, "target")
}
913
def sdk_combine_spdx(d, sdk_type):
    """Build the combined SPDX archive for one half of the SDK.

    sdk_type is either "host" or "target"; it selects which installed
    package list is collected and is appended to SDK_NAME to form the
    output document name.
    """
    import oe.sbom
    from pathlib import Path
    from oe.sdk import sdk_list_installed_packages

    full_name = "%s-%s" % (d.getVar("SDK_NAME"), sdk_type)
    deploydir = Path(d.getVar("SDKDEPLOYDIR"))
    spdxid = oe.sbom.get_sdk_spdxid(full_name)
    # The target half lists target packages; anything else lists host packages.
    installed = sdk_list_installed_packages(d, sdk_type == "target")
    combine_spdx(d, full_name, deploydir, spdxid, installed, Path(d.getVar("SPDXSDKWORK")))
Patrick Williams7784c422022-11-17 07:29:11 -0600924
Andrew Geissler6aa7eec2023-03-03 12:41:14 -0600925def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx_workdir):
Patrick Williams7784c422022-11-17 07:29:11 -0600926 import os
927 import oe.spdx
928 import oe.sbom
929 import io
930 import json
931 from datetime import timezone, datetime
932 from pathlib import Path
933 import tarfile
934 import bb.compress.zstd
935
936 creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
937 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
938 source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
939
940 doc = oe.spdx.SPDXDocument()
941 doc.name = rootfs_name
942 doc.documentNamespace = get_doc_namespace(d, doc)
943 doc.creationInfo.created = creation_time
944 doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
945 doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
946 doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
947 doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
948 doc.creationInfo.creators.append("Person: N/A ()")
949
950 image = oe.spdx.SPDXPackage()
951 image.name = d.getVar("PN")
952 image.versionInfo = d.getVar("PV")
953 image.SPDXID = rootfs_spdxid
954 image.supplier = d.getVar("SPDX_SUPPLIER")
955
956 doc.packages.append(image)
957
958 for name in sorted(packages.keys()):
959 pkg_spdx_path = deploy_dir_spdx / "packages" / (name + ".spdx.json")
960 pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
961
962 for p in pkg_doc.packages:
963 if p.name == name:
964 pkg_ref = oe.spdx.SPDXExternalDocumentRef()
965 pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
966 pkg_ref.spdxDocument = pkg_doc.documentNamespace
967 pkg_ref.checksum.algorithm = "SHA1"
968 pkg_ref.checksum.checksumValue = pkg_doc_sha1
969
970 doc.externalDocumentRefs.append(pkg_ref)
971 doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
972 break
973 else:
974 bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
975
976 runtime_spdx_path = deploy_dir_spdx / "runtime" / ("runtime-" + name + ".spdx.json")
977 runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
978
979 runtime_ref = oe.spdx.SPDXExternalDocumentRef()
980 runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
981 runtime_ref.spdxDocument = runtime_doc.documentNamespace
982 runtime_ref.checksum.algorithm = "SHA1"
983 runtime_ref.checksum.checksumValue = runtime_doc_sha1
984
985 # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
986 doc.externalDocumentRefs.append(runtime_ref)
987 doc.add_relationship(
988 image,
989 "OTHER",
990 "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
991 comment="Runtime dependencies for %s" % name
992 )
993
Andrew Geissler6aa7eec2023-03-03 12:41:14 -0600994 image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")
Patrick Williams7784c422022-11-17 07:29:11 -0600995
996 with image_spdx_path.open("wb") as f:
997 doc.to_json(f, sort_keys=True, indent=get_json_indent(d))
998
999 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
1000
1001 visited_docs = set()
1002
1003 index = {"documents": []}
1004
1005 spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
1006 with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
1007 with tarfile.open(fileobj=f, mode="w|") as tar:
1008 def collect_spdx_document(path):
1009 nonlocal tar
1010 nonlocal deploy_dir_spdx
1011 nonlocal source_date_epoch
1012 nonlocal index
1013
1014 if path in visited_docs:
1015 return
1016
1017 visited_docs.add(path)
1018
1019 with path.open("rb") as f:
1020 doc, sha1 = oe.sbom.read_doc(f)
1021 f.seek(0)
1022
1023 if doc.documentNamespace in visited_docs:
1024 return
1025
1026 bb.note("Adding SPDX document %s" % path)
1027 visited_docs.add(doc.documentNamespace)
1028 info = tar.gettarinfo(fileobj=f)
1029
1030 info.name = doc.name + ".spdx.json"
1031 info.uid = 0
1032 info.gid = 0
1033 info.uname = "root"
1034 info.gname = "root"
1035
1036 if source_date_epoch is not None and info.mtime > int(source_date_epoch):
1037 info.mtime = int(source_date_epoch)
1038
1039 tar.addfile(info, f)
1040
1041 index["documents"].append({
1042 "filename": info.name,
1043 "documentNamespace": doc.documentNamespace,
1044 "sha1": sha1,
1045 })
1046
1047 for ref in doc.externalDocumentRefs:
1048 ref_path = deploy_dir_spdx / "by-namespace" / ref.spdxDocument.replace("/", "_")
1049 collect_spdx_document(ref_path)
1050
1051 collect_spdx_document(image_spdx_path)
1052
1053 index["documents"].sort(key=lambda x: x["filename"])
1054
1055 index_str = io.BytesIO(json.dumps(
1056 index,
1057 sort_keys=True,
1058 indent=get_json_indent(d),
1059 ).encode("utf-8"))
1060
1061 info = tarfile.TarInfo()
1062 info.name = "index.json"
1063 info.size = len(index_str.getvalue())
1064 info.uid = 0
1065 info.gid = 0
1066 info.uname = "root"
1067 info.gname = "root"
1068
1069 tar.addfile(info, fileobj=index_str)