#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#
# This class is used to check recipes against public CVEs.
#
# In order to use this class just inherit the class in the
# local.conf file and it will add the cve_check task for
# every recipe. The task can be used per recipe, per image,
# or using the special cases "world" and "universe". The
# cve_check task will print a warning for every unpatched
# CVE found and generate a file in the recipe WORKDIR/cve
# directory. If an image is built it will generate a report
# in DEPLOY_DIR_IMAGE for all the packages used.
#
# Example:
# bitbake -c cve_check openssl
# bitbake core-image-sato
# bitbake -k -c cve_check universe
#
# DISCLAIMER
#
# This class/tool is meant to be used as support and not
# the only method to check against CVEs. Running this tool
# doesn't guarantee your packages are free of CVEs.
# The product name that the CVE database uses defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
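# More than one product name may be given, separated by spaces, and a vendor
# may be prefixed with a colon. The values below are purely illustrative:
#   CVE_PRODUCT = "curl libcurl"
#   CVE_PRODUCT = "somevendor:someproduct"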
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.1.db"
CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"
CVE_CHECK_LOG ?= "${T}/cve.log"
CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check"
CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}"
CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"
# Also report Patched and Ignored CVEs, not only Unpatched ones
CVE_CHECK_REPORT_PATCHED ??= "1"
CVE_CHECK_SHOW_WARNINGS ??= "1"
# Provide text output
CVE_CHECK_FORMAT_TEXT ??= "1"
# Provide JSON output
CVE_CHECK_FORMAT_JSON ??= "1"
# Report packages with no CVEs (either no issues found or no matching product name in the database)
CVE_CHECK_COVERAGE ??= "1"
# Skip CVE Check for packages (PN)
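# For example (recipe names are illustrative):
#   CVE_CHECK_SKIP_RECIPE = "recipe-a recipe-b"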
CVE_CHECK_SKIP_RECIPE ?= ""
# Ignore the check for a given list of CVEs. If a CVE is found,
# then it is considered patched. The value is a string containing
# space separated CVE values:
#
# CVE_CHECK_IGNORE = 'CVE-2014-2524 CVE-2018-1234'
#
CVE_CHECK_IGNORE ?= ""
# Layers to be excluded
CVE_CHECK_LAYER_EXCLUDELIST ??= ""
# Layers to be included
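# For example (layer name is illustrative):
#   CVE_CHECK_LAYER_INCLUDELIST = "meta-mylayer"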
CVE_CHECK_LAYER_INCLUDELIST ??= ""
# set to "alphabetical" for version using single alphabetical character as increment release
CVE_VERSION_SUFFIX ??= ""
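# Merge all per-recipe JSON fragments listed in CVE_CHECK_SUMMARY_INDEX_PATH
# into a single JSON summary written to out_path, and refresh the summary
# symlink at link_path.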
def generate_json_report(d, out_path, link_path):
if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
import json
from oe.cve_check import cve_check_merge_jsons, update_symlinks
bb.note("Generating JSON CVE summary")
index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
summary = {"version":"1", "package": []}
with open(index_file) as f:
filename = f.readline()
while filename:
with open(filename.rstrip()) as j:
data = json.load(j)
cve_check_merge_jsons(summary, data)
filename = f.readline()
with open(out_path, "w") as f:
json.dump(summary, f, indent=2)
update_symlinks(out_path, link_path)
python cve_save_summary_handler () {
import shutil
import datetime
from oe.cve_check import update_symlinks
cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME")
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
bb.utils.mkdirhier(cvelogpath)
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
cve_summary_file = os.path.join(cvelogpath, "%s-%s.txt" % (cve_summary_name, timestamp))
if os.path.exists(cve_tmp_file):
shutil.copyfile(cve_tmp_file, cve_summary_file)
cvefile_link = os.path.join(cvelogpath, cve_summary_name)
update_symlinks(cve_summary_file, cvefile_link)
bb.plain("Complete CVE report summary created at: %s" % cvefile_link)
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
json_summary_name = os.path.join(cvelogpath, "%s-%s.json" % (cve_summary_name, timestamp))
generate_json_report(d, json_summary_name, json_summary_link_name)
bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name)
}
addhandler cve_save_summary_handler
cve_save_summary_handler[eventmask] = "bb.event.BuildCompleted"
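# The NVD database is populated by cve-update-db-native:do_fetch (see the task
# dependency below); a shared lock on the database file lets concurrent
# do_cve_check tasks read it in parallel.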
python do_cve_check () {
"""
Check recipe for patched and unpatched CVEs
"""
from oe.cve_check import get_patched_cves
with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True):
if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
try:
patched_cves = get_patched_cves(d)
except FileNotFoundError:
bb.fatal("Failure in searching patches")
ignored, patched, unpatched, status = check_cves(d, patched_cves)
if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
cve_data = get_cve_info(d, patched + unpatched + ignored)
cve_write_data(d, patched, unpatched, ignored, cve_data, status)
else:
bb.note("No CVE database found, skipping CVE check")
}
addtask cve_check before do_build
do_cve_check[depends] = "cve-update-db-native:do_fetch"
do_cve_check[nostamp] = "1"
python cve_check_cleanup () {
"""
Delete the file used to gather all the CVE information.
"""
bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
}
addhandler cve_check_cleanup
cve_check_cleanup[eventmask] = "bb.event.BuildCompleted"
python cve_check_write_rootfs_manifest () {
"""
Create CVE manifest when building an image
"""
import shutil
import json
from oe.rootfs import image_list_installed_packages
from oe.cve_check import cve_check_merge_jsons, update_symlinks
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
if os.path.exists(deploy_file):
bb.utils.remove(deploy_file)
deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
if os.path.exists(deploy_file_json):
bb.utils.remove(deploy_file_json)
# Create a list of relevant recipes
recipies = set()
for pkg in list(image_list_installed_packages(d)):
pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
'runtime-reverse', pkg)
pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
recipies.add(pkg_data["PN"])
bb.note("Writing rootfs CVE manifest")
deploy_dir = d.getVar("IMGDEPLOYDIR")
link_name = d.getVar("IMAGE_LINK_NAME")
json_data = {"version":"1", "package": []}
text_data = ""
enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
enable_text = d.getVar("CVE_CHECK_FORMAT_TEXT") == "1"
save_pn = d.getVar("PN")
for pkg in recipies:
# To be able to use the CVE_CHECK_RECIPE_FILE variable we have to evaluate
# it with the different PN names set each time.
d.setVar("PN", pkg)
if enable_text:
pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE")
if os.path.exists(pkgfilepath):
with open(pkgfilepath) as pfile:
text_data += pfile.read()
if enable_json:
pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
if os.path.exists(pkgfilepath):
with open(pkgfilepath) as j:
data = json.load(j)
cve_check_merge_jsons(json_data, data)
d.setVar("PN", save_pn)
if enable_text:
link_path = os.path.join(deploy_dir, "%s.cve" % link_name)
manifest_name = d.getVar("CVE_CHECK_MANIFEST")
with open(manifest_name, "w") as f:
f.write(text_data)
update_symlinks(manifest_name, link_path)
bb.plain("Image CVE report stored in: %s" % manifest_name)
if enable_json:
link_path = os.path.join(deploy_dir, "%s.json" % link_name)
manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
with open(manifest_name, "w") as f:
json.dump(json_data, f, indent=2)
update_symlinks(manifest_name, link_path)
bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
}
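# Hook the manifest aggregation into image post-processing and make do_rootfs /
# do_populate_sdk recursively depend on do_cve_check when manifest creation is
# enabled.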
ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
def check_cves(d, patched_cves):
"""
Connect to the NVD database and find unpatched cves.
"""
from oe.cve_check import Version, convert_cve_version
pn = d.getVar("PN")
real_pv = d.getVar("PV")
suffix = d.getVar("CVE_VERSION_SUFFIX")
cves_unpatched = []
cves_ignored = []
cves_status = []
cves_in_recipe = False
# CVE_PRODUCT can contain more than one product (e.g. curl/libcurl)
products = d.getVar("CVE_PRODUCT").split()
# If this has been unset then we're not scanning for CVEs here (for example, image recipes)
if not products:
return ([], [], [], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
# If the recipe has been skipped/ignored we return empty lists
if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split():
bb.note("Recipe has been skipped by cve-check")
return ([], [], [], [])
cve_ignore = d.getVar("CVE_CHECK_IGNORE").split()
import sqlite3
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
conn = sqlite3.connect(db_file, uri=True)
# For each of the known product names (e.g. curl has CPEs using curl and libcurl)...
for product in products:
cves_in_product = False
if ":" in product:
vendor, product = product.split(":", 1)
else:
vendor = "%"
# Find all relevant CVE IDs.
cve_cursor = conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor))
for cverow in cve_cursor:
cve = cverow[0]
if cve in cve_ignore:
bb.note("%s-%s ignores %s" % (product, pv, cve))
cves_ignored.append(cve)
continue
elif cve in patched_cves:
bb.note("%s has been patched" % (cve))
continue
# Write status once only for each product
if not cves_in_product:
cves_status.append([product, True])
cves_in_product = True
cves_in_recipe = True
vulnerable = False
ignored = False
product_cursor = conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor))
for row in product_cursor:
(_, _, _, version_start, operator_start, version_end, operator_end) = row
#bb.debug(2, "Evaluating row " + str(row))
if cve in cve_ignore:
ignored = True
version_start = convert_cve_version(version_start)
version_end = convert_cve_version(version_end)
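# A row describes either an exact version ("=" operator, or "-" meaning the
# affected version is unspecified and treated as a match) or a range bounded
# by operator_start (">", ">=") and/or operator_end ("<", "<="); PV must
# satisfy every bound that is present for the row to apply.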
if (operator_start == '=' and pv == version_start) or version_start == '-':
vulnerable = True
else:
if operator_start:
try:
vulnerable_start = (operator_start == '>=' and Version(pv,suffix) >= Version(version_start,suffix))
vulnerable_start |= (operator_start == '>' and Version(pv,suffix) > Version(version_start,suffix))
except:
bb.warn("%s: Failed to compare %s %s %s for %s" %
(product, pv, operator_start, version_start, cve))
vulnerable_start = False
else:
vulnerable_start = False
if operator_end:
try:
vulnerable_end = (operator_end == '<=' and Version(pv,suffix) <= Version(version_end,suffix) )
vulnerable_end |= (operator_end == '<' and Version(pv,suffix) < Version(version_end,suffix) )
except:
bb.warn("%s: Failed to compare %s %s %s for %s" %
(product, pv, operator_end, version_end, cve))
vulnerable_end = False
else:
vulnerable_end = False
if operator_start and operator_end:
vulnerable = vulnerable_start and vulnerable_end
else:
vulnerable = vulnerable_start or vulnerable_end
if vulnerable:
if ignored:
bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
cves_ignored.append(cve)
else:
bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
cves_unpatched.append(cve)
break
product_cursor.close()
if not vulnerable:
bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
patched_cves.add(cve)
cve_cursor.close()
if not cves_in_product:
bb.note("No CVE records found for product %s, pn %s" % (product, pn))
cves_status.append([product, False])
conn.close()
if not cves_in_recipe:
bb.note("No CVE records for products in recipe %s" % (pn))
return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status)
def get_cve_info(d, cves):
"""
Get CVE information from the database.
"""
import sqlite3
cve_data = {}
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
conn = sqlite3.connect(db_file, uri=True)
for cve in cves:
cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,))
for row in cursor:
cve_data[row[0]] = {}
cve_data[row[0]]["summary"] = row[1]
cve_data[row[0]]["scorev2"] = row[2]
cve_data[row[0]]["scorev3"] = row[3]
cve_data[row[0]]["modified"] = row[4]
cve_data[row[0]]["vector"] = row[5]
cursor.close()
conn.close()
return cve_data
def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
"""
Write CVE information to WORKDIR, to CVE_CHECK_DIR, and to the
CVE manifest if enabled.
"""
cve_file = d.getVar("CVE_CHECK_LOG")
fdir_name = d.getVar("FILE_DIRNAME")
layer = fdir_name.split("/")[-3]
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
if exclude_layers and layer in exclude_layers:
return
if include_layers and layer not in include_layers:
return
# Early exit, the text format does not report packages without CVEs
if not patched+unpatched+ignored:
return
nvd_link = "https://nvd.nist.gov/vuln/detail/"
write_string = ""
unpatched_cves = []
bb.utils.mkdirhier(os.path.dirname(cve_file))
for cve in sorted(cve_data):
is_patched = cve in patched
is_ignored = cve in ignored
if (is_patched or is_ignored) and not report_all:
continue
write_string += "LAYER: %s\n" % layer
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
write_string += "CVE: %s\n" % cve
if is_ignored:
write_string += "CVE STATUS: Ignored\n"
elif is_patched:
write_string += "CVE STATUS: Patched\n"
else:
unpatched_cves.append(cve)
write_string += "CVE STATUS: Unpatched\n"
write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"]
write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"]
write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"]
write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)
if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
with open(cve_file, "w") as f:
bb.note("Writing file %s with CVE information" % cve_file)
f.write(write_string)
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
bb.utils.mkdirhier(os.path.dirname(deploy_file))
with open(deploy_file, "w") as f:
f.write(write_string)
if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
bb.utils.mkdirhier(cvelogpath)
with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
f.write("%s" % write_string)
def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
"""
Write CVE information in the JSON format to WORKDIR and to CVE_CHECK_DIR.
If the CVE manifest is enabled, also write fragment files that will be
assembled at the end by cve_check_write_rootfs_manifest.
"""
import json
write_string = json.dumps(output, indent=2)
with open(direct_file, "w") as f:
bb.note("Writing file %s with CVE information" % direct_file)
f.write(write_string)
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
bb.utils.mkdirhier(os.path.dirname(deploy_file))
with open(deploy_file, "w") as f:
f.write(write_string)
if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
bb.utils.mkdirhier(cvelogpath)
fragment_file = os.path.basename(deploy_file)
fragment_path = os.path.join(cvelogpath, fragment_file)
with open(fragment_path, "w") as f:
f.write(write_string)
with open(index_path, "a+") as f:
f.write("%s\n" % fragment_path)
def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
"""
Prepare CVE data for the JSON format, then write it.
"""
output = {"version":"1", "package": []}
nvd_link = "https://nvd.nist.gov/vuln/detail/"
fdir_name = d.getVar("FILE_DIRNAME")
layer = fdir_name.split("/")[-3]
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
if exclude_layers and layer in exclude_layers:
return
if include_layers and layer not in include_layers:
return
unpatched_cves = []
product_data = []
for s in cve_status:
p = {"product": s[0], "cvesInRecord": "Yes"}
if s[1] == False:
p["cvesInRecord"] = "No"
product_data.append(p)
package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
package_data = {
"name" : d.getVar("PN"),
"layer" : layer,
"version" : package_version,
"products": product_data
}
cve_list = []
for cve in sorted(cve_data):
is_patched = cve in patched
is_ignored = cve in ignored
status = "Unpatched"
if (is_patched or is_ignored) and not report_all:
continue
if is_ignored:
status = "Ignored"
elif is_patched:
status = "Patched"
else:
# default value of status is Unpatched
unpatched_cves.append(cve)
issue_link = "%s%s" % (nvd_link, cve)
cve_item = {
"id" : cve,
"summary" : cve_data[cve]["summary"],
"scorev2" : cve_data[cve]["scorev2"],
"scorev3" : cve_data[cve]["scorev3"],
"vector" : cve_data[cve]["vector"],
"status" : status,
"link": issue_link
}
cve_list.append(cve_item)
package_data["issue"] = cve_list
output["package"].append(package_data)
direct_file = d.getVar("CVE_CHECK_LOG_JSON")
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
manifest_file = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")
cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
def cve_write_data(d, patched, unpatched, ignored, cve_data, status):
"""
Write CVE data in each enabled format.
"""
if d.getVar("CVE_CHECK_FORMAT_TEXT") == "1":
cve_write_data_text(d, patched, unpatched, ignored, cve_data)
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
cve_write_data_json(d, patched, unpatched, ignored, cve_data, status)