| # |
| # Copyright OpenEmbedded Contributors |
| # |
| # SPDX-License-Identifier: MIT |
| # |
| |
| # This class is used to check recipes against public CVEs. |
| # |
# In order to use this class, inherit it in the
# local.conf file and it will add the cve_check task to
# every recipe. The task can be used per recipe, per image,
# or with the special targets "world" and "universe". The
# cve_check task will print a warning for every unpatched
# CVE found and generate a file in the recipe WORKDIR/cve
# directory. If an image is built, it will generate a report
# in DEPLOY_DIR_IMAGE covering all the packages used.
| # |
| # Example: |
| # bitbake -c cve_check openssl |
| # bitbake core-image-sato |
| # bitbake -k -c cve_check universe |
| # |
| # DISCLAIMER |
| # |
# This class/tool is meant to be used as an aid and not as
# the only method of checking against CVEs. Running this tool
# doesn't guarantee your packages are free of CVEs.
| |
# The product name that the CVE database uses defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
| CVE_PRODUCT ??= "${BPN}" |
| CVE_VERSION ??= "${PV}" |
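
# A vendor can be given together with the product name using the
# "vendor:product" syntax; the values below are purely illustrative:
# CVE_PRODUCT = "gnu:bash"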
| |
| CVE_CHECK_DB_FILENAME ?= "nvdcve_2-2.db" |
| CVE_CHECK_DB_DIR ?= "${STAGING_DIR}/CVE_CHECK" |
| CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/${CVE_CHECK_DB_FILENAME}" |
| CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock" |
| |
| CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve" |
| CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary" |
| CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json" |
| CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt" |
| |
| CVE_CHECK_LOG_JSON ?= "${T}/cve.json" |
| |
| CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve" |
| CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json" |
| CVE_CHECK_MANIFEST_JSON_SUFFIX ?= "json" |
| CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.${CVE_CHECK_MANIFEST_JSON_SUFFIX}" |
| CVE_CHECK_COPY_FILES ??= "1" |
| CVE_CHECK_CREATE_MANIFEST ??= "1" |
| |
| # Report Patched or Ignored CVEs |
| CVE_CHECK_REPORT_PATCHED ??= "1" |
| |
| CVE_CHECK_SHOW_WARNINGS ??= "1" |
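
# For example, to omit patched/ignored entries from the report and silence
# per-recipe warnings, both can be disabled (typically from local.conf):
# CVE_CHECK_REPORT_PATCHED = "0"
# CVE_CHECK_SHOW_WARNINGS = "0"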
| |
| # Provide JSON output |
| CVE_CHECK_FORMAT_JSON ??= "1" |
| |
| # Check for packages without CVEs (no issues or missing product name) |
| CVE_CHECK_COVERAGE ??= "1" |
| |
| # Skip CVE Check for packages (PN) |
| CVE_CHECK_SKIP_RECIPE ?= "" |
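
# For example (hypothetical recipe names, typically set in local.conf):
# CVE_CHECK_SKIP_RECIPE = "my-vendor-app my-vendor-lib"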
| |
# Override the NVD DB check status for a given CVE. Each CVE has to be listed
# separately, with an optional detail and description for the status.
| # |
| # CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows" |
| # CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally" |
| # |
# Setting the same status and reason for multiple CVEs is possible
# via the CVE_STATUS_GROUPS variable.
| # |
| # CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED" |
| # |
| # CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003" |
| # CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows" |
| # CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004" |
| # CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally" |
| # |
# All possible CVE statuses can be found in cve-check-map.conf:
| # CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored" |
| # CVE_CHECK_STATUSMAP[fixed-version] = "Patched" |
| # |
# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
# Keep CVE_CHECK_IGNORE until other layers migrate to the new variables.
| CVE_CHECK_IGNORE ?= "" |
| |
| # Layers to be excluded |
| CVE_CHECK_LAYER_EXCLUDELIST ??= "" |
| |
| # Layers to be included |
| CVE_CHECK_LAYER_INCLUDELIST ??= "" |
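
# For example, to restrict the report to selected layers, or to leave some
# out (layer names are illustrative):
# CVE_CHECK_LAYER_INCLUDELIST = "meta meta-oe"
# CVE_CHECK_LAYER_EXCLUDELIST = "meta-my-bsp"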
| |
| |
# Set to "alphabetical" for versions that use a single alphabetical character as the release increment
| CVE_VERSION_SUFFIX ??= "" |
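
# For example, a recipe whose upstream releases increment a trailing letter
# (1.0.2a, 1.0.2b, ...) could set:
# CVE_VERSION_SUFFIX = "alphabetical"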
| |
| python () { |
    # Migrate all CVEs from the deprecated CVE_CHECK_IGNORE to CVE_STATUS
| cve_check_ignore = d.getVar("CVE_CHECK_IGNORE") |
| if cve_check_ignore: |
| bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS") |
| for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split(): |
| d.setVarFlag("CVE_STATUS", cve, "ignored") |
| |
| # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once |
| for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split(): |
| cve_group = d.getVar(cve_status_group) |
| if cve_group is not None: |
| for cve in cve_group.split(): |
| d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status")) |
| else: |
| bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group) |
| } |
| |
| def generate_json_report(d, out_path, link_path): |
| if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")): |
| import json |
| from oe.cve_check import cve_check_merge_jsons, update_symlinks |
| |
| bb.note("Generating JSON CVE summary") |
| index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH") |
| summary = {"version":"1", "package": []} |
| with open(index_file) as f: |
| filename = f.readline() |
| while filename: |
| with open(filename.rstrip()) as j: |
| data = json.load(j) |
| cve_check_merge_jsons(summary, data) |
| filename = f.readline() |
| |
| summary["package"].sort(key=lambda d: d['name']) |
| |
| with open(out_path, "w") as f: |
| json.dump(summary, f, indent=2) |
| |
| update_symlinks(out_path, link_path) |
| |
| python cve_save_summary_handler () { |
| import shutil |
| import datetime |
| from oe.cve_check import update_symlinks |
| |
| cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME") |
| cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") |
| bb.utils.mkdirhier(cvelogpath) |
| |
| timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S') |
| |
| if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": |
| json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")) |
| json_summary_name = os.path.join(cvelogpath, "%s-%s.json" % (cve_summary_name, timestamp)) |
| generate_json_report(d, json_summary_name, json_summary_link_name) |
| bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name) |
| } |
| |
| addhandler cve_save_summary_handler |
| cve_save_summary_handler[eventmask] = "bb.event.BuildCompleted" |
| |
| python do_cve_check () { |
| """ |
| Check recipe for patched and unpatched CVEs |
| """ |
| from oe.cve_check import get_patched_cves |
| |
| with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True): |
| if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")): |
| try: |
| patched_cves = get_patched_cves(d) |
| except FileNotFoundError: |
| bb.fatal("Failure in searching patches") |
| cve_data, status = check_cves(d, patched_cves) |
| if len(cve_data) or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status): |
| get_cve_info(d, cve_data) |
| cve_write_data(d, cve_data, status) |
| else: |
| bb.note("No CVE database found, skipping CVE check") |
| |
| } |
| |
| addtask cve_check before do_build |
| do_cve_check[depends] = "cve-update-nvd2-native:do_unpack" |
| do_cve_check[nostamp] = "1" |
| |
| python cve_check_cleanup () { |
| """ |
| Delete the file used to gather all the CVE information. |
| """ |
| bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")) |
| } |
| |
| addhandler cve_check_cleanup |
| cve_check_cleanup[eventmask] = "bb.event.BuildCompleted" |
| |
| python cve_check_write_rootfs_manifest () { |
| """ |
| Create CVE manifest when building an image |
| """ |
| |
| import shutil |
| import json |
| from oe.rootfs import image_list_installed_packages |
| from oe.cve_check import cve_check_merge_jsons, update_symlinks |
| |
| if d.getVar("CVE_CHECK_COPY_FILES") == "1": |
| deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") |
| if os.path.exists(deploy_file_json): |
| bb.utils.remove(deploy_file_json) |
| |
    # Create a list of relevant recipes
    recipes = set()
| for pkg in list(image_list_installed_packages(d)): |
| pkg_info = os.path.join(d.getVar('PKGDATA_DIR'), |
| 'runtime-reverse', pkg) |
| pkg_data = oe.packagedata.read_pkgdatafile(pkg_info) |
        recipes.add(pkg_data["PN"])
| |
| bb.note("Writing rootfs CVE manifest") |
| deploy_dir = d.getVar("IMGDEPLOYDIR") |
| link_name = d.getVar("IMAGE_LINK_NAME") |
| |
| json_data = {"version":"1", "package": []} |
| text_data = "" |
| enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1" |
| |
| save_pn = d.getVar("PN") |
| |
    for pkg in recipes:
        # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to
        # re-evaluate it with each recipe's PN set in turn.
| d.setVar("PN", pkg) |
| |
| if enable_json: |
| pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") |
| if os.path.exists(pkgfilepath): |
| with open(pkgfilepath) as j: |
| data = json.load(j) |
| cve_check_merge_jsons(json_data, data) |
| |
| d.setVar("PN", save_pn) |
| |
| if enable_json: |
| manifest_name_suffix = d.getVar("CVE_CHECK_MANIFEST_JSON_SUFFIX") |
| link_path = os.path.join(deploy_dir, "%s.%s" % (link_name, manifest_name_suffix)) |
| manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON") |
| |
| with open(manifest_name, "w") as f: |
| json.dump(json_data, f, indent=2) |
| |
| update_symlinks(manifest_name, link_path) |
| bb.plain("Image CVE JSON report stored in: %s" % manifest_name) |
| } |
| |
| ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
| do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
| do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
| |
| def cve_is_ignored(d, cve_data, cve): |
| if cve not in cve_data: |
| return False |
| if cve_data[cve]['abbrev-status'] == "Ignored": |
| return True |
| return False |
| |
| def cve_is_patched(d, cve_data, cve): |
| if cve not in cve_data: |
| return False |
| if cve_data[cve]['abbrev-status'] == "Patched": |
| return True |
| return False |
| |
| def cve_update(d, cve_data, cve, entry): |
| # If no entry, just add it |
| if cve not in cve_data: |
| cve_data[cve] = entry |
| return |
    # If we are updating, there might be a change in the status
    bb.debug(1, "Trying CVE entry update for %s from %s to %s" % (cve, cve_data[cve]['abbrev-status'], entry['abbrev-status']))
| if cve_data[cve]['abbrev-status'] == "Unknown": |
| cve_data[cve] = entry |
| return |
| if cve_data[cve]['abbrev-status'] == entry['abbrev-status']: |
| return |
    # Handle an update such as {'abbrev-status': 'Patched', 'status': 'version-not-in-range'} -> {'abbrev-status': 'Unpatched', 'status': 'version-in-range'}
| if entry['abbrev-status'] == "Unpatched" and cve_data[cve]['abbrev-status'] == "Patched": |
| if entry['status'] == "version-in-range" and cve_data[cve]['status'] == "version-not-in-range": |
| # New result from the scan, vulnerable |
| cve_data[cve] = entry |
| bb.debug("CVE entry %s update from Patched to Unpatched from the scan result" % cve) |
| return |
| if entry['abbrev-status'] == "Patched" and cve_data[cve]['abbrev-status'] == "Unpatched": |
| if entry['status'] == "version-not-in-range" and cve_data[cve]['status'] == "version-in-range": |
| # Range does not match the scan, but we already have a vulnerable match, ignore |
| bb.debug("CVE entry %s update from Patched to Unpatched from the scan result - not applying" % cve) |
| return |
| # If we have an "Ignored", it has a priority |
| if cve_data[cve]['abbrev-status'] == "Ignored": |
| bb.debug("CVE %s not updating because Ignored" % cve) |
| return |
| bb.warn("Unhandled CVE entry update for %s from %s to %s" % (cve, cve_data[cve], entry)) |
| |
| def check_cves(d, cve_data): |
| """ |
    Connect to the NVD database and find unpatched CVEs.
| """ |
| from oe.cve_check import Version, convert_cve_version, decode_cve_status |
| |
| pn = d.getVar("PN") |
| real_pv = d.getVar("PV") |
| suffix = d.getVar("CVE_VERSION_SUFFIX") |
| |
| cves_status = [] |
| cves_in_recipe = False |
| # CVE_PRODUCT can contain more than one product (eg. curl/libcurl) |
| products = d.getVar("CVE_PRODUCT").split() |
| # If this has been unset then we're not scanning for CVEs here (for example, image recipes) |
| if not products: |
| return ([], []) |
| pv = d.getVar("CVE_VERSION").split("+git")[0] |
| |
| # If the recipe has been skipped/ignored we return empty lists |
| if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split(): |
| bb.note("Recipe has been skipped by cve-check") |
| return ([], []) |
| |
| import sqlite3 |
| db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") |
| conn = sqlite3.connect(db_file, uri=True) |
| |
| # For each of the known product names (e.g. curl has CPEs using curl and libcurl)... |
| for product in products: |
| cves_in_product = False |
| if ":" in product: |
| vendor, product = product.split(":", 1) |
| else: |
| vendor = "%" |
| |
| # Find all relevant CVE IDs. |
| cve_cursor = conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor)) |
| for cverow in cve_cursor: |
| cve = cverow[0] |
| |
| # Write status once only for each product |
| if not cves_in_product: |
| cves_status.append([product, True]) |
| cves_in_product = True |
| cves_in_recipe = True |
| |
| if cve_is_ignored(d, cve_data, cve): |
| bb.note("%s-%s ignores %s" % (product, pv, cve)) |
| continue |
| elif cve_is_patched(d, cve_data, cve): |
| bb.note("%s has been patched" % (cve)) |
| continue |
| |
| vulnerable = False |
| ignored = False |
| |
| product_cursor = conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)) |
| for row in product_cursor: |
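                # Each PRODUCTS row is assumed to contain, in order:
                # (ID, PRODUCT, VENDOR, VERSION_START, OPERATOR_START, VERSION_END, OPERATOR_END)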
| (_, _, _, version_start, operator_start, version_end, operator_end) = row |
| #bb.debug(2, "Evaluating row " + str(row)) |
| if cve_is_ignored(d, cve_data, cve): |
| ignored = True |
| |
| version_start = convert_cve_version(version_start) |
| version_end = convert_cve_version(version_end) |
| |
| if (operator_start == '=' and pv == version_start) or version_start == '-': |
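                    # A version_start of '-' means no specific version range
                    # is recorded, so treat every version as affected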
| vulnerable = True |
| else: |
| if operator_start: |
| try: |
| vulnerable_start = (operator_start == '>=' and Version(pv,suffix) >= Version(version_start,suffix)) |
| vulnerable_start |= (operator_start == '>' and Version(pv,suffix) > Version(version_start,suffix)) |
| except: |
| bb.warn("%s: Failed to compare %s %s %s for %s" % |
| (product, pv, operator_start, version_start, cve)) |
| vulnerable_start = False |
| else: |
| vulnerable_start = False |
| |
| if operator_end: |
| try: |
| vulnerable_end = (operator_end == '<=' and Version(pv,suffix) <= Version(version_end,suffix) ) |
| vulnerable_end |= (operator_end == '<' and Version(pv,suffix) < Version(version_end,suffix) ) |
| except: |
| bb.warn("%s: Failed to compare %s %s %s for %s" % |
| (product, pv, operator_end, version_end, cve)) |
| vulnerable_end = False |
| else: |
| vulnerable_end = False |
| |
| if operator_start and operator_end: |
| vulnerable = vulnerable_start and vulnerable_end |
| else: |
| vulnerable = vulnerable_start or vulnerable_end |
| |
| if vulnerable: |
| if ignored: |
| bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv)) |
| cve_update(d, cve_data, cve, {"abbrev-status": "Ignored"}) |
| else: |
| bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve)) |
| cve_update(d, cve_data, cve, {"abbrev-status": "Unpatched", "status": "version-in-range"}) |
| break |
| product_cursor.close() |
| |
| if not vulnerable: |
| bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve)) |
| cve_update(d, cve_data, cve, {"abbrev-status": "Patched", "status": "version-not-in-range"}) |
| cve_cursor.close() |
| |
| if not cves_in_product: |
| bb.note("No CVE records found for product %s, pn %s" % (product, pn)) |
| cves_status.append([product, False]) |
| |
| conn.close() |
| |
| if not cves_in_recipe: |
| bb.note("No CVE records for products in recipe %s" % (pn)) |
| |
| if d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1": |
| unpatched_cves = [cve for cve in cve_data if cve_data[cve]["abbrev-status"] == "Unpatched"] |
| if unpatched_cves: |
| bb.warn("Found unpatched CVE (%s)" % " ".join(unpatched_cves)) |
| |
| return (cve_data, cves_status) |
| |
| def get_cve_info(d, cve_data): |
| """ |
| Get CVE information from the database. |
| """ |
| |
| import sqlite3 |
| |
| db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") |
| conn = sqlite3.connect(db_file, uri=True) |
| |
| for cve in cve_data: |
| cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)) |
| for row in cursor: |
            # The CVE itself has been added already
| if row[0] not in cve_data: |
| bb.note("CVE record %s not present" % row[0]) |
| continue |
| #cve_data[row[0]] = {} |
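            # Map the remaining NVD row columns (summary, CVSS v2/v3/v4 scores,
            # modification date, vector, vector string) onto the report fields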
| cve_data[row[0]]["NVD-summary"] = row[1] |
| cve_data[row[0]]["NVD-scorev2"] = row[2] |
| cve_data[row[0]]["NVD-scorev3"] = row[3] |
| cve_data[row[0]]["NVD-scorev4"] = row[4] |
| cve_data[row[0]]["NVD-modified"] = row[5] |
| cve_data[row[0]]["NVD-vector"] = row[6] |
| cve_data[row[0]]["NVD-vectorString"] = row[7] |
| cursor.close() |
| conn.close() |
| |
| def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file): |
| """ |
    Write CVE information in JSON format: to WORKDIR, and to CVE_CHECK_DIR if
    copying is enabled. If CVE manifest creation is enabled, also write fragment
    files that will be assembled at the end in cve_check_write_rootfs_manifest.
| """ |
| |
| import json |
| |
| write_string = json.dumps(output, indent=2) |
| with open(direct_file, "w") as f: |
| bb.note("Writing file %s with CVE information" % direct_file) |
| f.write(write_string) |
| |
| if d.getVar("CVE_CHECK_COPY_FILES") == "1": |
| bb.utils.mkdirhier(os.path.dirname(deploy_file)) |
| with open(deploy_file, "w") as f: |
| f.write(write_string) |
| |
| if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1": |
| cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") |
| index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH") |
| bb.utils.mkdirhier(cvelogpath) |
| fragment_file = os.path.basename(deploy_file) |
| fragment_path = os.path.join(cvelogpath, fragment_file) |
| with open(fragment_path, "w") as f: |
| f.write(write_string) |
| with open(index_path, "a+") as f: |
| f.write("%s\n" % fragment_path) |
| |
| def cve_write_data_json(d, cve_data, cve_status): |
| """ |
| Prepare CVE data for the JSON format, then write it. |
| """ |
| |
| output = {"version":"1", "package": []} |
| nvd_link = "https://nvd.nist.gov/vuln/detail/" |
| |
| fdir_name = d.getVar("FILE_DIRNAME") |
| layer = fdir_name.split("/")[-3] |
| |
| include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split() |
| exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split() |
| |
| report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1" |
| |
| if exclude_layers and layer in exclude_layers: |
| return |
| |
| if include_layers and layer not in include_layers: |
| return |
| |
| product_data = [] |
| for s in cve_status: |
| p = {"product": s[0], "cvesInRecord": "Yes"} |
| if s[1] == False: |
| p["cvesInRecord"] = "No" |
| product_data.append(p) |
| |
| package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV")) |
| package_data = { |
| "name" : d.getVar("PN"), |
| "layer" : layer, |
| "version" : package_version, |
| "products": product_data |
| } |
| |
| cve_list = [] |
| |
| for cve in sorted(cve_data): |
| if not report_all and (cve_data[cve]["abbrev-status"] == "Patched" or cve_data[cve]["abbrev-status"] == "Ignored"): |
| continue |
| issue_link = "%s%s" % (nvd_link, cve) |
| |
| cve_item = { |
| "id" : cve, |
| "status" : cve_data[cve]["abbrev-status"], |
| "link": issue_link, |
| } |
| if 'NVD-summary' in cve_data[cve]: |
| cve_item["summary"] = cve_data[cve]["NVD-summary"] |
| cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"] |
| cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"] |
| cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"] |
| cve_item["modified"] = cve_data[cve]["NVD-modified"] |
| cve_item["vector"] = cve_data[cve]["NVD-vector"] |
| cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"] |
| if 'status' in cve_data[cve]: |
| cve_item["detail"] = cve_data[cve]["status"] |
| if 'justification' in cve_data[cve]: |
| cve_item["description"] = cve_data[cve]["justification"] |
| if 'resource' in cve_data[cve]: |
| cve_item["patch-file"] = cve_data[cve]["resource"] |
| cve_list.append(cve_item) |
| |
| package_data["issue"] = cve_list |
| output["package"].append(package_data) |
| |
| direct_file = d.getVar("CVE_CHECK_LOG_JSON") |
| deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") |
| manifest_file = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON") |
| |
| cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file) |
| |
| def cve_write_data(d, cve_data, status): |
| """ |
| Write CVE data in each enabled format. |
| """ |
| |
| if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": |
| cve_write_data_json(d, cve_data, status) |