diff options
Diffstat (limited to 'meta/classes/cve-check.bbclass')
| -rw-r--r-- | meta/classes/cve-check.bbclass | 570 |
1 files changed, 0 insertions, 570 deletions
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass deleted file mode 100644 index c63ebd56e1..0000000000 --- a/meta/classes/cve-check.bbclass +++ /dev/null | |||
| @@ -1,570 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | # This class is used to check recipes against public CVEs. | ||
| 8 | # | ||
| 9 | # In order to use this class just inherit the class in the | ||
| 10 | # local.conf file and it will add the cve_check task for | ||
| 11 | # every recipe. The task can be used per recipe, per image, | ||
| 12 | # or using the special cases "world" and "universe". The | ||
| 13 | # cve_check task will print a warning for every unpatched | ||
| 14 | # CVE found and generate a file in the recipe WORKDIR/cve | ||
| 15 | # directory. If an image is built it will generate a report | ||
| 16 | # in DEPLOY_DIR_IMAGE for all the packages used. | ||
| 17 | # | ||
| 18 | # Example: | ||
| 19 | # bitbake -c cve_check openssl | ||
| 20 | # bitbake core-image-sato | ||
| 21 | # bitbake -k -c cve_check universe | ||
| 22 | # | ||
| 23 | # DISCLAIMER | ||
| 24 | # | ||
| 25 | # This class/tool is meant to be used as support and not | ||
| 26 | # the only method to check against CVEs. Running this tool | ||
| 27 | # doesn't guarantee your packages are free of CVEs. | ||
| 28 | |||
# The product name that the CVE database uses defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"

# Possible database sources: NVD1, NVD2, FKIE
NVD_DB_VERSION ?= "FKIE"

# Use different file names for each database source, as they synchronize at different moments, so may be slightly different
CVE_CHECK_DB_FILENAME ?= "${@'nvdcve_2-2.db' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'nvdcve_1-3.db' if d.getVar('NVD_DB_VERSION') == 'NVD1' else 'nvdfkie_1-1.db'}"
CVE_CHECK_DB_FETCHER ?= "${@'cve-update-nvd2-native' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'cve-update-db-native'}"
CVE_CHECK_DB_DIR ?= "${STAGING_DIR}/CVE_CHECK"
CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/${CVE_CHECK_DB_FILENAME}"
CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"

# Location of the build-wide summary reports and the index listing the
# per-recipe JSON fragments that get merged into them.
CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"

# Per-recipe JSON report written into the recipe's temp directory.
CVE_CHECK_LOG_JSON ?= "${T}/cve.json"

CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
CVE_CHECK_MANIFEST_JSON_SUFFIX ?= "json"
CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.${CVE_CHECK_MANIFEST_JSON_SUFFIX}"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"

# Report Patched or Ignored CVEs
CVE_CHECK_REPORT_PATCHED ??= "1"

# Emit a bb.warn for every unpatched CVE found
CVE_CHECK_SHOW_WARNINGS ??= "1"

# Provide JSON output
CVE_CHECK_FORMAT_JSON ??= "1"

# Check for packages without CVEs (no issues or missing product name)
CVE_CHECK_COVERAGE ??= "1"

# Skip CVE Check for packages (PN)
CVE_CHECK_SKIP_RECIPE ?= ""

# Replace NVD DB check status for a given CVE. Each of CVE has to be mentioned
# separately with optional detail and description for this status.
#
# CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows"
# CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally"
#
# Setting the same status and reason for multiple CVEs is possible
# via the CVE_STATUS_GROUPS variable.
#
# CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
#
# CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003"
# CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
# CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004"
# CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
#
# All possible CVE statuses could be found in cve-check-map.conf
# CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
# CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
#
# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
# Keep CVE_CHECK_IGNORE until other layers migrate to new variables
CVE_CHECK_IGNORE ?= ""

# Layers to be excluded
CVE_CHECK_LAYER_EXCLUDELIST ??= ""

# Layers to be included
CVE_CHECK_LAYER_INCLUDELIST ??= ""


# set to "alphabetical" for version using single alphabetical character as increment release
CVE_VERSION_SUFFIX ??= ""
| 105 | |||
# Parse-time setup: expand grouped CVE_STATUS declarations into per-CVE
# entries, and validate the configured database source.
python () {
    from oe.cve_check import extend_cve_status
    extend_cve_status(d)

    nvd_database_type = d.getVar("NVD_DB_VERSION")
    if nvd_database_type not in ("NVD1", "NVD2", "FKIE"):
        # Report the misconfiguration (once) and fall back to the NVD2
        # database/fetcher pair so the dependent variables stay consistent.
        bb.erroronce("Malformed NVD_DB_VERSION, must be one of: NVD1, NVD2, FKIE. Defaulting to NVD2")
        d.setVar("NVD_DB_VERSION", "NVD2")
}
| 115 | |||
def generate_json_report(d, out_path, link_path):
    """
    Merge every per-recipe JSON fragment listed in the summary index file
    into one report at out_path, then point the link_path symlink at it.
    Does nothing when no index file has been produced.
    """
    index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
    if not os.path.exists(index_file):
        return

    import json
    from oe.cve_check import cve_check_merge_jsons, update_symlinks

    bb.note("Generating JSON CVE summary")
    summary = {"version": "1", "package": []}
    with open(index_file) as index:
        # Each line of the index is the path of one fragment file.
        for line in index:
            with open(line.rstrip()) as fragment:
                cve_check_merge_jsons(summary, json.load(fragment))

    # Stable ordering makes reports diffable between builds.
    summary["package"].sort(key=lambda pkg: pkg["name"])

    with open(out_path, "w") as report:
        json.dump(summary, report, indent=2)

    update_symlinks(out_path, link_path)
| 138 | |||
# Event handler: when the build completes, assemble all per-recipe JSON
# fragments into a timestamped summary under CVE_CHECK_SUMMARY_DIR and
# refresh the convenience symlink to the latest one.
# (Fix: dropped the unused `import shutil` and the unused
# `oe.cve_check.update_symlinks` import — symlinking is done inside
# generate_json_report.)
python cve_save_summary_handler () {
    import datetime

    cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME")
    cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
    bb.utils.mkdirhier(cvelogpath)

    # Timestamped file names keep one summary per build; the symlink created
    # by generate_json_report always tracks the most recent.
    timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')

    if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
        json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
        json_summary_name = os.path.join(cvelogpath, "%s-%s.json" % (cve_summary_name, timestamp))
        generate_json_report(d, json_summary_name, json_summary_link_name)
        bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name)
}

addhandler cve_save_summary_handler
cve_save_summary_handler[eventmask] = "bb.event.BuildCompleted"
| 159 | |||
python do_cve_check () {
    """
    Check recipe for patched and unpatched CVEs
    """
    from oe.cve_check import get_patched_cves

    # Shared lock: multiple recipes may read the database concurrently.
    with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True):
        if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
            try:
                # CVEs already addressed via patches or CVE_STATUS entries.
                patched_cves = get_patched_cves(d)
            except FileNotFoundError:
                bb.fatal("Failure in searching patches")
            cve_data, status = check_cves(d, patched_cves)
            # Write a report when CVEs were found, or (with coverage enabled)
            # whenever products were scanned so "no CVE record" is recorded.
            if len(cve_data) or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
                get_cve_info(d, cve_data)
                cve_write_data(d, cve_data, status)
        else:
            bb.note("No CVE database found, skipping CVE check")

}

addtask cve_check before do_build
# Make sure the chosen database fetcher has populated the DB first.
do_cve_check[depends] = "${CVE_CHECK_DB_FETCHER}:do_unpack"
# nostamp: the CVE database changes over time, so always re-run the check.
do_cve_check[nostamp] = "1"
| 184 | |||
python cve_check_cleanup () {
    """
    Delete the file used to gather all the CVE information.
    """
    # 'e' is the event object BitBake passes to event handlers; the datastore
    # is accessed through e.data here.
    # NOTE(review): cve_save_summary_handler also fires on BuildCompleted and
    # reads this index file — behavior relies on handler ordering; confirm.
    bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
}

addhandler cve_check_cleanup
cve_check_cleanup[eventmask] = "bb.event.BuildCompleted"
| 194 | |||
python cve_check_write_rootfs_manifest () {
    """
    Create CVE manifest when building an image
    """

    # NOTE(review): `shutil` and `text_data` below appear unused here.
    import shutil
    import json
    from oe.rootfs import image_list_installed_packages
    from oe.cve_check import cve_check_merge_jsons, update_symlinks

    # Remove any stale per-recipe report for the image recipe itself.
    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
        deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
        if os.path.exists(deploy_file_json):
            bb.utils.remove(deploy_file_json)

    # Create a list of relevant recipes (the PN behind each installed package).
    recipies = set()
    for pkg in list(image_list_installed_packages(d)):
        pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
                                'runtime-reverse', pkg)
        pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
        recipies.add(pkg_data["PN"])

    bb.note("Writing rootfs CVE manifest")
    deploy_dir = d.getVar("IMGDEPLOYDIR")
    link_name = d.getVar("IMAGE_LINK_NAME")

    json_data = {"version":"1", "package": []}
    text_data = ""
    enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"

    # PN is temporarily overridden per recipe below; remember the real value.
    save_pn = d.getVar("PN")

    for pkg in recipies:
        # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate
        # it with the different PN names set each time.
        d.setVar("PN", pkg)

        if enable_json:
            pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
            if os.path.exists(pkgfilepath):
                with open(pkgfilepath) as j:
                    data = json.load(j)
                    cve_check_merge_jsons(json_data, data)

    # Restore the image recipe's own PN.
    d.setVar("PN", save_pn)

    if enable_json:
        manifest_name_suffix = d.getVar("CVE_CHECK_MANIFEST_JSON_SUFFIX")
        manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")

        with open(manifest_name, "w") as f:
            json.dump(json_data, f, indent=2)

        if link_name:
            link_path = os.path.join(deploy_dir, "%s.%s" % (link_name, manifest_name_suffix))
            update_symlinks(manifest_name, link_path)

        bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
}

ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
| 259 | |||
def cve_is_ignored(d, cve_data, cve):
    """Return True when cve_data already records this CVE as Ignored."""
    entry = cve_data.get(cve)
    return entry is not None and entry['abbrev-status'] == "Ignored"
| 266 | |||
def cve_is_patched(d, cve_data, cve):
    """Return True when cve_data already records this CVE as Patched."""
    entry = cve_data.get(cve)
    return entry is not None and entry['abbrev-status'] == "Patched"
| 273 | |||
def cve_update(d, cve_data, cve, entry):
    """
    Merge a new status `entry` for `cve` into `cve_data`, resolving
    conflicts between an existing record and the new one.

    Precedence (checked in order): first sighting wins trivially; "Unknown"
    is always replaced; equal statuses are a no-op; a scan result upgrading
    Patched->Unpatched is applied; a range downgrade Unpatched->Patched is
    not; "Ignored" always sticks. Anything else is warned about.
    """
    # If no entry, just add it
    if cve not in cve_data:
        cve_data[cve] = entry
        return
    # If we are updating, there might be change in the status
    bb.debug(1, "Trying CVE entry update for %s from %s to %s" % (cve, cve_data[cve]['abbrev-status'], entry['abbrev-status']))
    if cve_data[cve]['abbrev-status'] == "Unknown":
        cve_data[cve] = entry
        return
    if cve_data[cve]['abbrev-status'] == entry['abbrev-status']:
        return
    # Update like in {'abbrev-status': 'Patched', 'status': 'version-not-in-range'} to {'abbrev-status': 'Unpatched', 'status': 'version-in-range'}
    if entry['abbrev-status'] == "Unpatched" and cve_data[cve]['abbrev-status'] == "Patched":
        if entry['status'] == "version-in-range" and cve_data[cve]['status'] == "version-not-in-range":
            # New result from the scan, vulnerable
            cve_data[cve] = entry
            bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result" % cve)
            return
    if entry['abbrev-status'] == "Patched" and cve_data[cve]['abbrev-status'] == "Unpatched":
        if entry['status'] == "version-not-in-range" and cve_data[cve]['status'] == "version-in-range":
            # Range does not match the scan, but we already have a vulnerable match, ignore
            bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result - not applying" % cve)
            return
    # If we have an "Ignored", it has a priority
    if cve_data[cve]['abbrev-status'] == "Ignored":
        bb.debug(1, "CVE %s not updating because Ignored" % cve)
        return
    bb.warn("Unhandled CVE entry update for %s from %s to %s" % (cve, cve_data[cve], entry))
| 303 | |||
def check_cves(d, cve_data):
    """
    Connect to the NVD database and find unpatched cves.

    For every product named in CVE_PRODUCT (optionally "vendor:product"),
    look up candidate CVE IDs in the PRODUCTS table, then evaluate each
    version-range row against CVE_VERSION to decide Unpatched / Patched /
    Ignored, merging results into cve_data via cve_update().

    Returns (cve_data, cves_status), where cves_status is a list of
    [product, had_records] pairs used for coverage reporting.
    """
    # NOTE(review): decode_cve_status appears imported but unused here.
    from oe.cve_check import Version, convert_cve_version, decode_cve_status

    pn = d.getVar("PN")
    real_pv = d.getVar("PV")
    suffix = d.getVar("CVE_VERSION_SUFFIX")

    cves_status = []
    cves_in_recipe = False
    # CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
    products = d.getVar("CVE_PRODUCT").split()
    # If this has been unset then we're not scanning for CVEs here (for example, image recipes)
    if not products:
        return ([], [])
    # Strip any "+git<rev>" suffix so version comparison uses the base version.
    pv = d.getVar("CVE_VERSION").split("+git")[0]

    # If the recipe has been skipped/ignored we return empty lists
    if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split():
        bb.note("Recipe has been skipped by cve-check")
        return ([], [])

    import sqlite3
    # Read-only URI connection: the DB is shared between concurrent tasks.
    db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
    conn = sqlite3.connect(db_file, uri=True)

    # For each of the known product names (e.g. curl has CPEs using curl and libcurl)...
    for product in products:
        cves_in_product = False
        if ":" in product:
            vendor, product = product.split(":", 1)
        else:
            # No vendor given: match any vendor via SQL LIKE wildcard.
            vendor = "%"

        # Find all relevant CVE IDs.
        cve_cursor = conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor))
        for cverow in cve_cursor:
            cve = cverow[0]

            # Write status once only for each product
            if not cves_in_product:
                cves_status.append([product, True])
                cves_in_product = True
                cves_in_recipe = True

            if cve_is_ignored(d, cve_data, cve):
                bb.note("%s-%s ignores %s" % (product, pv, cve))
                continue
            elif cve_is_patched(d, cve_data, cve):
                bb.note("%s has been patched" % (cve))
                continue

            vulnerable = False
            ignored = False

            # Row layout: (ID, PRODUCT, VENDOR, version_start, operator_start,
            # version_end, operator_end).
            product_cursor = conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor))
            for row in product_cursor:
                (_, _, _, version_start, operator_start, version_end, operator_end) = row
                #bb.debug(2, "Evaluating row " + str(row))
                if cve_is_ignored(d, cve_data, cve):
                    ignored = True

                version_start = convert_cve_version(version_start)
                version_end = convert_cve_version(version_end)

                # '-' means "all versions"; '=' means an exact-version match.
                if (operator_start == '=' and pv == version_start) or version_start == '-':
                    vulnerable = True
                else:
                    if operator_start:
                        try:
                            vulnerable_start = (operator_start == '>=' and Version(pv,suffix) >= Version(version_start,suffix))
                            vulnerable_start |= (operator_start == '>' and Version(pv,suffix) > Version(version_start,suffix))
                        except:
                            # Unparseable version string: be conservative and
                            # treat the lower bound as not matching.
                            bb.warn("%s: Failed to compare %s %s %s for %s" %
                                    (product, pv, operator_start, version_start, cve))
                            vulnerable_start = False
                    else:
                        vulnerable_start = False

                    if operator_end:
                        try:
                            vulnerable_end = (operator_end == '<=' and Version(pv,suffix) <= Version(version_end,suffix) )
                            vulnerable_end |= (operator_end == '<' and Version(pv,suffix) < Version(version_end,suffix) )
                        except:
                            bb.warn("%s: Failed to compare %s %s %s for %s" %
                                    (product, pv, operator_end, version_end, cve))
                            vulnerable_end = False
                    else:
                        vulnerable_end = False

                    # Both bounds present: must be inside the range.
                    # One bound only: that bound decides.
                    if operator_start and operator_end:
                        vulnerable = vulnerable_start and vulnerable_end
                    else:
                        vulnerable = vulnerable_start or vulnerable_end

                if vulnerable:
                    if ignored:
                        bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
                        cve_update(d, cve_data, cve, {"abbrev-status": "Ignored"})
                    else:
                        bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
                        cve_update(d, cve_data, cve, {"abbrev-status": "Unpatched", "status": "version-in-range"})
                    # One matching range is enough; stop scanning rows.
                    break
            product_cursor.close()

            if not vulnerable:
                bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
                cve_update(d, cve_data, cve, {"abbrev-status": "Patched", "status": "version-not-in-range"})
        cve_cursor.close()

        if not cves_in_product:
            bb.note("No CVE records found for product %s, pn %s" % (product, pn))
            cves_status.append([product, False])

    conn.close()

    if not cves_in_recipe:
        bb.note("No CVE records for products in recipe %s" % (pn))

    if d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
        unpatched_cves = [cve for cve in cve_data if cve_data[cve]["abbrev-status"] == "Unpatched"]
        if unpatched_cves:
            bb.warn("Found unpatched CVE (%s)" % " ".join(unpatched_cves))

    return (cve_data, cves_status)
| 431 | |||
def get_cve_info(d, cve_data):
    """
    Enrich each entry of cve_data with the NVD record fields (summary,
    scores, modification date and vector) read from the local database.
    """
    import sqlite3

    db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
    conn = sqlite3.connect(db_file, uri=True)

    # NVD table columns after ID, in order.
    field_names = ("NVD-summary", "NVD-scorev2", "NVD-scorev3", "NVD-scorev4",
                   "NVD-modified", "NVD-vector", "NVD-vectorString")

    for cve in cve_data:
        cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,))
        for row in cursor:
            # The CVE itself has been added already
            if row[0] not in cve_data:
                bb.note("CVE record %s not present" % row[0])
                continue
            for field, value in zip(field_names, row[1:]):
                cve_data[row[0]][field] = value
        cursor.close()
    conn.close()
| 459 | |||
def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
    """
    Serialize `output` as JSON and write it to direct_file (WORKDIR), to
    deploy_file (CVE_CHECK_DIR) when file copying is enabled, and — when the
    CVE manifest is enabled — to a fragment under CVE_CHECK_SUMMARY_DIR that
    cve_check_write_rootfs_manifest assembles at the end of the build.

    NOTE(review): manifest_file is currently unused; kept for interface
    compatibility.
    """
    import json

    write_string = json.dumps(output, indent=2)

    with open(direct_file, "w") as direct:
        bb.note("Writing file %s with CVE information" % direct_file)
        direct.write(write_string)

    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
        bb.utils.mkdirhier(os.path.dirname(deploy_file))
        with open(deploy_file, "w") as deploy:
            deploy.write(write_string)

    if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
        cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
        index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
        bb.utils.mkdirhier(cvelogpath)
        fragment_path = os.path.join(cvelogpath, os.path.basename(deploy_file))
        with open(fragment_path, "w") as fragment:
            fragment.write(write_string)
        # Record the fragment location so the summary handler can find it.
        with open(index_path, "a+") as index:
            index.write("%s\n" % fragment_path)
| 489 | |||
def cve_write_data_json(d, cve_data, cve_status):
    """
    Prepare CVE data for the JSON format, then write it.

    Builds the per-recipe report dict (package metadata, per-product
    coverage, and one issue entry per reported CVE) and hands it to
    cve_check_write_json_output(). Returns early without writing anything
    when the recipe's layer is filtered out by the include/exclude lists.
    """

    output = {"version":"1", "package": []}
    nvd_link = "https://nvd.nist.gov/vuln/detail/"

    # The layer name is the third-from-last component of the recipe's
    # directory path (e.g. .../meta-foo/recipes-bar/baz -> meta-foo).
    fdir_name = d.getVar("FILE_DIRNAME")
    layer = fdir_name.split("/")[-3]

    include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
    exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()

    # When disabled, Patched/Ignored CVEs are omitted from the report.
    report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"

    if exclude_layers and layer in exclude_layers:
        return

    if include_layers and layer not in include_layers:
        return

    # Per-product coverage flags, from check_cves()'s [product, bool] pairs.
    product_data = []
    for s in cve_status:
        p = {"product": s[0], "cvesInRecord": "Yes"}
        if s[1] == False:
            p["cvesInRecord"] = "No"
        product_data.append(p)

    package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
    package_data = {
        "name" : d.getVar("PN"),
        "layer" : layer,
        "version" : package_version,
        "products": product_data
    }

    cve_list = []

    for cve in sorted(cve_data):
        if not report_all and (cve_data[cve]["abbrev-status"] == "Patched" or cve_data[cve]["abbrev-status"] == "Ignored"):
            continue
        issue_link = "%s%s" % (nvd_link, cve)

        cve_item = {
            "id" : cve,
            "status" : cve_data[cve]["abbrev-status"],
            "link": issue_link,
        }
        # NVD fields are only present when get_cve_info() found a DB record.
        if 'NVD-summary' in cve_data[cve]:
            cve_item["summary"] = cve_data[cve]["NVD-summary"]
            cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"]
            cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"]
            cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"]
            cve_item["modified"] = cve_data[cve]["NVD-modified"]
            cve_item["vector"] = cve_data[cve]["NVD-vector"]
            cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"]
        if 'status' in cve_data[cve]:
            cve_item["detail"] = cve_data[cve]["status"]
        if 'justification' in cve_data[cve]:
            cve_item["description"] = cve_data[cve]["justification"]
        if 'resource' in cve_data[cve]:
            cve_item["patch-file"] = cve_data[cve]["resource"]
        cve_list.append(cve_item)

    package_data["issue"] = cve_list
    output["package"].append(package_data)

    direct_file = d.getVar("CVE_CHECK_LOG_JSON")
    deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
    manifest_file = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")

    cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
| 563 | |||
def cve_write_data(d, cve_data, status):
    """
    Dispatch the collected CVE data to every enabled report format.
    JSON is currently the only supported format.
    """
    json_enabled = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
    if json_enabled:
        cve_write_data_json(d, cve_data, status)
