diff options
Diffstat (limited to 'meta/classes/cve-check.bbclass')
-rw-r--r-- | meta/classes/cve-check.bbclass | 575 |
1 files changed, 371 insertions, 204 deletions
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass index 112ee3379d..c63ebd56e1 100644 --- a/meta/classes/cve-check.bbclass +++ b/meta/classes/cve-check.bbclass | |||
@@ -1,3 +1,9 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
1 | # This class is used to check recipes against public CVEs. | 7 | # This class is used to check recipes against public CVEs. |
2 | # | 8 | # |
3 | # In order to use this class just inherit the class in the | 9 | # In order to use this class just inherit the class in the |
@@ -20,71 +26,132 @@ | |||
20 | # the only method to check against CVEs. Running this tool | 26 | # the only method to check against CVEs. Running this tool |
21 | # doesn't guarantee your packages are free of CVEs. | 27 | # doesn't guarantee your packages are free of CVEs. |
22 | 28 | ||
23 | # The product name that the CVE database uses. Defaults to BPN, but may need to | 29 | # The product name that the CVE database uses defaults to BPN, but may need to |
24 | # be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff). | 30 | # be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff). |
25 | CVE_PRODUCT ??= "${BPN}" | 31 | CVE_PRODUCT ??= "${BPN}" |
26 | CVE_VERSION ??= "${PV}" | 32 | CVE_VERSION ??= "${PV}" |
27 | 33 | ||
28 | CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK" | 34 | # Possible database sources: NVD1, NVD2, FKIE |
29 | CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.1.db" | 35 | NVD_DB_VERSION ?= "FKIE" |
36 | |||
37 | # Use different file names for each database source, as they synchronize at different moments, so may be slightly different | ||
38 | CVE_CHECK_DB_FILENAME ?= "${@'nvdcve_2-2.db' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'nvdcve_1-3.db' if d.getVar('NVD_DB_VERSION') == 'NVD1' else 'nvdfkie_1-1.db'}" | ||
39 | CVE_CHECK_DB_FETCHER ?= "${@'cve-update-nvd2-native' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'cve-update-db-native'}" | ||
40 | CVE_CHECK_DB_DIR ?= "${STAGING_DIR}/CVE_CHECK" | ||
41 | CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/${CVE_CHECK_DB_FILENAME}" | ||
30 | CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock" | 42 | CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock" |
31 | 43 | ||
32 | CVE_CHECK_LOG ?= "${T}/cve.log" | ||
33 | CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check" | ||
34 | CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve" | 44 | CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve" |
35 | CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary" | 45 | CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary" |
36 | CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}" | 46 | CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json" |
47 | CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt" | ||
48 | |||
49 | CVE_CHECK_LOG_JSON ?= "${T}/cve.json" | ||
37 | 50 | ||
38 | CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve" | 51 | CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve" |
39 | CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}" | 52 | CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json" |
40 | CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve" | 53 | CVE_CHECK_MANIFEST_JSON_SUFFIX ?= "json" |
54 | CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.${CVE_CHECK_MANIFEST_JSON_SUFFIX}" | ||
41 | CVE_CHECK_COPY_FILES ??= "1" | 55 | CVE_CHECK_COPY_FILES ??= "1" |
42 | CVE_CHECK_CREATE_MANIFEST ??= "1" | 56 | CVE_CHECK_CREATE_MANIFEST ??= "1" |
43 | 57 | ||
58 | # Report Patched or Ignored CVEs | ||
44 | CVE_CHECK_REPORT_PATCHED ??= "1" | 59 | CVE_CHECK_REPORT_PATCHED ??= "1" |
45 | 60 | ||
46 | # Whitelist for packages (PN) | 61 | CVE_CHECK_SHOW_WARNINGS ??= "1" |
47 | CVE_CHECK_PN_WHITELIST ?= "" | 62 | |
63 | # Provide JSON output | ||
64 | CVE_CHECK_FORMAT_JSON ??= "1" | ||
65 | |||
66 | # Check for packages without CVEs (no issues or missing product name) | ||
67 | CVE_CHECK_COVERAGE ??= "1" | ||
68 | |||
69 | # Skip CVE Check for packages (PN) | ||
70 | CVE_CHECK_SKIP_RECIPE ?= "" | ||
48 | 71 | ||
49 | # Whitelist for CVE. If a CVE is found, then it is considered patched. | 72 | # Replace NVD DB check status for a given CVE. Each of CVE has to be mentioned |
50 | # The value is a string containing space separated CVE values: | 73 | # separately with optional detail and description for this status. |
51 | # | 74 | # |
52 | # CVE_CHECK_WHITELIST = 'CVE-2014-2524 CVE-2018-1234' | 75 | # CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows" |
76 | # CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally" | ||
53 | # | 77 | # |
54 | CVE_CHECK_WHITELIST ?= "" | 78 | # Settings the same status and reason for multiple CVEs is possible |
79 | # via CVE_STATUS_GROUPS variable. | ||
80 | # | ||
81 | # CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED" | ||
82 | # | ||
83 | # CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003" | ||
84 | # CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows" | ||
85 | # CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004" | ||
86 | # CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally" | ||
87 | # | ||
88 | # All possible CVE statuses could be found in cve-check-map.conf | ||
89 | # CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored" | ||
90 | # CVE_CHECK_STATUSMAP[fixed-version] = "Patched" | ||
91 | # | ||
92 | # CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead. | ||
93 | # Keep CVE_CHECK_IGNORE until other layers migrate to new variables | ||
94 | CVE_CHECK_IGNORE ?= "" | ||
55 | 95 | ||
56 | # Layers to be excluded | 96 | # Layers to be excluded |
57 | CVE_CHECK_LAYER_EXCLUDELIST ??= "" | 97 | CVE_CHECK_LAYER_EXCLUDELIST ??= "" |
58 | 98 | ||
59 | # Layers to be included | 99 | # Layers to be included |
60 | CVE_CHECK_LAYER_INCLUDELIST ??= "" | 100 | CVE_CHECK_LAYER_INCLUDELIST ??= "" |
61 | 101 | ||
62 | 102 | ||
63 | # set to "alphabetical" for version using single alphabetical character as increament release | 103 | # set to "alphabetical" for version using single alphabetical character as increment release |
64 | CVE_VERSION_SUFFIX ??= "" | 104 | CVE_VERSION_SUFFIX ??= "" |
65 | 105 | ||
106 | python () { | ||
107 | from oe.cve_check import extend_cve_status | ||
108 | extend_cve_status(d) | ||
109 | |||
110 | nvd_database_type = d.getVar("NVD_DB_VERSION") | ||
111 | if nvd_database_type not in ("NVD1", "NVD2", "FKIE"): | ||
112 | bb.erroronce("Malformed NVD_DB_VERSION, must be one of: NVD1, NVD2, FKIE. Defaulting to NVD2") | ||
113 | d.setVar("NVD_DB_VERSION", "NVD2") | ||
114 | } | ||
115 | |||
116 | def generate_json_report(d, out_path, link_path): | ||
117 | if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")): | ||
118 | import json | ||
119 | from oe.cve_check import cve_check_merge_jsons, update_symlinks | ||
120 | |||
121 | bb.note("Generating JSON CVE summary") | ||
122 | index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH") | ||
123 | summary = {"version":"1", "package": []} | ||
124 | with open(index_file) as f: | ||
125 | filename = f.readline() | ||
126 | while filename: | ||
127 | with open(filename.rstrip()) as j: | ||
128 | data = json.load(j) | ||
129 | cve_check_merge_jsons(summary, data) | ||
130 | filename = f.readline() | ||
131 | |||
132 | summary["package"].sort(key=lambda d: d['name']) | ||
133 | |||
134 | with open(out_path, "w") as f: | ||
135 | json.dump(summary, f, indent=2) | ||
136 | |||
137 | update_symlinks(out_path, link_path) | ||
138 | |||
66 | python cve_save_summary_handler () { | 139 | python cve_save_summary_handler () { |
67 | import shutil | 140 | import shutil |
68 | import datetime | 141 | import datetime |
69 | 142 | from oe.cve_check import update_symlinks | |
70 | cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE") | ||
71 | 143 | ||
72 | cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME") | 144 | cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME") |
73 | cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") | 145 | cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") |
74 | bb.utils.mkdirhier(cvelogpath) | 146 | bb.utils.mkdirhier(cvelogpath) |
75 | 147 | ||
76 | timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S') | 148 | timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S') |
77 | cve_summary_file = os.path.join(cvelogpath, "%s-%s.txt" % (cve_summary_name, timestamp)) | ||
78 | 149 | ||
79 | if os.path.exists(cve_tmp_file): | 150 | if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": |
80 | shutil.copyfile(cve_tmp_file, cve_summary_file) | 151 | json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")) |
81 | 152 | json_summary_name = os.path.join(cvelogpath, "%s-%s.json" % (cve_summary_name, timestamp)) | |
82 | if cve_summary_file and os.path.exists(cve_summary_file): | 153 | generate_json_report(d, json_summary_name, json_summary_link_name) |
83 | cvefile_link = os.path.join(cvelogpath, cve_summary_name) | 154 | bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name) |
84 | |||
85 | if os.path.exists(os.path.realpath(cvefile_link)): | ||
86 | os.remove(cvefile_link) | ||
87 | os.symlink(os.path.basename(cve_summary_file), cvefile_link) | ||
88 | } | 155 | } |
89 | 156 | ||
90 | addhandler cve_save_summary_handler | 157 | addhandler cve_save_summary_handler |
@@ -94,34 +161,36 @@ python do_cve_check () { | |||
94 | """ | 161 | """ |
95 | Check recipe for patched and unpatched CVEs | 162 | Check recipe for patched and unpatched CVEs |
96 | """ | 163 | """ |
164 | from oe.cve_check import get_patched_cves | ||
97 | 165 | ||
98 | if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")): | 166 | with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True): |
99 | try: | 167 | if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")): |
100 | patched_cves = get_patches_cves(d) | 168 | try: |
101 | except FileNotFoundError: | 169 | patched_cves = get_patched_cves(d) |
102 | bb.fatal("Failure in searching patches") | 170 | except FileNotFoundError: |
103 | whitelisted, patched, unpatched = check_cves(d, patched_cves) | 171 | bb.fatal("Failure in searching patches") |
104 | if patched or unpatched: | 172 | cve_data, status = check_cves(d, patched_cves) |
105 | cve_data = get_cve_info(d, patched + unpatched) | 173 | if len(cve_data) or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status): |
106 | cve_write_data(d, patched, unpatched, whitelisted, cve_data) | 174 | get_cve_info(d, cve_data) |
107 | else: | 175 | cve_write_data(d, cve_data, status) |
108 | bb.note("No CVE database found, skipping CVE check") | 176 | else: |
177 | bb.note("No CVE database found, skipping CVE check") | ||
109 | 178 | ||
110 | } | 179 | } |
111 | 180 | ||
112 | addtask cve_check before do_build after do_fetch | 181 | addtask cve_check before do_build |
113 | do_cve_check[depends] = "cve-update-db-native:do_fetch" | 182 | do_cve_check[depends] = "${CVE_CHECK_DB_FETCHER}:do_unpack" |
114 | do_cve_check[nostamp] = "1" | 183 | do_cve_check[nostamp] = "1" |
115 | 184 | ||
116 | python cve_check_cleanup () { | 185 | python cve_check_cleanup () { |
117 | """ | 186 | """ |
118 | Delete the file used to gather all the CVE information. | 187 | Delete the file used to gather all the CVE information. |
119 | """ | 188 | """ |
120 | bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE")) | 189 | bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")) |
121 | } | 190 | } |
122 | 191 | ||
123 | addhandler cve_check_cleanup | 192 | addhandler cve_check_cleanup |
124 | cve_check_cleanup[eventmask] = "bb.cooker.CookerExit" | 193 | cve_check_cleanup[eventmask] = "bb.event.BuildCompleted" |
125 | 194 | ||
126 | python cve_check_write_rootfs_manifest () { | 195 | python cve_check_write_rootfs_manifest () { |
127 | """ | 196 | """ |
@@ -129,116 +198,132 @@ python cve_check_write_rootfs_manifest () { | |||
129 | """ | 198 | """ |
130 | 199 | ||
131 | import shutil | 200 | import shutil |
201 | import json | ||
202 | from oe.rootfs import image_list_installed_packages | ||
203 | from oe.cve_check import cve_check_merge_jsons, update_symlinks | ||
132 | 204 | ||
133 | if d.getVar("CVE_CHECK_COPY_FILES") == "1": | 205 | if d.getVar("CVE_CHECK_COPY_FILES") == "1": |
134 | deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE") | 206 | deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") |
135 | if os.path.exists(deploy_file): | 207 | if os.path.exists(deploy_file_json): |
136 | bb.utils.remove(deploy_file) | 208 | bb.utils.remove(deploy_file_json) |
137 | 209 | ||
138 | if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")): | 210 | # Create a list of relevant recipes |
139 | bb.note("Writing rootfs CVE manifest") | 211 | recipies = set() |
140 | deploy_dir = d.getVar("DEPLOY_DIR_IMAGE") | 212 | for pkg in list(image_list_installed_packages(d)): |
141 | link_name = d.getVar("IMAGE_LINK_NAME") | 213 | pkg_info = os.path.join(d.getVar('PKGDATA_DIR'), |
142 | manifest_name = d.getVar("CVE_CHECK_MANIFEST") | 214 | 'runtime-reverse', pkg) |
143 | cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE") | 215 | pkg_data = oe.packagedata.read_pkgdatafile(pkg_info) |
144 | 216 | recipies.add(pkg_data["PN"]) | |
145 | shutil.copyfile(cve_tmp_file, manifest_name) | 217 | |
146 | 218 | bb.note("Writing rootfs CVE manifest") | |
147 | if manifest_name and os.path.exists(manifest_name): | 219 | deploy_dir = d.getVar("IMGDEPLOYDIR") |
148 | manifest_link = os.path.join(deploy_dir, "%s.cve" % link_name) | 220 | link_name = d.getVar("IMAGE_LINK_NAME") |
149 | # If we already have another manifest, update symlinks | 221 | |
150 | if os.path.exists(os.path.realpath(manifest_link)): | 222 | json_data = {"version":"1", "package": []} |
151 | os.remove(manifest_link) | 223 | text_data = "" |
152 | os.symlink(os.path.basename(manifest_name), manifest_link) | 224 | enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1" |
153 | bb.plain("Image CVE report stored in: %s" % manifest_name) | 225 | |
226 | save_pn = d.getVar("PN") | ||
227 | |||
228 | for pkg in recipies: | ||
229 | # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate | ||
230 | # it with the different PN names set each time. | ||
231 | d.setVar("PN", pkg) | ||
232 | |||
233 | if enable_json: | ||
234 | pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") | ||
235 | if os.path.exists(pkgfilepath): | ||
236 | with open(pkgfilepath) as j: | ||
237 | data = json.load(j) | ||
238 | cve_check_merge_jsons(json_data, data) | ||
239 | |||
240 | d.setVar("PN", save_pn) | ||
241 | |||
242 | if enable_json: | ||
243 | manifest_name_suffix = d.getVar("CVE_CHECK_MANIFEST_JSON_SUFFIX") | ||
244 | manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON") | ||
245 | |||
246 | with open(manifest_name, "w") as f: | ||
247 | json.dump(json_data, f, indent=2) | ||
248 | |||
249 | if link_name: | ||
250 | link_path = os.path.join(deploy_dir, "%s.%s" % (link_name, manifest_name_suffix)) | ||
251 | update_symlinks(manifest_name, link_path) | ||
252 | |||
253 | bb.plain("Image CVE JSON report stored in: %s" % manifest_name) | ||
154 | } | 254 | } |
155 | 255 | ||
156 | ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" | 256 | ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
157 | do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" | 257 | do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
258 | do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" | ||
259 | |||
260 | def cve_is_ignored(d, cve_data, cve): | ||
261 | if cve not in cve_data: | ||
262 | return False | ||
263 | if cve_data[cve]['abbrev-status'] == "Ignored": | ||
264 | return True | ||
265 | return False | ||
266 | |||
267 | def cve_is_patched(d, cve_data, cve): | ||
268 | if cve not in cve_data: | ||
269 | return False | ||
270 | if cve_data[cve]['abbrev-status'] == "Patched": | ||
271 | return True | ||
272 | return False | ||
273 | |||
274 | def cve_update(d, cve_data, cve, entry): | ||
275 | # If no entry, just add it | ||
276 | if cve not in cve_data: | ||
277 | cve_data[cve] = entry | ||
278 | return | ||
279 | # If we are updating, there might be change in the status | ||
280 | bb.debug(1, "Trying CVE entry update for %s from %s to %s" % (cve, cve_data[cve]['abbrev-status'], entry['abbrev-status'])) | ||
281 | if cve_data[cve]['abbrev-status'] == "Unknown": | ||
282 | cve_data[cve] = entry | ||
283 | return | ||
284 | if cve_data[cve]['abbrev-status'] == entry['abbrev-status']: | ||
285 | return | ||
286 | # Update like in {'abbrev-status': 'Patched', 'status': 'version-not-in-range'} to {'abbrev-status': 'Unpatched', 'status': 'version-in-range'} | ||
287 | if entry['abbrev-status'] == "Unpatched" and cve_data[cve]['abbrev-status'] == "Patched": | ||
288 | if entry['status'] == "version-in-range" and cve_data[cve]['status'] == "version-not-in-range": | ||
289 | # New result from the scan, vulnerable | ||
290 | cve_data[cve] = entry | ||
291 | bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result" % cve) | ||
292 | return | ||
293 | if entry['abbrev-status'] == "Patched" and cve_data[cve]['abbrev-status'] == "Unpatched": | ||
294 | if entry['status'] == "version-not-in-range" and cve_data[cve]['status'] == "version-in-range": | ||
295 | # Range does not match the scan, but we already have a vulnerable match, ignore | ||
296 | bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result - not applying" % cve) | ||
297 | return | ||
298 | # If we have an "Ignored", it has a priority | ||
299 | if cve_data[cve]['abbrev-status'] == "Ignored": | ||
300 | bb.debug(1, "CVE %s not updating because Ignored" % cve) | ||
301 | return | ||
302 | bb.warn("Unhandled CVE entry update for %s from %s to %s" % (cve, cve_data[cve], entry)) | ||
158 | 303 | ||
159 | def get_patches_cves(d): | 304 | def check_cves(d, cve_data): |
160 | """ | ||
161 | Get patches that solve CVEs using the "CVE: " tag. | ||
162 | """ | ||
163 | |||
164 | import re | ||
165 | |||
166 | pn = d.getVar("PN") | ||
167 | cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") | ||
168 | |||
169 | # Matches last CVE-1234-211432 in the file name, also if written | ||
170 | # with small letters. Not supporting multiple CVE id's in a single | ||
171 | # file name. | ||
172 | cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)") | ||
173 | |||
174 | patched_cves = set() | ||
175 | bb.debug(2, "Looking for patches that solves CVEs for %s" % pn) | ||
176 | for url in src_patches(d): | ||
177 | patch_file = bb.fetch.decodeurl(url)[2] | ||
178 | |||
179 | if not os.path.isfile(patch_file): | ||
180 | bb.error("File Not found: %s" % patch_file) | ||
181 | raise FileNotFoundError | ||
182 | |||
183 | # Check patch file name for CVE ID | ||
184 | fname_match = cve_file_name_match.search(patch_file) | ||
185 | if fname_match: | ||
186 | cve = fname_match.group(1).upper() | ||
187 | patched_cves.add(cve) | ||
188 | bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file)) | ||
189 | |||
190 | with open(patch_file, "r", encoding="utf-8") as f: | ||
191 | try: | ||
192 | patch_text = f.read() | ||
193 | except UnicodeDecodeError: | ||
194 | bb.debug(1, "Failed to read patch %s using UTF-8 encoding" | ||
195 | " trying with iso8859-1" % patch_file) | ||
196 | f.close() | ||
197 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
198 | patch_text = f.read() | ||
199 | |||
200 | # Search for one or more "CVE: " lines | ||
201 | text_match = False | ||
202 | for match in cve_match.finditer(patch_text): | ||
203 | # Get only the CVEs without the "CVE: " tag | ||
204 | cves = patch_text[match.start()+5:match.end()] | ||
205 | for cve in cves.split(): | ||
206 | bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) | ||
207 | patched_cves.add(cve) | ||
208 | text_match = True | ||
209 | |||
210 | if not fname_match and not text_match: | ||
211 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | ||
212 | |||
213 | return patched_cves | ||
214 | |||
215 | def check_cves(d, patched_cves): | ||
216 | """ | 305 | """ |
217 | Connect to the NVD database and find unpatched cves. | 306 | Connect to the NVD database and find unpatched cves. |
218 | """ | 307 | """ |
219 | from oe.cve_check import Version | 308 | from oe.cve_check import Version, convert_cve_version, decode_cve_status |
220 | 309 | ||
221 | pn = d.getVar("PN") | 310 | pn = d.getVar("PN") |
222 | real_pv = d.getVar("PV") | 311 | real_pv = d.getVar("PV") |
223 | suffix = d.getVar("CVE_VERSION_SUFFIX") | 312 | suffix = d.getVar("CVE_VERSION_SUFFIX") |
224 | 313 | ||
225 | cves_unpatched = [] | 314 | cves_status = [] |
315 | cves_in_recipe = False | ||
226 | # CVE_PRODUCT can contain more than one product (eg. curl/libcurl) | 316 | # CVE_PRODUCT can contain more than one product (eg. curl/libcurl) |
227 | products = d.getVar("CVE_PRODUCT").split() | 317 | products = d.getVar("CVE_PRODUCT").split() |
228 | # If this has been unset then we're not scanning for CVEs here (for example, image recipes) | 318 | # If this has been unset then we're not scanning for CVEs here (for example, image recipes) |
229 | if not products: | 319 | if not products: |
230 | return ([], [], []) | 320 | return ([], []) |
231 | pv = d.getVar("CVE_VERSION").split("+git")[0] | 321 | pv = d.getVar("CVE_VERSION").split("+git")[0] |
232 | 322 | ||
233 | # If the recipe has been whitlisted we return empty lists | 323 | # If the recipe has been skipped/ignored we return empty lists |
234 | if pn in d.getVar("CVE_CHECK_PN_WHITELIST").split(): | 324 | if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split(): |
235 | bb.note("Recipe has been whitelisted, skipping check") | 325 | bb.note("Recipe has been skipped by cve-check") |
236 | return ([], [], []) | 326 | return ([], []) |
237 | |||
238 | old_cve_whitelist = d.getVar("CVE_CHECK_CVE_WHITELIST") | ||
239 | if old_cve_whitelist: | ||
240 | bb.warn("CVE_CHECK_CVE_WHITELIST is deprecated, please use CVE_CHECK_WHITELIST.") | ||
241 | cve_whitelist = d.getVar("CVE_CHECK_WHITELIST").split() | ||
242 | 327 | ||
243 | import sqlite3 | 328 | import sqlite3 |
244 | db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") | 329 | db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") |
@@ -246,28 +331,42 @@ def check_cves(d, patched_cves): | |||
246 | 331 | ||
247 | # For each of the known product names (e.g. curl has CPEs using curl and libcurl)... | 332 | # For each of the known product names (e.g. curl has CPEs using curl and libcurl)... |
248 | for product in products: | 333 | for product in products: |
334 | cves_in_product = False | ||
249 | if ":" in product: | 335 | if ":" in product: |
250 | vendor, product = product.split(":", 1) | 336 | vendor, product = product.split(":", 1) |
251 | else: | 337 | else: |
252 | vendor = "%" | 338 | vendor = "%" |
253 | 339 | ||
254 | # Find all relevant CVE IDs. | 340 | # Find all relevant CVE IDs. |
255 | for cverow in conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor)): | 341 | cve_cursor = conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor)) |
342 | for cverow in cve_cursor: | ||
256 | cve = cverow[0] | 343 | cve = cverow[0] |
257 | 344 | ||
258 | if cve in cve_whitelist: | 345 | # Write status once only for each product |
259 | bb.note("%s-%s has been whitelisted for %s" % (product, pv, cve)) | 346 | if not cves_in_product: |
260 | # TODO: this should be in the report as 'whitelisted' | 347 | cves_status.append([product, True]) |
261 | patched_cves.add(cve) | 348 | cves_in_product = True |
349 | cves_in_recipe = True | ||
350 | |||
351 | if cve_is_ignored(d, cve_data, cve): | ||
352 | bb.note("%s-%s ignores %s" % (product, pv, cve)) | ||
262 | continue | 353 | continue |
263 | elif cve in patched_cves: | 354 | elif cve_is_patched(d, cve_data, cve): |
264 | bb.note("%s has been patched" % (cve)) | 355 | bb.note("%s has been patched" % (cve)) |
265 | continue | 356 | continue |
266 | 357 | ||
267 | vulnerable = False | 358 | vulnerable = False |
268 | for row in conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)): | 359 | ignored = False |
360 | |||
361 | product_cursor = conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)) | ||
362 | for row in product_cursor: | ||
269 | (_, _, _, version_start, operator_start, version_end, operator_end) = row | 363 | (_, _, _, version_start, operator_start, version_end, operator_end) = row |
270 | #bb.debug(2, "Evaluating row " + str(row)) | 364 | #bb.debug(2, "Evaluating row " + str(row)) |
365 | if cve_is_ignored(d, cve_data, cve): | ||
366 | ignored = True | ||
367 | |||
368 | version_start = convert_cve_version(version_start) | ||
369 | version_end = convert_cve_version(version_end) | ||
271 | 370 | ||
272 | if (operator_start == '=' and pv == version_start) or version_start == '-': | 371 | if (operator_start == '=' and pv == version_start) or version_start == '-': |
273 | vulnerable = True | 372 | vulnerable = True |
@@ -300,104 +399,172 @@ def check_cves(d, patched_cves): | |||
300 | vulnerable = vulnerable_start or vulnerable_end | 399 | vulnerable = vulnerable_start or vulnerable_end |
301 | 400 | ||
302 | if vulnerable: | 401 | if vulnerable: |
303 | bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve)) | 402 | if ignored: |
304 | cves_unpatched.append(cve) | 403 | bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv)) |
404 | cve_update(d, cve_data, cve, {"abbrev-status": "Ignored"}) | ||
405 | else: | ||
406 | bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve)) | ||
407 | cve_update(d, cve_data, cve, {"abbrev-status": "Unpatched", "status": "version-in-range"}) | ||
305 | break | 408 | break |
409 | product_cursor.close() | ||
306 | 410 | ||
307 | if not vulnerable: | 411 | if not vulnerable: |
308 | bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve)) | 412 | bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve)) |
309 | # TODO: not patched but not vulnerable | 413 | cve_update(d, cve_data, cve, {"abbrev-status": "Patched", "status": "version-not-in-range"}) |
310 | patched_cves.add(cve) | 414 | cve_cursor.close() |
415 | |||
416 | if not cves_in_product: | ||
417 | bb.note("No CVE records found for product %s, pn %s" % (product, pn)) | ||
418 | cves_status.append([product, False]) | ||
311 | 419 | ||
312 | conn.close() | 420 | conn.close() |
313 | 421 | ||
314 | return (list(cve_whitelist), list(patched_cves), cves_unpatched) | 422 | if not cves_in_recipe: |
423 | bb.note("No CVE records for products in recipe %s" % (pn)) | ||
424 | |||
425 | if d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1": | ||
426 | unpatched_cves = [cve for cve in cve_data if cve_data[cve]["abbrev-status"] == "Unpatched"] | ||
427 | if unpatched_cves: | ||
428 | bb.warn("Found unpatched CVE (%s)" % " ".join(unpatched_cves)) | ||
315 | 429 | ||
316 | def get_cve_info(d, cves): | 430 | return (cve_data, cves_status) |
431 | |||
432 | def get_cve_info(d, cve_data): | ||
317 | """ | 433 | """ |
318 | Get CVE information from the database. | 434 | Get CVE information from the database. |
319 | """ | 435 | """ |
320 | 436 | ||
321 | import sqlite3 | 437 | import sqlite3 |
322 | 438 | ||
323 | cve_data = {} | 439 | db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") |
324 | conn = sqlite3.connect(d.getVar("CVE_CHECK_DB_FILE")) | 440 | conn = sqlite3.connect(db_file, uri=True) |
325 | |||
326 | for cve in cves: | ||
327 | for row in conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)): | ||
328 | cve_data[row[0]] = {} | ||
329 | cve_data[row[0]]["summary"] = row[1] | ||
330 | cve_data[row[0]]["scorev2"] = row[2] | ||
331 | cve_data[row[0]]["scorev3"] = row[3] | ||
332 | cve_data[row[0]]["modified"] = row[4] | ||
333 | cve_data[row[0]]["vector"] = row[5] | ||
334 | 441 | ||
442 | for cve in cve_data: | ||
443 | cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)) | ||
444 | for row in cursor: | ||
445 | # The CVE itself has been added already | ||
446 | if row[0] not in cve_data: | ||
447 | bb.note("CVE record %s not present" % row[0]) | ||
448 | continue | ||
449 | #cve_data[row[0]] = {} | ||
450 | cve_data[row[0]]["NVD-summary"] = row[1] | ||
451 | cve_data[row[0]]["NVD-scorev2"] = row[2] | ||
452 | cve_data[row[0]]["NVD-scorev3"] = row[3] | ||
453 | cve_data[row[0]]["NVD-scorev4"] = row[4] | ||
454 | cve_data[row[0]]["NVD-modified"] = row[5] | ||
455 | cve_data[row[0]]["NVD-vector"] = row[6] | ||
456 | cve_data[row[0]]["NVD-vectorString"] = row[7] | ||
457 | cursor.close() | ||
335 | conn.close() | 458 | conn.close() |
336 | return cve_data | ||
337 | 459 | ||
338 | def cve_write_data(d, patched, unpatched, whitelisted, cve_data): | 460 | def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file): |
339 | """ | 461 | """ |
340 | Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and | 462 | Write CVE information in the JSON format: to WORKDIR; and to |
341 | CVE manifest if enabled. | 463 | CVE_CHECK_DIR, if CVE manifest if enabled, write fragment |
464 | files that will be assembled at the end in cve_check_write_rootfs_manifest. | ||
342 | """ | 465 | """ |
343 | 466 | ||
467 | import json | ||
468 | |||
469 | write_string = json.dumps(output, indent=2) | ||
470 | with open(direct_file, "w") as f: | ||
471 | bb.note("Writing file %s with CVE information" % direct_file) | ||
472 | f.write(write_string) | ||
473 | |||
474 | if d.getVar("CVE_CHECK_COPY_FILES") == "1": | ||
475 | bb.utils.mkdirhier(os.path.dirname(deploy_file)) | ||
476 | with open(deploy_file, "w") as f: | ||
477 | f.write(write_string) | ||
478 | |||
479 | if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1": | ||
480 | cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") | ||
481 | index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH") | ||
482 | bb.utils.mkdirhier(cvelogpath) | ||
483 | fragment_file = os.path.basename(deploy_file) | ||
484 | fragment_path = os.path.join(cvelogpath, fragment_file) | ||
485 | with open(fragment_path, "w") as f: | ||
486 | f.write(write_string) | ||
487 | with open(index_path, "a+") as f: | ||
488 | f.write("%s\n" % fragment_path) | ||
489 | |||
490 | def cve_write_data_json(d, cve_data, cve_status): | ||
491 | """ | ||
492 | Prepare CVE data for the JSON format, then write it. | ||
493 | """ | ||
494 | |||
495 | output = {"version":"1", "package": []} | ||
496 | nvd_link = "https://nvd.nist.gov/vuln/detail/" | ||
344 | 497 | ||
345 | cve_file = d.getVar("CVE_CHECK_LOG") | ||
346 | fdir_name = d.getVar("FILE_DIRNAME") | 498 | fdir_name = d.getVar("FILE_DIRNAME") |
347 | layer = fdir_name.split("/")[-3] | 499 | layer = fdir_name.split("/")[-3] |
348 | 500 | ||
349 | include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split() | 501 | include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split() |
350 | exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split() | 502 | exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split() |
351 | 503 | ||
504 | report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1" | ||
505 | |||
352 | if exclude_layers and layer in exclude_layers: | 506 | if exclude_layers and layer in exclude_layers: |
353 | return | 507 | return |
354 | 508 | ||
355 | if include_layers and layer not in include_layers: | 509 | if include_layers and layer not in include_layers: |
356 | return | 510 | return |
357 | 511 | ||
358 | nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId=" | 512 | product_data = [] |
359 | write_string = "" | 513 | for s in cve_status: |
360 | unpatched_cves = [] | 514 | p = {"product": s[0], "cvesInRecord": "Yes"} |
361 | bb.utils.mkdirhier(os.path.dirname(cve_file)) | 515 | if s[1] == False: |
516 | p["cvesInRecord"] = "No" | ||
517 | product_data.append(p) | ||
362 | 518 | ||
363 | for cve in sorted(cve_data): | 519 | package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV")) |
364 | is_patched = cve in patched | 520 | package_data = { |
365 | if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"): | 521 | "name" : d.getVar("PN"), |
366 | continue | 522 | "layer" : layer, |
367 | write_string += "LAYER: %s\n" % layer | 523 | "version" : package_version, |
368 | write_string += "PACKAGE NAME: %s\n" % d.getVar("PN") | 524 | "products": product_data |
369 | write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV")) | 525 | } |
370 | write_string += "CVE: %s\n" % cve | ||
371 | if cve in whitelisted: | ||
372 | write_string += "CVE STATUS: Whitelisted\n" | ||
373 | elif is_patched: | ||
374 | write_string += "CVE STATUS: Patched\n" | ||
375 | else: | ||
376 | unpatched_cves.append(cve) | ||
377 | write_string += "CVE STATUS: Unpatched\n" | ||
378 | write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"] | ||
379 | write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"] | ||
380 | write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"] | ||
381 | write_string += "VECTOR: %s\n" % cve_data[cve]["vector"] | ||
382 | write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve) | ||
383 | |||
384 | if unpatched_cves: | ||
385 | bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file)) | ||
386 | |||
387 | if write_string: | ||
388 | with open(cve_file, "w") as f: | ||
389 | bb.note("Writing file %s with CVE information" % cve_file) | ||
390 | f.write(write_string) | ||
391 | 526 | ||
392 | if d.getVar("CVE_CHECK_COPY_FILES") == "1": | 527 | cve_list = [] |
393 | deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE") | ||
394 | bb.utils.mkdirhier(os.path.dirname(deploy_file)) | ||
395 | with open(deploy_file, "w") as f: | ||
396 | f.write(write_string) | ||
397 | 528 | ||
398 | if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1": | 529 | for cve in sorted(cve_data): |
399 | cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") | 530 | if not report_all and (cve_data[cve]["abbrev-status"] == "Patched" or cve_data[cve]["abbrev-status"] == "Ignored"): |
400 | bb.utils.mkdirhier(cvelogpath) | 531 | continue |
532 | issue_link = "%s%s" % (nvd_link, cve) | ||
533 | |||
534 | cve_item = { | ||
535 | "id" : cve, | ||
536 | "status" : cve_data[cve]["abbrev-status"], | ||
537 | "link": issue_link, | ||
538 | } | ||
539 | if 'NVD-summary' in cve_data[cve]: | ||
540 | cve_item["summary"] = cve_data[cve]["NVD-summary"] | ||
541 | cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"] | ||
542 | cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"] | ||
543 | cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"] | ||
544 | cve_item["modified"] = cve_data[cve]["NVD-modified"] | ||
545 | cve_item["vector"] = cve_data[cve]["NVD-vector"] | ||
546 | cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"] | ||
547 | if 'status' in cve_data[cve]: | ||
548 | cve_item["detail"] = cve_data[cve]["status"] | ||
549 | if 'justification' in cve_data[cve]: | ||
550 | cve_item["description"] = cve_data[cve]["justification"] | ||
551 | if 'resource' in cve_data[cve]: | ||
552 | cve_item["patch-file"] = cve_data[cve]["resource"] | ||
553 | cve_list.append(cve_item) | ||
554 | |||
555 | package_data["issue"] = cve_list | ||
556 | output["package"].append(package_data) | ||
557 | |||
558 | direct_file = d.getVar("CVE_CHECK_LOG_JSON") | ||
559 | deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") | ||
560 | manifest_file = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON") | ||
561 | |||
562 | cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file) | ||
563 | |||
564 | def cve_write_data(d, cve_data, status): | ||
565 | """ | ||
566 | Write CVE data in each enabled format. | ||
567 | """ | ||
401 | 568 | ||
402 | with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f: | 569 | if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": |
403 | f.write("%s" % write_string) | 570 | cve_write_data_json(d, cve_data, status) |