diff options
Diffstat (limited to 'meta/recipes-core')
-rw-r--r-- | meta/recipes-core/meta/cve-update-nvd2-native.bb | 333 |
1 files changed, 333 insertions, 0 deletions
diff --git a/meta/recipes-core/meta/cve-update-nvd2-native.bb b/meta/recipes-core/meta/cve-update-nvd2-native.bb new file mode 100644 index 0000000000..1c14481c21 --- /dev/null +++ b/meta/recipes-core/meta/cve-update-nvd2-native.bb | |||
@@ -0,0 +1,333 @@ | |||
SUMMARY = "Updates the NVD CVE database"
LICENSE = "MIT"

# Important note:
# This product uses the NVD API but is not endorsed or certified by the NVD.

# Pure data-download recipe: no build-time dependencies needed
INHIBIT_DEFAULT_DEPS = "1"

inherit native

# Only do_fetch does real work here; drop every other default task
deltask do_unpack
deltask do_patch
deltask do_configure
deltask do_compile
deltask do_install
deltask do_populate_sysroot

# NVD REST endpoint, API version 2.0
NVDCVE_URL ?= "https://services.nvd.nist.gov/rest/json/cves/2.0"

# CVE database update interval, in seconds. By default: once a day (24*60*60).
# Use 0 to force the update
# Use a negative value to skip the update
CVE_DB_UPDATE_INTERVAL ?= "86400"

# Timeout for blocking socket operations, such as the connection attempt.
# NOTE(review): declared here but not referenced anywhere in this file —
# confirm it is consumed by the cve-check class or wire it into the fetch.
CVE_SOCKET_TIMEOUT ?= "60"

# Staging file: the update runs against this copy and is only swapped
# into CVE_CHECK_DB_FILE once it completed successfully.
CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_2.db"

CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_2.db"
python () {
    # Parse-time guard: this recipe is only useful when the build has
    # activated cve-check; otherwise skip it entirely.
    if not bb.data.inherits_class("cve-check", d):
        raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
}
36 | |||
python do_fetch() {
    """
    Update NVD database with API 2.0
    """
    import bb.utils
    import bb.progress
    import shutil

    # Make proxy settings from the datastore visible to urllib
    bb.utils.export_proxies(d)

    db_file = d.getVar("CVE_CHECK_DB_FILE")
    db_dir = os.path.dirname(db_file)
    db_tmp_file = d.getVar("CVE_DB_TEMP_FILE")

    # Drop leftovers of a previously interrupted or failed download
    cleanup_db_download(db_file, db_tmp_file)
    # By default let's update the whole database (since time 0)
    database_time = 0

    # The NVD database changes once a day, so no need to update more frequently
    # Allow the user to force-update
    try:
        import time
        update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
        if update_interval < 0:
            bb.note("CVE database update skipped")
            return
        if time.time() - os.path.getmtime(db_file) < update_interval:
            bb.note("CVE database recently updated, skipping")
            return
        database_time = os.path.getmtime(db_file)

    except OSError:
        # getmtime() raising OSError means there is no existing database:
        # keep database_time == 0 and perform a full download.
        pass

    bb.utils.mkdirhier(db_dir)
    # Work on a copy so a failed update never corrupts the live database
    if os.path.exists(db_file):
        shutil.copy2(db_file, db_tmp_file)

    if update_db_file(db_tmp_file, d, database_time) == True:
        # Update downloaded correctly, can swap files
        shutil.move(db_tmp_file, db_file)
    else:
        # Update failed, do not modify the database
        bb.warn("CVE database update failed")
        os.remove(db_tmp_file)
}
83 | |||
# Serialize database access against other cve-check consumers, and never
# let checksums/vardeps decide freshness — that is handled at run time
# via CVE_DB_UPDATE_INTERVAL inside do_fetch itself.
do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
do_fetch[file-checksums] = ""
do_fetch[vardeps] = ""
87 | |||
def cleanup_db_download(db_file, db_tmp_file):
    """
    Remove leftovers of an interrupted or failed download.

    The main database is deleted only when its journal file is present
    (a journal means the last update never completed, so the DB must be
    re-created).  The temporary database and its journal are always
    removed.
    """

    main_journal = "{0}-journal".format(db_file)
    if os.path.exists(main_journal):
        # An interrupted update left a journal behind: wipe both the
        # journal and the main database to force a full re-download.
        os.remove(main_journal)

        if os.path.exists(db_file):
            os.remove(db_file)

    # The temporary download area is always safe to clear
    tmp_journal = "{0}-journal".format(db_tmp_file)
    if os.path.exists(tmp_journal):
        os.remove(tmp_journal)

    if os.path.exists(db_tmp_file):
        os.remove(db_tmp_file)
112 | |||
def nvd_request_next(url, api_key, args):
    """
    Request the next page of the NVD database.

    url: base REST endpoint
    api_key: optional NVD API key (raises the rate limit); sent as the
             "apiKey" request header
    args: dict of query parameters (startIndex, lastModStartDate, ...)

    Returns the raw response payload (bytes when the server gzips,
    str otherwise — json.loads accepts both), or None when all three
    attempts failed.
    """

    import http.client
    import urllib.request
    import urllib.parse
    import gzip

    data = urllib.parse.urlencode(args)

    full_request = url + '?' + data

    # Build a Request object so the API key header is actually sent;
    # the previous code collected headers in a dict but passed only the
    # URL string to urlopen(), silently dropping the key.
    request = urllib.request.Request(full_request)
    if api_key:
        request.add_header('apiKey', api_key)

    for attempt in range(3):
        try:
            r = urllib.request.urlopen(request)

            if (r.headers['content-encoding'] == 'gzip'):
                buf = r.read()
                raw_data = gzip.decompress(buf)
            else:
                raw_data = r.read().decode("utf-8")

            r.close()

        except UnicodeDecodeError:
            # Received garbage, retry
            bb.debug(2, "CVE database: received malformed data, retrying (request: %s)" %(full_request))
            pass
        except http.client.IncompleteRead:
            # Read incomplete, let's try again.  http.client is imported
            # above: previously this handler raised NameError instead of
            # retrying, because the module was never imported.
            bb.debug(2, "CVE database: received incomplete data, retrying (request: %s)" %(full_request))
            pass
        else:
            return raw_data
    else:
        # We failed at all attempts
        return None
155 | |||
def update_db_file(db_tmp_file, d, database_time):
    """
    Update the given database file with every CVE modified since
    database_time (0 requests the complete database).

    Pages through the NVD API, storing each page via update_db(), and
    records the update date in the cve_check summary file.

    Returns True on success, False if any page failed to download.
    """
    import bb.utils, bb.progress
    import datetime
    import sqlite3
    import json
    # time is needed for the inter-request sleep below; importing it in
    # do_fetch() does not make it visible here (separate function scope),
    # so the old code raised NameError on any multi-page download.
    import time

    # Connect to database
    conn = sqlite3.connect(db_tmp_file)
    initialize_db(conn)

    req_args = {'startIndex' : 0}

    # The maximum range for time is 120 days
    # Force a complete update if our range is longer
    if (database_time != 0):
        database_date = datetime.datetime.combine(datetime.date.fromtimestamp(database_time), datetime.time())
        today_date = datetime.datetime.combine(datetime.date.today(), datetime.time())
        delta = today_date - database_date
        if delta.days < 120:
            bb.debug(2, "CVE database: performing partial update")
            req_args['lastModStartDate'] = database_date.isoformat()
            req_args['lastModEndDate'] = today_date.isoformat()
        else:
            bb.note("CVE database: file too old, forcing a full update")

    # An API key is optional but raises the NVD rate limits considerably;
    # unset (None) keeps the previous anonymous behaviour.
    api_key = d.getVar("NVDCVE_API_KEY") or None

    with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:

        bb.debug(2, "Updating entries")
        index = 0
        url = d.getVar("NVDCVE_URL")
        while True:
            req_args['startIndex'] = index
            raw_data = nvd_request_next(url, api_key, req_args)
            if raw_data is None:
                # We haven't managed to download data; close the handle so
                # the caller can delete the temporary file cleanly.
                conn.close()
                return False

            data = json.loads(raw_data)

            index = data["startIndex"]
            total = data["totalResults"]
            per_page = data["resultsPerPage"]

            for cve in data["vulnerabilities"]:
                update_db(conn, cve)

            index += per_page
            ph.update((float(index) / (total+1)) * 100)
            if index >= total:
                break

            # Recommended by NVD
            time.sleep(6)

        # Update success, set the date to cve_check file.
        cve_f.write('CVE database update : %s\n\n' % datetime.date.today())

    conn.commit()
    conn.close()
    return True
219 | |||
def initialize_db(conn):
    """
    Create the database schema if it does not exist yet.

    META tracks update dates, NVD holds one row per CVE, and PRODUCTS
    holds the affected vendor/product version ranges (indexed by CVE id).
    """
    with conn:
        cur = conn.cursor()
        try:
            # Update-date bookkeeping
            cur.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")

            # One row per CVE entry
            cur.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
    SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)")

            # Affected version ranges, looked up by CVE id
            cur.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
    VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
    VERSION_END TEXT, OPERATOR_END TEXT)")
            cur.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")
        finally:
            cur.close()
235 | |||
def parse_node_and_insert(conn, node, cveId):
    """
    Insert all vulnerable products of one configuration node into the
    PRODUCTS table.

    conn: sqlite3 connection with the PRODUCTS table present
    node: one "nodes" element from an NVD API 2.0 configuration
    cveId: CVE identifier the product rows belong to
    """

    def cpe_generator():
        # Yield one PRODUCTS row per vulnerable CPE match of this node.
        for cpe in node.get('cpeMatch', ()):
            # Skip unusable entries but keep scanning the remaining
            # matches: the previous "return" statements aborted the whole
            # generator on the first non-vulnerable or malformed match,
            # silently dropping every later vulnerable product.
            if not cpe['vulnerable']:
                continue
            cpe23 = cpe.get('criteria')
            if not cpe23:
                continue
            cpe23 = cpe23.split(':')
            # Need at least 7 fields: cpe23[6] is read below (the old
            # "< 6" guard was off by one and could raise IndexError).
            if len(cpe23) < 7:
                continue
            vendor = cpe23[3]
            product = cpe23[4]
            version = cpe23[5]

            # cpe23[6] is the "update" field; fold a real value into the
            # stored version string
            if cpe23[6] == '*' or cpe23[6] == '-':
                version_suffix = ""
            else:
                version_suffix = "_" + cpe23[6]

            if version != '*' and version != '-':
                # Version is defined, this is a '=' match
                yield [cveId, vendor, product, version + version_suffix, '=', '', '']
            elif version == '-':
                # no version information is available
                yield [cveId, vendor, product, version, '', '', '']
            else:
                # Parse start version, end version and operators
                op_start = ''
                op_end = ''
                v_start = ''
                v_end = ''

                if 'versionStartIncluding' in cpe:
                    op_start = '>='
                    v_start = cpe['versionStartIncluding']

                if 'versionStartExcluding' in cpe:
                    op_start = '>'
                    v_start = cpe['versionStartExcluding']

                if 'versionEndIncluding' in cpe:
                    op_end = '<='
                    v_end = cpe['versionEndIncluding']

                if 'versionEndExcluding' in cpe:
                    op_end = '<'
                    v_end = cpe['versionEndExcluding']

                if op_start or op_end or v_start or v_end:
                    yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
                else:
                    # This is no version information, expressed differently.
                    # Save processing by representing as -.
                    yield [cveId, vendor, product, '-', '', '', '']

    conn.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()).close()
294 | |||
def update_db(conn, elt):
    """
    Update the on-disk database with a single CVE entry (one element of
    the "vulnerabilities" array of an NVD API 2.0 response).

    Rejected entries are skipped.  Stores id, English description,
    CVSSv2/v3 base scores, last-modified date and access vector into NVD,
    then the affected products into PRODUCTS.
    """

    accessVector = None
    cveId = elt['cve']['id']
    if elt['cve']['vulnStatus'] == "Rejected":
        return
    cveDesc = ""
    for desc in elt['cve']['descriptions']:
        if desc['lang'] == 'en':
            cveDesc = desc['value']
    date = elt['cve']['lastModified']
    try:
        accessVector = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['accessVector']
        cvssv2 = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['baseScore']
    except KeyError:
        cvssv2 = 0.0
    try:
        # CVSSv3 lives under cve.metrics.cvssMetricV31 (or cvssMetricV30
        # for older entries) in the 2.0 API.  The previous
        # elt['impact']['baseMetricV3'] path belongs to the 1.x feed and
        # never exists here, so every v3 score came back 0.0/UNKNOWN.
        metricv3 = elt['cve']['metrics'].get('cvssMetricV31') or elt['cve']['metrics']['cvssMetricV30']
        accessVector = accessVector or metricv3[0]['cvssData']['attackVector']
        cvssv3 = metricv3[0]['cvssData']['baseScore']
    except KeyError:
        accessVector = accessVector or "UNKNOWN"
        cvssv3 = 0.0

    conn.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)",
                [cveId, cveDesc, cvssv2, cvssv3, date, accessVector]).close()

    try:
        # Walk every configuration of the entry, not only the first one
        # as the previous code did, so no product ranges are lost.
        for config in elt['cve']['configurations']:
            for node in config['nodes']:
                parse_node_and_insert(conn, node, cveId)
    except KeyError:
        bb.debug(2, "Entry without a configuration")
330 | |||
# Always re-run the task; do_fetch decides internally (via
# CVE_DB_UPDATE_INTERVAL) whether the database actually needs updating.
do_fetch[nostamp] = "1"

# Internal helper recipe, not something "bitbake world" should build
EXCLUDE_FROM_WORLD = "1"