diff options
Diffstat (limited to 'meta/recipes-core/meta')
-rw-r--r-- | meta/recipes-core/meta/buildtools-tarball.bb | 2
-rw-r--r-- | meta/recipes-core/meta/cve-update-db-native.bb | 181
-rw-r--r-- | meta/recipes-core/meta/cve-update-nvd2-native.bb | 372
3 files changed, 502 insertions, 53 deletions
diff --git a/meta/recipes-core/meta/buildtools-tarball.bb b/meta/recipes-core/meta/buildtools-tarball.bb index faf7108a86..24f5f28589 100644 --- a/meta/recipes-core/meta/buildtools-tarball.bb +++ b/meta/recipes-core/meta/buildtools-tarball.bb | |||
@@ -66,7 +66,7 @@ create_sdk_files_append () { | |||
66 | # Generate new (mini) sdk-environment-setup file | 66 | # Generate new (mini) sdk-environment-setup file |
67 | script=${1:-${SDK_OUTPUT}/${SDKPATH}/environment-setup-${SDK_SYS}} | 67 | script=${1:-${SDK_OUTPUT}/${SDKPATH}/environment-setup-${SDK_SYS}} |
68 | touch $script | 68 | touch $script |
69 | echo 'export PATH=${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH' >> $script | 69 | echo 'export PATH="${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH"' >> $script |
70 | echo 'export OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script | 70 | echo 'export OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script |
71 | echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | 71 | echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script |
72 | echo 'export SSL_CERT_FILE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script | 72 | echo 'export SSL_CERT_FILE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script |
diff --git a/meta/recipes-core/meta/cve-update-db-native.bb b/meta/recipes-core/meta/cve-update-db-native.bb index 9e8e006a32..efc32470d3 100644 --- a/meta/recipes-core/meta/cve-update-db-native.bb +++ b/meta/recipes-core/meta/cve-update-db-native.bb | |||
@@ -12,28 +12,76 @@ deltask do_compile | |||
12 | deltask do_install | 12 | deltask do_install |
13 | deltask do_populate_sysroot | 13 | deltask do_populate_sysroot |
14 | 14 | ||
# CVE database update interval, in seconds. By default: once a day (24*60*60).
# Use 0 to force the update
# Use a negative value to skip the update
CVE_DB_UPDATE_INTERVAL ?= "86400"

# Timeout for blocking socket operations, such as the connection attempt.
CVE_SOCKET_TIMEOUT ?= "60"
NVDCVE_URL ?= "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-"

CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_1.1.db"

python () {
    # This recipe only makes sense together with the cve-check class.
    if not bb.data.inherits_class("cve-check", d):
        raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
}
19 | 30 | ||
python do_fetch() {
    """
    Update NVD database with json data feed
    """
    import bb.utils
    import bb.progress
    import shutil
    import time

    bb.utils.export_proxies(d)

    db_file = d.getVar("CVE_CHECK_DB_FILE")
    db_dir = os.path.dirname(db_file)
    db_tmp_file = d.getVar("CVE_DB_TEMP_FILE")

    # Get rid of leftovers from a previously interrupted run first.
    cleanup_db_download(db_file, db_tmp_file)

    # The NVD database changes once a day, so no need to update more frequently
    # Allow the user to force-update
    try:
        update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
        if update_interval < 0:
            bb.note("CVE database update skipped")
            return
        if time.time() - os.path.getmtime(db_file) < update_interval:
            return
    except OSError:
        # No pre-existing database: fall through and download it.
        pass

    bb.utils.mkdirhier(db_dir)
    # Work on a scratch copy so a failed update never corrupts the live file.
    if os.path.exists(db_file):
        shutil.copy2(db_file, db_tmp_file)

    if update_db_file(db_tmp_file, d):
        # Update downloaded correctly, can swap files
        shutil.move(db_tmp_file, db_file)
    else:
        # Update failed, do not modify the database
        bb.note("CVE database update failed")
        os.remove(db_tmp_file)
}

do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
do_fetch[file-checksums] = ""
do_fetch[vardeps] = ""
77 | |||
78 | def cleanup_db_download(db_file, db_tmp_file): | ||
79 | """ | ||
80 | Cleanup the download space from possible failed downloads | ||
81 | """ | ||
82 | |||
83 | # Clean up the updates done on the main file | ||
84 | # Remove it only if a journal file exists - it means a complete re-download | ||
37 | if os.path.exists("{0}-journal".format(db_file)): | 85 | if os.path.exists("{0}-journal".format(db_file)): |
38 | # If a journal is present the last update might have been interrupted. In that case, | 86 | # If a journal is present the last update might have been interrupted. In that case, |
39 | # just wipe any leftovers and force the DB to be recreated. | 87 | # just wipe any leftovers and force the DB to be recreated. |
@@ -42,37 +90,50 @@ python do_populate_cve_db() { | |||
42 | if os.path.exists(db_file): | 90 | if os.path.exists(db_file): |
43 | os.remove(db_file) | 91 | os.remove(db_file) |
44 | 92 | ||
45 | # Don't refresh the database more than once an hour | 93 | # Clean-up the temporary file downloads, we can remove both journal |
46 | try: | 94 | # and the temporary database |
47 | import time | 95 | if os.path.exists("{0}-journal".format(db_tmp_file)): |
48 | if time.time() - os.path.getmtime(db_file) < (60*60): | 96 | # If a journal is present the last update might have been interrupted. In that case, |
49 | return | 97 | # just wipe any leftovers and force the DB to be recreated. |
50 | except OSError: | 98 | os.remove("{0}-journal".format(db_tmp_file)) |
51 | pass | ||
52 | 99 | ||
53 | bb.utils.mkdirhier(db_dir) | 100 | if os.path.exists(db_tmp_file): |
101 | os.remove(db_tmp_file) | ||
54 | 102 | ||
55 | # Connect to database | 103 | def update_db_file(db_tmp_file, d): |
56 | conn = sqlite3.connect(db_file) | 104 | """ |
57 | c = conn.cursor() | 105 | Update the given database file |
106 | """ | ||
107 | import bb.utils, bb.progress | ||
108 | from datetime import date | ||
109 | import urllib, gzip, sqlite3 | ||
58 | 110 | ||
59 | initialize_db(c) | 111 | YEAR_START = 2002 |
112 | cve_socket_timeout = int(d.getVar("CVE_SOCKET_TIMEOUT")) | ||
113 | |||
114 | # Connect to database | ||
115 | conn = sqlite3.connect(db_tmp_file) | ||
116 | initialize_db(conn) | ||
60 | 117 | ||
61 | with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f: | 118 | with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f: |
62 | total_years = date.today().year + 1 - YEAR_START | 119 | total_years = date.today().year + 1 - YEAR_START |
63 | for i, year in enumerate(range(YEAR_START, date.today().year + 1)): | 120 | for i, year in enumerate(range(YEAR_START, date.today().year + 1)): |
121 | bb.debug(2, "Updating %d" % year) | ||
64 | ph.update((float(i + 1) / total_years) * 100) | 122 | ph.update((float(i + 1) / total_years) * 100) |
65 | year_url = BASE_URL + str(year) | 123 | year_url = (d.getVar('NVDCVE_URL')) + str(year) |
66 | meta_url = year_url + ".meta" | 124 | meta_url = year_url + ".meta" |
67 | json_url = year_url + ".json.gz" | 125 | json_url = year_url + ".json.gz" |
68 | 126 | ||
69 | # Retrieve meta last modified date | 127 | # Retrieve meta last modified date |
70 | try: | 128 | try: |
71 | response = urllib.request.urlopen(meta_url) | 129 | response = urllib.request.urlopen(meta_url, timeout=cve_socket_timeout) |
72 | except urllib.error.URLError as e: | 130 | except urllib.error.URLError as e: |
73 | cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n') | 131 | cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n') |
74 | bb.warn("Failed to fetch CVE data (%s)" % e.reason) | 132 | bb.warn("Failed to fetch CVE data (%s)" % e) |
75 | return | 133 | import socket |
134 | result = socket.getaddrinfo("nvd.nist.gov", 443, proto=socket.IPPROTO_TCP) | ||
135 | bb.warn("Host IPs are %s" % (", ".join(t[4][0] for t in result))) | ||
136 | return False | ||
76 | 137 | ||
77 | if response: | 138 | if response: |
78 | for l in response.read().decode("utf-8").splitlines(): | 139 | for l in response.read().decode("utf-8").splitlines(): |
@@ -82,64 +143,81 @@ python do_populate_cve_db() { | |||
82 | break | 143 | break |
83 | else: | 144 | else: |
84 | bb.warn("Cannot parse CVE metadata, update failed") | 145 | bb.warn("Cannot parse CVE metadata, update failed") |
85 | return | 146 | return False |
86 | 147 | ||
87 | # Compare with current db last modified date | 148 | # Compare with current db last modified date |
88 | c.execute("select DATE from META where YEAR = ?", (year,)) | 149 | cursor = conn.execute("select DATE from META where YEAR = ?", (year,)) |
89 | meta = c.fetchone() | 150 | meta = cursor.fetchone() |
151 | cursor.close() | ||
152 | |||
90 | if not meta or meta[0] != last_modified: | 153 | if not meta or meta[0] != last_modified: |
154 | bb.debug(2, "Updating entries") | ||
91 | # Clear products table entries corresponding to current year | 155 | # Clear products table entries corresponding to current year |
92 | c.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,)) | 156 | conn.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,)).close() |
93 | 157 | ||
94 | # Update db with current year json file | 158 | # Update db with current year json file |
95 | try: | 159 | try: |
96 | response = urllib.request.urlopen(json_url) | 160 | response = urllib.request.urlopen(json_url, timeout=cve_socket_timeout) |
97 | if response: | 161 | if response: |
98 | update_db(c, gzip.decompress(response.read()).decode('utf-8')) | 162 | update_db(conn, gzip.decompress(response.read()).decode('utf-8')) |
99 | c.execute("insert or replace into META values (?, ?)", [year, last_modified]) | 163 | conn.execute("insert or replace into META values (?, ?)", [year, last_modified]).close() |
100 | except urllib.error.URLError as e: | 164 | except urllib.error.URLError as e: |
101 | cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n') | 165 | cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n') |
102 | bb.warn("Cannot parse CVE data (%s), update failed" % e.reason) | 166 | bb.warn("Cannot parse CVE data (%s), update failed" % e.reason) |
103 | return | 167 | return False |
104 | 168 | else: | |
169 | bb.debug(2, "Already up to date (last modified %s)" % last_modified) | ||
105 | # Update success, set the date to cve_check file. | 170 | # Update success, set the date to cve_check file. |
106 | if year == date.today().year: | 171 | if year == date.today().year: |
107 | cve_f.write('CVE database update : %s\n\n' % date.today()) | 172 | cve_f.write('CVE database update : %s\n\n' % date.today()) |
108 | 173 | ||
109 | conn.commit() | 174 | conn.commit() |
110 | conn.close() | 175 | conn.close() |
111 | } | 176 | return True |
112 | 177 | ||
def initialize_db(conn):
    """Create the CVE tables and index if this is a fresh database."""
    with conn:
        cur = conn.cursor()

        # Per-year feed metadata, used to decide whether a feed is stale.
        cur.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")

        # One row per CVE: summary, CVSS v2/v3 scores and access vector.
        cur.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
            SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)")

        # Affected products and version ranges; several rows per CVE.
        cur.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
            VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
            VERSION_END TEXT, OPERATOR_END TEXT)")
        cur.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")

        cur.close()
193 | |||
194 | def parse_node_and_insert(conn, node, cveId): | ||
127 | # Parse children node if needed | 195 | # Parse children node if needed |
128 | for child in node.get('children', ()): | 196 | for child in node.get('children', ()): |
129 | parse_node_and_insert(c, child, cveId) | 197 | parse_node_and_insert(conn, child, cveId) |
130 | 198 | ||
131 | def cpe_generator(): | 199 | def cpe_generator(): |
132 | for cpe in node.get('cpe_match', ()): | 200 | for cpe in node.get('cpe_match', ()): |
133 | if not cpe['vulnerable']: | 201 | if not cpe['vulnerable']: |
134 | return | 202 | return |
135 | cpe23 = cpe['cpe23Uri'].split(':') | 203 | cpe23 = cpe.get('cpe23Uri') |
204 | if not cpe23: | ||
205 | return | ||
206 | cpe23 = cpe23.split(':') | ||
207 | if len(cpe23) < 6: | ||
208 | return | ||
136 | vendor = cpe23[3] | 209 | vendor = cpe23[3] |
137 | product = cpe23[4] | 210 | product = cpe23[4] |
138 | version = cpe23[5] | 211 | version = cpe23[5] |
139 | 212 | ||
213 | if cpe23[6] == '*' or cpe23[6] == '-': | ||
214 | version_suffix = "" | ||
215 | else: | ||
216 | version_suffix = "_" + cpe23[6] | ||
217 | |||
140 | if version != '*' and version != '-': | 218 | if version != '*' and version != '-': |
141 | # Version is defined, this is a '=' match | 219 | # Version is defined, this is a '=' match |
142 | yield [cveId, vendor, product, version, '=', '', ''] | 220 | yield [cveId, vendor, product, version + version_suffix, '=', '', ''] |
143 | elif version == '-': | 221 | elif version == '-': |
144 | # no version information is available | 222 | # no version information is available |
145 | yield [cveId, vendor, product, version, '', '', ''] | 223 | yield [cveId, vendor, product, version, '', '', ''] |
@@ -173,9 +251,9 @@ def parse_node_and_insert(c, node, cveId): | |||
173 | # Save processing by representing as -. | 251 | # Save processing by representing as -. |
174 | yield [cveId, vendor, product, '-', '', '', ''] | 252 | yield [cveId, vendor, product, '-', '', '', ''] |
175 | 253 | ||
176 | c.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()) | 254 | conn.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()).close() |
177 | 255 | ||
178 | def update_db(c, jsondata): | 256 | def update_db(conn, jsondata): |
179 | import json | 257 | import json |
180 | root = json.loads(jsondata) | 258 | root = json.loads(jsondata) |
181 | 259 | ||
@@ -199,15 +277,14 @@ def update_db(c, jsondata): | |||
199 | accessVector = accessVector or "UNKNOWN" | 277 | accessVector = accessVector or "UNKNOWN" |
200 | cvssv3 = 0.0 | 278 | cvssv3 = 0.0 |
201 | 279 | ||
202 | c.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)", | 280 | conn.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)", |
203 | [cveId, cveDesc, cvssv2, cvssv3, date, accessVector]) | 281 | [cveId, cveDesc, cvssv2, cvssv3, date, accessVector]).close() |
204 | 282 | ||
205 | configurations = elt['configurations']['nodes'] | 283 | configurations = elt['configurations']['nodes'] |
206 | for config in configurations: | 284 | for config in configurations: |
207 | parse_node_and_insert(c, config, cveId) | 285 | parse_node_and_insert(conn, config, cveId) |
208 | 286 | ||
209 | 287 | ||
# The database freshness check lives inside do_fetch itself, so the task
# must run every time rather than being satisfied by a stamp file.
do_fetch[nostamp] = "1"

EXCLUDE_FROM_WORLD = "1"
diff --git a/meta/recipes-core/meta/cve-update-nvd2-native.bb b/meta/recipes-core/meta/cve-update-nvd2-native.bb new file mode 100644 index 0000000000..1a3eeba6d0 --- /dev/null +++ b/meta/recipes-core/meta/cve-update-nvd2-native.bb | |||
@@ -0,0 +1,372 @@ | |||
SUMMARY = "Updates the NVD CVE database"
LICENSE = "MIT"

# Important note:
# This product uses the NVD API but is not endorsed or certified by the NVD.

INHIBIT_DEFAULT_DEPS = "1"

inherit native

deltask do_unpack
deltask do_patch
deltask do_configure
deltask do_compile
deltask do_install
deltask do_populate_sysroot

NVDCVE_URL ?= "https://services.nvd.nist.gov/rest/json/cves/2.0"

# If you have an NVD API key (https://nvd.nist.gov/developers/request-an-api-key)
# then set this to benefit from higher rate limits.
NVDCVE_API_KEY ?= ""

# CVE database update interval, in seconds. By default: once a day (24*60*60).
# Use 0 to force the update
# Use a negative value to skip the update
CVE_DB_UPDATE_INTERVAL ?= "86400"

# CVE database incremental update age threshold, in seconds. If the database is
# older than this threshold, do a full re-download, else, do an incremental
# update. By default: the maximum allowed value from NVD: 120 days (120*24*60*60)
# Use 0 to force a full download.
CVE_DB_INCR_UPDATE_AGE_THRES ?= "10368000"

# Number of attempts for each HTTP query to the NVD server before giving up
CVE_DB_UPDATE_ATTEMPTS ?= "5"

CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_2.db"

python () {
    # This recipe only makes sense together with the cve-check class.
    if not bb.data.inherits_class("cve-check", d):
        raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
}
44 | |||
python do_fetch() {
    """
    Update NVD database with API 2.0
    """
    import bb.utils
    import bb.progress
    import shutil
    import time

    bb.utils.export_proxies(d)

    db_file = d.getVar("CVE_CHECK_DB_FILE")
    db_dir = os.path.dirname(db_file)
    db_tmp_file = d.getVar("CVE_DB_TEMP_FILE")

    # Get rid of leftovers from a previously interrupted run first.
    cleanup_db_download(db_file, db_tmp_file)

    # By default let's update the whole database (since time 0)
    database_time = 0

    # The NVD database changes once a day, so no need to update more frequently
    # Allow the user to force-update
    try:
        update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
        if update_interval < 0:
            bb.note("CVE database update skipped")
            return
        if time.time() - os.path.getmtime(db_file) < update_interval:
            bb.note("CVE database recently updated, skipping")
            return
        database_time = os.path.getmtime(db_file)
    except OSError:
        # No pre-existing database: keep database_time at 0 (full download).
        pass

    bb.utils.mkdirhier(db_dir)
    # Work on a scratch copy so a failed update never corrupts the live file.
    if os.path.exists(db_file):
        shutil.copy2(db_file, db_tmp_file)

    if update_db_file(db_tmp_file, d, database_time):
        # Update downloaded correctly, can swap files
        shutil.move(db_tmp_file, db_file)
    else:
        # Update failed, do not modify the database
        bb.warn("CVE database update failed")
        os.remove(db_tmp_file)
}

do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
do_fetch[file-checksums] = ""
do_fetch[vardeps] = ""
95 | |||
def cleanup_db_download(db_file, db_tmp_file):
    """
    Cleanup the download space from possible failed downloads.

    A "-journal" file next to a database means sqlite was interrupted mid
    transaction, so that database can no longer be trusted.
    """

    db_journal = "{0}-journal".format(db_file)
    tmp_journal = "{0}-journal".format(db_tmp_file)

    # The main database is wiped only when its journal proves the last
    # update was interrupted; it will then be re-downloaded from scratch.
    if os.path.exists(db_journal):
        os.remove(db_journal)

        if os.path.exists(db_file):
            os.remove(db_file)

    # The temporary download is always disposable: drop both the journal
    # and the temporary database itself when present.
    if os.path.exists(tmp_journal):
        os.remove(tmp_journal)

    if os.path.exists(db_tmp_file):
        os.remove(db_tmp_file)
120 | |||
def nvd_request_wait(attempt, min_wait):
    """Backoff delay in seconds before retry *attempt*, capped at 30s."""
    return min(2 * attempt + min_wait, 30)
123 | |||
def nvd_request_next(url, attempts, api_key, args, min_wait):
    """
    Request next part of the NVD database.

    Returns the decoded response body on success, or None once every
    attempt has failed.
    NVD API documentation: https://nvd.nist.gov/developers/vulnerabilities
    """

    import urllib.request
    import urllib.parse
    import gzip
    import http
    import time

    request = urllib.request.Request(url + "?" + urllib.parse.urlencode(args))
    if api_key:
        request.add_header("apiKey", api_key)
    bb.note("Requesting %s" % request.full_url)

    for attempt in range(attempts):
        try:
            response = urllib.request.urlopen(request)

            # The server may or may not gzip the payload.
            if response.headers['content-encoding'] == 'gzip':
                raw_data = gzip.decompress(response.read()).decode("utf-8")
            else:
                raw_data = response.read().decode("utf-8")

            response.close()
        except Exception as e:
            # Back off and retry; the delay grows with the attempt number.
            wait_time = nvd_request_wait(attempt, min_wait)
            bb.note("CVE database: received error (%s)" % (e))
            bb.note("CVE database: retrying download after %d seconds. attempted (%d/%d)" % (wait_time, attempt+1, attempts))
            time.sleep(wait_time)
        else:
            return raw_data

    # We failed at all attempts
    return None
164 | |||
def update_db_file(db_tmp_file, d, database_time):
    """
    Update the given database file from the NVD API.

    db_tmp_file -- path to the working copy of the database
    d -- the BitBake datastore
    database_time -- mtime of the pre-existing database, 0 if none

    Returns True on success, False when the download failed.
    """
    import bb.utils, bb.progress
    import datetime
    import sqlite3
    import json
    # Fix: 'time' was used below (inter-request delay) without being
    # imported in this scope, causing a NameError on multi-page downloads.
    import time

    # Connect to database
    conn = sqlite3.connect(db_tmp_file)
    initialize_db(conn)

    req_args = {'startIndex' : 0}

    # Decide between a full download and an incremental (lastMod*) update.
    incr_update_threshold = int(d.getVar("CVE_DB_INCR_UPDATE_AGE_THRES"))
    if database_time != 0:
        database_date = datetime.datetime.fromtimestamp(database_time, tz=datetime.timezone.utc)
        today_date = datetime.datetime.now(tz=datetime.timezone.utc)
        delta = today_date - database_date
        if incr_update_threshold == 0:
            bb.note("CVE database: forced full update")
        elif delta < datetime.timedelta(seconds=incr_update_threshold):
            bb.note("CVE database: performing partial update")
            # The maximum range for time is 120 days
            if delta > datetime.timedelta(days=120):
                bb.error("CVE database: Trying to do an incremental update on a larger than supported range")
            req_args['lastModStartDate'] = database_date.isoformat()
            req_args['lastModEndDate'] = today_date.isoformat()
        else:
            bb.note("CVE database: file too old, forcing a full update")
    else:
        bb.note("CVE database: no preexisting database, do a full download")

    with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:

        bb.note("Updating entries")
        index = 0
        url = d.getVar("NVDCVE_URL")
        api_key = d.getVar("NVDCVE_API_KEY") or None
        attempts = int(d.getVar("CVE_DB_UPDATE_ATTEMPTS"))

        # Recommended by NVD
        wait_time = 6
        if api_key:
            wait_time = 2

        # Page through the results using startIndex/resultsPerPage.
        while True:
            req_args['startIndex'] = index
            raw_data = nvd_request_next(url, attempts, api_key, req_args, wait_time)
            if raw_data is None:
                # We haven't managed to download data; close the connection
                # instead of leaking it (the caller discards the temp file).
                conn.close()
                return False

            data = json.loads(raw_data)

            index = data["startIndex"]
            total = data["totalResults"]
            per_page = data["resultsPerPage"]
            bb.note("Got %d entries" % per_page)
            for cve in data["vulnerabilities"]:
                update_db(conn, cve)

            index += per_page
            # +1 avoids division by zero when the result set is empty.
            ph.update((float(index) / (total+1)) * 100)
            if index >= total:
                break

            # Recommended by NVD
            time.sleep(wait_time)

        # Update success, set the date to cve_check file.
        cve_f.write('CVE database update : %s\n\n' % datetime.date.today())

    conn.commit()
    conn.close()
    return True
242 | |||
def initialize_db(conn):
    """Create the CVE tables and index if this is a fresh database."""
    with conn:
        cur = conn.cursor()

        # Per-year metadata (kept for schema compatibility with the 1.1 feed).
        cur.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")

        # One row per CVE: summary, CVSS v2/v3 scores and access vector.
        cur.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
            SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)")

        # Affected products and version ranges; several rows per CVE.
        cur.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
            VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
            VERSION_END TEXT, OPERATOR_END TEXT)")
        cur.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")

        cur.close()
258 | |||
def parse_node_and_insert(conn, node, cveId):
    """
    Insert the product/version constraints of one configuration node
    (NVD API 2.0 "cpeMatch" format) into the PRODUCTS table for cveId.
    """

    def cpe_generator():
        for cpe in node.get('cpeMatch', ()):
            if not cpe['vulnerable']:
                return
            cpe23 = cpe.get('criteria')
            if not cpe23:
                return
            cpe23 = cpe23.split(':')
            # A cpe23 URI is "cpe:2.3:part:vendor:product:version:update:...".
            # Fields up to and including index 6 (the update field) are read
            # below, so at least 7 components are required. The previous
            # "< 6" guard still allowed an IndexError on cpe23[6].
            if len(cpe23) < 7:
                return
            vendor = cpe23[3]
            product = cpe23[4]
            version = cpe23[5]

            # Fold a concrete update field into the version (e.g. 1.2_p1).
            if cpe23[6] == '*' or cpe23[6] == '-':
                version_suffix = ""
            else:
                version_suffix = "_" + cpe23[6]

            if version != '*' and version != '-':
                # Version is defined, this is a '=' match
                yield [cveId, vendor, product, version + version_suffix, '=', '', '']
            elif version == '-':
                # no version information is available
                yield [cveId, vendor, product, version, '', '', '']
            else:
                # Parse start version, end version and operators
                op_start = ''
                op_end = ''
                v_start = ''
                v_end = ''

                if 'versionStartIncluding' in cpe:
                    op_start = '>='
                    v_start = cpe['versionStartIncluding']

                if 'versionStartExcluding' in cpe:
                    op_start = '>'
                    v_start = cpe['versionStartExcluding']

                if 'versionEndIncluding' in cpe:
                    op_end = '<='
                    v_end = cpe['versionEndIncluding']

                if 'versionEndExcluding' in cpe:
                    op_end = '<'
                    v_end = cpe['versionEndExcluding']

                if op_start or op_end or v_start or v_end:
                    yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
                else:
                    # This is no version information, expressed differently.
                    # Save processing by representing as -.
                    yield [cveId, vendor, product, '-', '', '', '']

    conn.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()).close()
317 | |||
def update_db(conn, elt):
    """
    Update a single entry in the on-disk database.

    elt is one item of the API 2.0 "vulnerabilities" array. Rejected CVEs
    are removed from both tables entirely.
    """

    accessVector = None
    cveId = elt['cve']['id']
    if elt['cve']['vulnStatus'] == "Rejected":
        c = conn.cursor()
        c.execute("delete from PRODUCTS where ID = ?;", [cveId])
        c.execute("delete from NVD where ID = ?;", [cveId])
        c.close()
        return

    # Keep only the English description.
    cveDesc = ""
    for desc in elt['cve']['descriptions']:
        if desc['lang'] == 'en':
            cveDesc = desc['value']
    date = elt['cve']['lastModified']

    # CVSS metrics are optional; prefer v2's accessVector, fall back to
    # v3.0 then v3.1 attackVector, defaulting to UNKNOWN / 0.0.
    try:
        accessVector = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['accessVector']
        cvssv2 = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['baseScore']
    except KeyError:
        cvssv2 = 0.0
    cvssv3 = None
    try:
        accessVector = accessVector or elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['attackVector']
        cvssv3 = elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['baseScore']
    except KeyError:
        pass
    try:
        accessVector = accessVector or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['attackVector']
        cvssv3 = cvssv3 or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['baseScore']
    except KeyError:
        pass
    accessVector = accessVector or "UNKNOWN"
    cvssv3 = cvssv3 or 0.0

    conn.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)",
                [cveId, cveDesc, cvssv2, cvssv3, date, accessVector]).close()

    # Remove any pre-existing CVE configuration. Even for partial database
    # update, those will be repopulated. This ensures that old
    # configuration is not kept for an updated CVE.
    conn.execute("delete from PRODUCTS where ID = ?", [cveId]).close()

    # Fix: the former broad "try/except KeyError" around this loop also
    # masked KeyErrors raised while parsing individual nodes as
    # "no configurations". Look up the optional key explicitly instead.
    configurations = elt['cve'].get('configurations')
    if configurations is None:
        bb.note("CVE %s has no configurations" % cveId)
        return
    for config in configurations:
        # This is suboptimal as it doesn't handle AND/OR and negate, but is better than nothing
        for node in config.get("nodes", ()):
            parse_node_and_insert(conn, node, cveId)
369 | |||
# The database freshness check lives inside do_fetch itself, so the task
# must run every time rather than being satisfied by a stamp file.
do_fetch[nostamp] = "1"

EXCLUDE_FROM_WORLD = "1"