 meta/recipes-core/meta/cve-update-db-native.bb   | 17 ++---------------
 meta/recipes-core/meta/cve-update-nvd2-native.bb | 17 ++---------------
 2 files changed, 4 insertions(+), 30 deletions(-)
diff --git a/meta/recipes-core/meta/cve-update-db-native.bb b/meta/recipes-core/meta/cve-update-db-native.bb
index ecdb1ed8fd..43cafb52b1 100644
--- a/meta/recipes-core/meta/cve-update-db-native.bb
+++ b/meta/recipes-core/meta/cve-update-db-native.bb
@@ -45,7 +45,7 @@ python do_fetch() {
     db_dir = os.path.dirname(db_file)
     db_tmp_file = d.getVar("CVE_CHECK_DB_TEMP_FILE")
 
-    cleanup_db_download(db_file, db_tmp_file)
+    cleanup_db_download(db_tmp_file)
 
     # The NVD database changes once a day, so no need to update more frequently
     # Allow the user to force-update
@@ -91,28 +91,15 @@ python do_unpack() {
 }
 do_unpack[lockfiles] += "${CVE_CHECK_DB_DLDIR_LOCK} ${CVE_CHECK_DB_FILE_LOCK}"
 
-def cleanup_db_download(db_file, db_tmp_file):
+def cleanup_db_download(db_tmp_file):
     """
     Cleanup the download space from possible failed downloads
     """
 
-    # Clean up the updates done on the main file
-    # Remove it only if a journal file exists - it means a complete re-download
-    if os.path.exists("{0}-journal".format(db_file)):
-        # If a journal is present the last update might have been interrupted. In that case,
-        # just wipe any leftovers and force the DB to be recreated.
-        os.remove("{0}-journal".format(db_file))
-
-    if os.path.exists(db_file):
-        os.remove(db_file)
-
     # Clean-up the temporary file downloads, we can remove both journal
     # and the temporary database
     if os.path.exists("{0}-journal".format(db_tmp_file)):
-        # If a journal is present the last update might have been interrupted. In that case,
-        # just wipe any leftovers and force the DB to be recreated.
         os.remove("{0}-journal".format(db_tmp_file))
-
     if os.path.exists(db_tmp_file):
         os.remove(db_tmp_file)
 
diff --git a/meta/recipes-core/meta/cve-update-nvd2-native.bb b/meta/recipes-core/meta/cve-update-nvd2-native.bb
index 83876c7467..f7a306c995 100644
--- a/meta/recipes-core/meta/cve-update-nvd2-native.bb
+++ b/meta/recipes-core/meta/cve-update-nvd2-native.bb
@@ -57,7 +57,7 @@ python do_fetch() {
     db_dir = os.path.dirname(db_file)
     db_tmp_file = d.getVar("CVE_CHECK_DB_TEMP_FILE")
 
-    cleanup_db_download(db_file, db_tmp_file)
+    cleanup_db_download(db_tmp_file)
     # By default let's update the whole database (since time 0)
     database_time = 0
 
@@ -106,28 +106,15 @@ python do_unpack() {
 }
 do_unpack[lockfiles] += "${CVE_CHECK_DB_DLDIR_LOCK} ${CVE_CHECK_DB_FILE_LOCK}"
 
-def cleanup_db_download(db_file, db_tmp_file):
+def cleanup_db_download(db_tmp_file):
     """
     Cleanup the download space from possible failed downloads
     """
 
-    # Clean up the updates done on the main file
-    # Remove it only if a journal file exists - it means a complete re-download
-    if os.path.exists("{0}-journal".format(db_file)):
-        # If a journal is present the last update might have been interrupted. In that case,
-        # just wipe any leftovers and force the DB to be recreated.
-        os.remove("{0}-journal".format(db_file))
-
-    if os.path.exists(db_file):
-        os.remove(db_file)
-
     # Clean-up the temporary file downloads, we can remove both journal
    # and the temporary database
     if os.path.exists("{0}-journal".format(db_tmp_file)):
-        # If a journal is present the last update might have been interrupted. In that case,
-        # just wipe any leftovers and force the DB to be recreated.
         os.remove("{0}-journal".format(db_tmp_file))
-
     if os.path.exists(db_tmp_file):
         os.remove(db_tmp_file)
 
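For reference, a minimal standalone sketch of what cleanup_db_download does in both recipes after this change: only the temporary download and its SQLite journal are removed, while the main database file is left in place. The path used in the usage line is a made-up placeholder; in the recipes the value comes from d.getVar("CVE_CHECK_DB_TEMP_FILE").

import os

def cleanup_db_download(db_tmp_file):
    """
    Cleanup the download space from possible failed downloads
    """
    # A leftover "<tmp>-journal" file means the previous download was
    # interrupted; remove it, then remove the partially written temporary
    # database. The main database file is no longer touched here.
    journal = "{0}-journal".format(db_tmp_file)
    if os.path.exists(journal):
        os.remove(journal)
    if os.path.exists(db_tmp_file):
        os.remove(db_tmp_file)

# Hypothetical usage with a placeholder path:
cleanup_db_download("/tmp/cve_check/nvd.db.tmp")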
