author		Joshua Watt <JPEWhacker@gmail.com>	2023-11-03 08:26:25 -0600
committer	Richard Purdie <richard.purdie@linuxfoundation.org>	2023-11-09 17:33:02 +0000
commit		baa3e5391daf41b6dd6e914a112abb00d3517da1 (patch)
tree		8280b21caf78db2783f586250776c4813f24cb87 /bitbake/lib/hashserv/sqlite.py
parent		e90fccfefd7693d8cdfa731fa7e170c8bd4b1a1b (diff)
download	poky-baa3e5391daf41b6dd6e914a112abb00d3517da1.tar.gz
bitbake: hashserv: Abstract database
Abstracts the way the database backend is accessed by the hash
equivalence server, making it possible to use other backends.
(Bitbake rev: 04b53deacf857488408bc82b9890b1e19874b5f1)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
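For context, the new backend is driven through a small interface: a DatabaseEngine is constructed once, its async create() sets up the schema, and connect() hands out Database objects that are used as async context managers. Below is a minimal usage sketch, not part of this patch; the "hashes.db" path, the logger name, and the method/taskhash values are illustrative assumptions, and it presumes bitbake/lib is on sys.path so hashserv.sqlite is importable.

    import asyncio
    import logging

    from hashserv.sqlite import DatabaseEngine

    async def main():
        # Hypothetical database path; sync=True keeps durable writes
        engine = DatabaseEngine("hashes.db", sync=True)
        await engine.create()  # create tables and indexes if missing

        async with engine.connect(logging.getLogger("hashserv")) as db:
            # Look up the unified hash for a (method, taskhash) pair;
            # both values here are placeholders
            row = await db.get_equivalent("some.method", "0123abcd")
            if row is not None:
                print(row["unihash"])

    asyncio.run(main())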
Diffstat (limited to 'bitbake/lib/hashserv/sqlite.py')
-rw-r--r--	bitbake/lib/hashserv/sqlite.py	259
1 file changed, 259 insertions, 0 deletions
diff --git a/bitbake/lib/hashserv/sqlite.py b/bitbake/lib/hashserv/sqlite.py
new file mode 100644
index 0000000000..6809c53706
--- /dev/null
+++ b/bitbake/lib/hashserv/sqlite.py
@@ -0,0 +1,259 @@
#! /usr/bin/env python3
#
# Copyright (C) 2023 Garmin Ltd.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sqlite3
import logging
from contextlib import closing

logger = logging.getLogger("hashserv.sqlite")

# Each column is a (name, type, flags) tuple; the columns whose flags
# contain "UNIQUE" together form the table's composite UNIQUE constraint
# (see _make_table below)
UNIHASH_TABLE_DEFINITION = (
    ("method", "TEXT NOT NULL", "UNIQUE"),
    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
    ("unihash", "TEXT NOT NULL", ""),
)

UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)

OUTHASH_TABLE_DEFINITION = (
    ("method", "TEXT NOT NULL", "UNIQUE"),
    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
    ("outhash", "TEXT NOT NULL", "UNIQUE"),
    ("created", "DATETIME", ""),
    # Optional fields
    ("owner", "TEXT", ""),
    ("PN", "TEXT", ""),
    ("PV", "TEXT", ""),
    ("PR", "TEXT", ""),
    ("task", "TEXT", ""),
    ("outhash_siginfo", "TEXT", ""),
)

OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)


def _make_table(cursor, name, definition):
    # Build a CREATE TABLE statement from a column definition, adding a
    # surrogate primary key and the composite UNIQUE constraint
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS {name} (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            {fields}
            UNIQUE({unique})
        )
        """.format(
            name=name,
            fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
            unique=", ".join(
                name for name, _, flags in definition if "UNIQUE" in flags
            ),
        )
    )
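# For reference, calling _make_table(cursor, "unihashes_v2",
# UNIHASH_TABLE_DEFINITION), as DatabaseEngine.create() does below,
# expands (whitespace aside) to:
#
#   CREATE TABLE IF NOT EXISTS unihashes_v2 (
#       id INTEGER PRIMARY KEY AUTOINCREMENT,
#       method TEXT NOT NULL, taskhash TEXT NOT NULL, unihash TEXT NOT NULL,
#       UNIQUE(method, taskhash)
#   )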
class DatabaseEngine(object):
    def __init__(self, dbname, sync):
        self.dbname = dbname
        self.logger = logger
        self.sync = sync

    async def create(self):
        db = sqlite3.connect(self.dbname)
        db.row_factory = sqlite3.Row

        with closing(db.cursor()) as cursor:
            _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
            _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)

            # WAL journaling lets readers proceed concurrently with a writer;
            # disabling synchronous writes trades durability for speed
            cursor.execute("PRAGMA journal_mode = WAL")
            cursor.execute(
                "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
            )

            # Drop old indexes
            cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
            cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
            cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
            cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")

            # TODO: Upgrade from tasks_v2?
            cursor.execute("DROP TABLE IF EXISTS tasks_v2")

            # Create new indexes
            cursor.execute(
                "CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)"
            )
            cursor.execute(
                "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
            )

    def connect(self, logger):
        return Database(logger, self.dbname)


class Database(object):
    def __init__(self, logger, dbname, sync=True):
        # Note: sync is accepted here for interface compatibility but is
        # unused; the synchronous pragma is applied in DatabaseEngine.create()
        self.dbname = dbname
        self.logger = logger

        self.db = sqlite3.connect(self.dbname)
        self.db.row_factory = sqlite3.Row

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()

    async def close(self):
        self.db.close()

    async def get_unihash_by_taskhash_full(self, method, taskhash):
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
                WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "taskhash": taskhash,
                },
            )
            return cursor.fetchone()

    async def get_unihash_by_outhash(self, method, outhash):
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                },
            )
            return cursor.fetchone()

    async def get_outhash(self, method, outhash):
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT * FROM outhashes_v2
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                },
            )
            return cursor.fetchone()

    async def get_equivalent_for_outhash(self, method, outhash, taskhash):
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
                -- Select any matching output hash except the one we just inserted
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
                -- Pick the oldest hash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                    "taskhash": taskhash,
                },
            )
            return cursor.fetchone()

    async def get_equivalent(self, method, taskhash):
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                "SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash",
                {
                    "method": method,
                    "taskhash": taskhash,
                },
            )
            return cursor.fetchone()

    async def remove(self, condition):
        def do_remove(columns, table_name, cursor):
            # Only known column names are interpolated into the statement;
            # the values themselves are bound as query parameters
            where = {}
            for c in columns:
                if c in condition and condition[c] is not None:
                    where[c] = condition[c]

            if where:
                query = ("DELETE FROM %s WHERE " % table_name) + " AND ".join(
                    "%s=:%s" % (k, k) for k in where.keys()
                )
                cursor.execute(query, where)
                return cursor.rowcount

            return 0

        count = 0
        with closing(self.db.cursor()) as cursor:
            count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
            count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v2", cursor)
            self.db.commit()

        return count

    async def clean_unused(self, oldest):
        # Delete output hashes older than the cutoff that no longer have a
        # matching unihash entry
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
                    SELECT unihashes_v2.id FROM unihashes_v2 WHERE unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash LIMIT 1
                )
                """,
                {
                    "oldest": oldest,
                },
            )
            return cursor.rowcount

    async def insert_unihash(self, method, taskhash, unihash):
        with closing(self.db.cursor()) as cursor:
            # Comparing lastrowid before and after the INSERT OR IGNORE
            # reveals whether a new row was actually inserted
            prevrowid = cursor.lastrowid
            cursor.execute(
                """
                INSERT OR IGNORE INTO unihashes_v2 (method, taskhash, unihash) VALUES(:method, :taskhash, :unihash)
                """,
                {
                    "method": method,
                    "taskhash": taskhash,
                    "unihash": unihash,
                },
            )
            self.db.commit()
            return cursor.lastrowid != prevrowid

    async def insert_outhash(self, data):
        # Keep only recognized columns and build a parameterized INSERT
        data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}
        keys = sorted(data.keys())
        query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format(
            fields=", ".join(keys),
            values=", ".join(":" + k for k in keys),
        )
        with closing(self.db.cursor()) as cursor:
            prevrowid = cursor.lastrowid
            cursor.execute(query, data)
            self.db.commit()
            return cursor.lastrowid != prevrowid
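The practical effect of this abstraction is that an alternative backend only has to provide the same surface: an engine object with an async create() and a connect(logger) returning an async-context-manager database that exposes the query methods above. The following rough skeleton shows what such a backend could look like; the module name and everything in it are assumptions for illustration, not part of this patch.

    # hashserv/mybackend.py -- hypothetical backend skeleton
    class DatabaseEngine(object):
        def __init__(self, dbname, sync):
            self.dbname = dbname
            self.sync = sync

        async def create(self):
            # Create tables/indexes in the target database here
            pass

        def connect(self, logger):
            return Database(logger, self.dbname)

    class Database(object):
        def __init__(self, logger, dbname):
            self.logger = logger
            self.dbname = dbname

        async def __aenter__(self):
            return self

        async def __aexit__(self, exc_type, exc_value, traceback):
            await self.close()

        async def close(self):
            pass

        # ... plus the same async query methods as the sqlite backend:
        # get_unihash_by_taskhash_full, get_unihash_by_outhash, get_outhash,
        # get_equivalent_for_outhash, get_equivalent, remove, clean_unused,
        # insert_unihash, insert_outhash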
