From 754d1c69839982b7cdd49839a398e688c0ad9a9b Mon Sep 17 00:00:00 2001 From: Chris Larson Date: Mon, 4 Apr 2011 09:36:45 -0700 Subject: persist_data: implement comparison, same as dict (Bitbake rev: 1190406c526c7bb7cf415867be83e0403812a7dd) Signed-off-by: Chris Larson Signed-off-by: Richard Purdie --- bitbake/lib/bb/fetch/git.py | 6 ++--- bitbake/lib/bb/fetch2/__init__.py | 15 +++++------- bitbake/lib/bb/persist_data.py | 48 ++++++++++++++------------------------- 3 files changed, 25 insertions(+), 44 deletions(-) (limited to 'bitbake/lib') diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py index 35908ca8d4..9a51ed1387 100644 --- a/bitbake/lib/bb/fetch/git.py +++ b/bitbake/lib/bb/fetch/git.py @@ -242,8 +242,7 @@ class Git(Fetch): """ Look in the cache for the latest revision, if not present ask the SCM. """ - persisted = bb.persist_data.persist(d) - revs = persisted['BB_URI_HEADREVS'] + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) key = self.generate_revision_key(url, ud, d, branch=True) @@ -265,8 +264,7 @@ class Git(Fetch): """ """ - pd = bb.persist_data.persist(d) - localcounts = pd['BB_URI_LOCALCOUNT'] + localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d) key = self.generate_revision_key(url, ud, d, branch=True) oldkey = self.generate_revision_key(url, ud, d, branch=False) diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index a31e26bf75..27fcc3cf76 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py @@ -222,18 +222,18 @@ def fetcher_init(d): Called to initialize the fetchers once the configuration data is known. Calls before this must not hit the cache. """ - pd = persist_data.persist(d) # When to drop SCM head revisions controlled by user policy srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear" if srcrev_policy == "cache": logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) elif srcrev_policy == "clear": logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) try: - bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items() + bb.fetch2.saved_headrevs = revs.items() except: pass - del pd['BB_URI_HEADREVS'] + revs.clear() else: raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) @@ -247,8 +247,7 @@ def fetcher_compare_revisions(d): return true/false on whether they've changed. 
""" - pd = persist_data.persist(d) - data = pd['BB_URI_HEADREVS'].items() + data = bb.persist_data.persist('BB_URI_HEADREVS', d).items() data2 = bb.fetch2.saved_headrevs changed = False @@ -803,8 +802,7 @@ class FetchMethod(object): if not hasattr(self, "_latest_revision"): raise ParameterError("The fetcher for this URL does not support _latest_revision", url) - pd = persist_data.persist(d) - revs = pd['BB_URI_HEADREVS'] + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) key = self.generate_revision_key(url, ud, d, name) try: return revs[key] @@ -819,8 +817,7 @@ class FetchMethod(object): if hasattr(self, "_sortable_revision"): return self._sortable_revision(url, ud, d) - pd = persist_data.persist(d) - localcounts = pd['BB_URI_LOCALCOUNT'] + localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d) key = self.generate_revision_key(url, ud, d, name) latest_rev = self._build_revision(url, ud, d, name) diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index c9e80ba967..0ed0cd28e7 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py @@ -26,7 +26,8 @@ import logging import os.path import sys import warnings -import bb.msg, bb.data, bb.utils +from bb.compat import total_ordering +from collections import Mapping try: import sqlite3 @@ -43,6 +44,7 @@ if hasattr(sqlite3, 'enable_shared_cache'): sqlite3.enable_shared_cache(True) +@total_ordering class SQLTable(collections.MutableMapping): """Object representing a table/domain in the database""" def __init__(self, cursor, table): @@ -105,6 +107,10 @@ class SQLTable(collections.MutableMapping): for row in data: yield row[0] + def __lt__(self, other): + if not isinstance(other, Mapping): + raise NotImplemented + def iteritems(self): data = self._execute("SELECT * FROM %s;" % self.table) for row in data: @@ -118,33 +124,8 @@ class SQLTable(collections.MutableMapping): def has_key(self, key): return key in self - -class SQLData(object): - """Object representing the persistent data""" - def __init__(self, filename): - bb.utils.mkdirhier(os.path.dirname(filename)) - - self.filename = filename - self.connection = sqlite3.connect(filename, timeout=5, - isolation_level=None) - self.cursor = self.connection.cursor() - self._tables = {} - - def __getitem__(self, table): - if not isinstance(table, basestring): - raise TypeError("table argument must be a string, not '%s'" % - type(table)) - - if table in self._tables: - return self._tables[table] - else: - tableobj = self._tables[table] = SQLTable(self.cursor, table) - return tableobj - - def __delitem__(self, table): - if table in self._tables: - del self._tables[table] - self.cursor.execute("DROP TABLE IF EXISTS %s;" % table) + def clear(self): + self._execute("DELETE FROM %s;" % self.table) class PersistData(object): @@ -194,14 +175,19 @@ class PersistData(object): """ del self.data[domain][key] +def connect(database): + return sqlite3.connect(database, timeout=30, isolation_level=None) -def persist(d): - """Convenience factory for construction of SQLData based upon metadata""" +def persist(domain, d): + """Convenience factory for SQLTable objects based upon metadata""" + import bb.data, bb.utils cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)) if not cachedir: logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") sys.exit(1) + bb.utils.mkdirhier(cachedir) cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") - return SQLData(cachefile) + connection = connect(cachefile) + return 
SQLTable(connection, domain)
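
Below is a small standalone sketch (not part of the patch) of the comparison pattern this change applies to SQLTable: the MutableMapping base supplies item-based __eq__/__ne__, and a total_ordering decorator derives the remaining rich comparisons from __lt__. The DictBackedTable class, its in-memory storage, and the ordering-by-length body are illustrative assumptions only -- the hunk above shows just the isinstance guard of SQLTable.__lt__, and the real table is sqlite-backed and decorated with bb.compat.total_ordering; the sketch substitutes functools.total_ordering so it runs without BitBake.

from functools import total_ordering   # stand-in for bb.compat.total_ordering
try:
    from collections.abc import Mapping, MutableMapping   # Python 3
except ImportError:
    from collections import Mapping, MutableMapping       # Python 2

@total_ordering
class DictBackedTable(MutableMapping):
    """Illustrative table that stores items in a plain dict instead of sqlite."""
    def __init__(self, data=None):
        self._data = dict(data or {})

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __lt__(self, other):
        # Refusing non-mapping operands mirrors the isinstance(other, Mapping)
        # guard added to SQLTable.__lt__; ordering by size is an assumption
        # about the intended "same as dict" behaviour, not taken from the patch.
        if not isinstance(other, Mapping):
            return NotImplemented
        return len(self) < len(other)

t1 = DictBackedTable({"rev1": "abc"})
t2 = DictBackedTable({"rev1": "abc"})
t3 = DictBackedTable({"rev1": "abc", "rev2": "def"})
print(t1 == t2)   # True  -- __eq__ comes from the Mapping mixin (compares items)
print(t1 < t3)    # True  -- __lt__ as defined above
print(t1 >= t3)   # False -- derived by total_ordering from __lt__

On the caller side, the reworked factory is used as in the fetcher hunks above: bb.persist_data.persist('BB_URI_HEADREVS', d) now returns the dict-like SQLTable directly instead of going through the removed SQLData wrapper, and revs.clear() replaces the old del pd['BB_URI_HEADREVS'].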