author     Chris Larson <chris_larson@mentor.com>               2011-04-04 09:36:45 -0700
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2011-05-06 18:05:11 +0100
commit     754d1c69839982b7cdd49839a398e688c0ad9a9b (patch)
tree       e848f86280222186e0cb7311afe2e55024192dd0 /bitbake/lib
parent     824acff967ff74c0a678bf8accc4a514653f5783 (diff)
download   poky-754d1c69839982b7cdd49839a398e688c0ad9a9b.tar.gz
persist_data: implement comparison, same as dict
(Bitbake rev: 1190406c526c7bb7cf415867be83e0403812a7dd)
Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
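The subject line refers to a general Python pattern rather than anything BitBake-specific: a class built on the Mapping/MutableMapping ABCs already inherits dict-style __eq__/__ne__, and total_ordering (pulled in here through the bb.compat backport) derives the remaining rich comparisons from __lt__. The sketch below is only an illustration of that pattern under those assumptions, written against present-day collections.abc and functools rather than the Python 2 modules used in the commit; none of its names come from BitBake.

from collections.abc import MutableMapping  # the commit's Python 2 era used 'from collections import Mapping'
from functools import total_ordering        # bb.compat.total_ordering is a backport of this decorator


@total_ordering
class DictBacked(MutableMapping):
    """Toy mapping that stores its items in a plain dict."""

    def __init__(self, *args, **kwargs):
        self._data = dict(*args, **kwargs)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __lt__(self, other):
        # Only the root comparison is written by hand; @total_ordering
        # derives __le__, __gt__ and __ge__ from it and from __eq__.
        if not isinstance(other, MutableMapping):
            return NotImplemented
        return sorted(self.items()) < sorted(other.items())


table = DictBacked(a=1, b=2)
print(table == {'a': 1, 'b': 2})       # True: __eq__ comes from the Mapping ABC
print(table != {'a': 1})               # True
print(table < DictBacked(a=1, b=3))    # True: ordering filled in by @total_ordering

The commit applies the same decorator and ABC machinery to SQLTable, whose items live in SQLite rather than an in-memory dict.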
Diffstat (limited to 'bitbake/lib')

 -rw-r--r--  bitbake/lib/bb/fetch/git.py        |  6
 -rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  | 15
 -rw-r--r--  bitbake/lib/bb/persist_data.py     | 48

 3 files changed, 25 insertions, 44 deletions
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 35908ca8d4..9a51ed1387 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -242,8 +242,7 @@ class Git(Fetch):
         """
         Look in the cache for the latest revision, if not present ask the SCM.
         """
-        persisted = bb.persist_data.persist(d)
-        revs = persisted['BB_URI_HEADREVS']
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
 
         key = self.generate_revision_key(url, ud, d, branch=True)
 
@@ -265,8 +264,7 @@ class Git(Fetch):
         """
 
         """
-        pd = bb.persist_data.persist(d)
-        localcounts = pd['BB_URI_LOCALCOUNT']
+        localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d)
         key = self.generate_revision_key(url, ud, d, branch=True)
         oldkey = self.generate_revision_key(url, ud, d, branch=False)
 
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index a31e26bf75..27fcc3cf76 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -222,18 +222,18 @@ def fetcher_init(d):
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.persist(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
         logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         try:
-            bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
+            bb.fetch2.saved_headrevs = revs.items()
         except:
             pass
-        del pd['BB_URI_HEADREVS']
+        revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
 
@@ -247,8 +247,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
 
-    pd = persist_data.persist(d)
-    data = pd['BB_URI_HEADREVS'].items()
+    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
     data2 = bb.fetch2.saved_headrevs
 
     changed = False
@@ -803,8 +802,7 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
 
-        pd = persist_data.persist(d)
-        revs = pd['BB_URI_HEADREVS']
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(url, ud, d, name)
         try:
             return revs[key]
@@ -819,8 +817,7 @@ class FetchMethod(object):
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
 
-        pd = persist_data.persist(d)
-        localcounts = pd['BB_URI_LOCALCOUNT']
+        localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d)
         key = self.generate_revision_key(url, ud, d, name)
 
         latest_rev = self._build_revision(url, ud, d, name)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index c9e80ba967..0ed0cd28e7 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -26,7 +26,8 @@ import logging
 import os.path
 import sys
 import warnings
-import bb.msg, bb.data, bb.utils
+from bb.compat import total_ordering
+from collections import Mapping
 
 try:
     import sqlite3
@@ -43,6 +44,7 @@ if hasattr(sqlite3, 'enable_shared_cache'):
     sqlite3.enable_shared_cache(True)
 
 
+@total_ordering
 class SQLTable(collections.MutableMapping):
     """Object representing a table/domain in the database"""
     def __init__(self, cursor, table):
@@ -105,6 +107,10 @@ class SQLTable(collections.MutableMapping):
         for row in data:
             yield row[0]
 
+    def __lt__(self, other):
+        if not isinstance(other, Mapping):
+            raise NotImplemented
+
     def iteritems(self):
         data = self._execute("SELECT * FROM %s;" % self.table)
         for row in data:
@@ -118,33 +124,8 @@ class SQLTable(collections.MutableMapping):
     def has_key(self, key):
         return key in self
 
-
-class SQLData(object):
-    """Object representing the persistent data"""
-    def __init__(self, filename):
-        bb.utils.mkdirhier(os.path.dirname(filename))
-
-        self.filename = filename
-        self.connection = sqlite3.connect(filename, timeout=5,
-                                          isolation_level=None)
-        self.cursor = self.connection.cursor()
-        self._tables = {}
-
-    def __getitem__(self, table):
-        if not isinstance(table, basestring):
-            raise TypeError("table argument must be a string, not '%s'" %
-                            type(table))
-
-        if table in self._tables:
-            return self._tables[table]
-        else:
-            tableobj = self._tables[table] = SQLTable(self.cursor, table)
-            return tableobj
-
-    def __delitem__(self, table):
-        if table in self._tables:
-            del self._tables[table]
-        self.cursor.execute("DROP TABLE IF EXISTS %s;" % table)
+    def clear(self):
+        self._execute("DELETE FROM %s;" % self.table)
 
 
 class PersistData(object):
@@ -194,14 +175,19 @@ class PersistData(object):
         """
         del self.data[domain][key]
 
+def connect(database):
+    return sqlite3.connect(database, timeout=30, isolation_level=None)
 
-def persist(d):
-    """Convenience factory for construction of SQLData based upon metadata"""
+def persist(domain, d):
+    """Convenience factory for SQLTable objects based upon metadata"""
+    import bb.data, bb.utils
     cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
                 bb.data.getVar("CACHE", d, True))
     if not cachedir:
         logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
         sys.exit(1)
 
+    bb.utils.mkdirhier(cachedir)
     cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
-    return SQLData(cachefile)
+    connection = connect(cachefile)
+    return SQLTable(connection, domain)
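For context on the call-site changes in the fetchers above: persist() now takes the domain name together with the datastore and returns a dict-like SQLTable directly, and clearing a domain becomes table.clear() instead of deleting a key on the old SQLData wrapper. The following self-contained sketch approximates that flow with plain sqlite3; the class and table names are invented for the demo and it is not BitBake code.

import sqlite3


def connect(database):
    # Mirrors the new module-level connect(): autocommit mode, generous timeout.
    return sqlite3.connect(database, timeout=30, isolation_level=None)


class DemoTable:
    """Minimal stand-in for SQLTable: a str-to-str mapping stored in one table per domain."""

    def __init__(self, connection, table):
        self.connection = connection
        self.table = table
        self.connection.execute(
            "CREATE TABLE IF NOT EXISTS %s (key TEXT PRIMARY KEY, value TEXT);" % table)

    def __setitem__(self, key, value):
        self.connection.execute(
            "INSERT OR REPLACE INTO %s (key, value) VALUES (?, ?);" % self.table,
            (key, value))

    def __getitem__(self, key):
        row = self.connection.execute(
            "SELECT value FROM %s WHERE key = ?;" % self.table, (key,)).fetchone()
        if row is None:
            raise KeyError(key)
        return row[0]

    def clear(self):
        # Same idea as the new SQLTable.clear(): empty the domain in place
        # instead of going through a wrapper object that drops the table.
        self.connection.execute("DELETE FROM %s;" % self.table)


revs = DemoTable(connect(":memory:"), "BB_URI_HEADREVS")
revs["git://example.invalid/repo;branch=master"] = "deadbeef"
print(revs["git://example.invalid/repo;branch=master"])  # -> deadbeef
revs.clear()

The real SQLTable implements the full MutableMapping interface on top of SQL statements; the demo keeps only the pieces needed to show connect() and the DELETE FROM based clear().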