Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r--  bitbake/lib/bb/cache.py | 63
1 file changed, 45 insertions(+), 18 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 7d7e66ebd2..dad82a9b36 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -39,7 +39,7 @@ except ImportError:
     import pickle
     bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
 
-__cache_version__ = "127"
+__cache_version__ = "128"
 
 class Cache:
     """
@@ -50,9 +50,11 @@ class Cache:
 
         self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
         self.clean = {}
+        self.checked = {}
         self.depends_cache = {}
         self.data = None
         self.data_fn = None
+        self.cacheclean = True
 
         if self.cachedir in [None, '']:
             self.has_cache = False
@@ -67,9 +69,20 @@ class Cache:
             except OSError:
                 bb.mkdirhier( self.cachedir )
 
-        if self.has_cache and (self.mtime(self.cachefile)):
+        if not self.has_cache:
+            return
+
+        # If any of configuration.data's dependencies are newer than the
+        # cache there isn't even any point in loading it...
+        newest_mtime = 0
+        deps = bb.data.getVar("__depends", cooker.configuration.data, True)
+        for f,old_mtime in deps:
+            if old_mtime > newest_mtime:
+                newest_mtime = old_mtime
+
+        if self.mtime(self.cachefile) >= newest_mtime:
             try:
-                p = pickle.Unpickler( file(self.cachefile,"rb"))
+                p = pickle.Unpickler(file(self.cachefile, "rb"))
                 self.depends_cache, version_data = p.load()
                 if version_data['CACHE_VER'] != __cache_version__:
                     raise ValueError, 'Cache Version Mismatch'
@@ -81,11 +94,8 @@ class Cache:
             except (ValueError, KeyError):
                 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                 self.depends_cache = {}
-
-        if self.depends_cache:
-            for fn in self.depends_cache.keys():
-                self.clean[fn] = ""
-                self.cacheValidUpdate(fn)
+        else:
+            bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
 
     def getVar(self, var, fn, exp = 0):
         """
@@ -97,7 +107,6 @@ class Cache:
           2. We're learning what data to cache - serve from data
              backend but add a copy of the data to the cache.
         """
-
         if fn in self.clean:
             return self.depends_cache[fn][var]
 
@@ -109,6 +118,7 @@ class Cache:
             # yet setData hasn't been called to setup the right access. Very bad.
             bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
 
+        self.cacheclean = False
         result = bb.data.getVar(var, self.data, exp)
         self.depends_cache[fn][var] = result
         return result
@@ -131,6 +141,8 @@ class Cache:
         Return a complete set of data for fn.
         To do this, we need to parse the file.
         """
+        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
+
         bb_data, skipped = self.load_bbfile(fn, cfgData)
         return bb_data
 
@@ -142,11 +154,15 @@ class Cache:
         to record the variables accessed.
         Return the cache status and whether the file was skipped when parsed
         """
+        if fn not in self.checked:
+            self.cacheValidUpdate(fn)
         if self.cacheValid(fn):
             if "SKIPPED" in self.depends_cache[fn]:
                 return True, True
             return True, False
 
+        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
+
         bb_data, skipped = self.load_bbfile(fn, cfgData)
         self.setData(fn, bb_data)
         return False, skipped
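
Together with the checked dict added in __init__, this hunk turns the up-front validation loop (removed from the constructor earlier in the diff) into a lazy, per-recipe check: cacheValidUpdate() runs at most once per file, the first time loadData() sees it. A rough, self-contained sketch of that memoised-validation pattern, detached from the BitBake classes (all names here are illustrative):

import os

class LazyValidator:
    """Validate each file at most once per session, mirroring the
    checked / clean split used in the hunk above (sketch only)."""

    def __init__(self, recorded_mtimes):
        self.recorded_mtimes = recorded_mtimes  # {fn: mtime stored in the cache}
        self.checked = {}
        self.clean = {}

    def cache_valid_update(self, fn):
        self.checked[fn] = ""
        try:
            current = os.path.getmtime(fn)
        except OSError:
            return False                        # file vanished: entry is stale
        if current <= self.recorded_mtimes.get(fn, -1):
            self.clean[fn] = ""
            return True
        return False

    def load_data(self, fn):
        if fn not in self.checked:
            self.cache_valid_update(fn)
        if fn in self.clean:
            return "cached"                     # reuse the cached variables
        return "reparse"                        # fall back to a full parse
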
@@ -172,11 +188,10 @@ class Cache:
         if not self.has_cache:
             return False
 
-        # Check file still exists
-        if self.mtime(fn) == 0:
-            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
-            self.remove(fn)
-            return False
+        self.checked[fn] = ""
+
+        # Pretend we're clean so getVar works
+        self.clean[fn] = ""
 
         # File isn't in depends_cache
         if not fn in self.depends_cache:
@@ -184,6 +199,12 @@ class Cache:
             self.remove(fn)
             return False
 
+        # Check file still exists
+        if self.mtime(fn) == 0:
+            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
+            self.remove(fn)
+            return False
+
         # Check the file's timestamp
         if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
             bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
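
The existence check moved here relies on the mtime helper returning 0 when a recipe file can no longer be stat'd, which is how the hunk detects deleted recipes before comparing timestamps. A minimal stand-in with that behaviour (a sketch, not BitBake's implementation):

import os

def mtime_noerror(path):
    """Return the file's mtime, or 0 when it cannot be stat'd, mirroring the
    `self.mtime(fn) == 0` idiom used above to drop entries for removed files."""
    try:
        return os.stat(path).st_mtime
    except OSError:
        return 0

if mtime_noerror("some/removed/recipe.bb") == 0:
    print("recipe gone; drop its cache entry")
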
@@ -195,6 +216,7 @@ class Cache:
         for f,old_mtime in depends:
             # Check if file still exists
             if self.mtime(f) == 0:
+                self.remove(fn)
                 return False
 
             new_mtime = bb.parse.cached_mtime(f)
@@ -203,7 +225,7 @@ class Cache:
                 self.remove(fn)
                 return False
 
-        bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
+        #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
         if not fn in self.clean:
             self.clean[fn] = ""
 
@@ -238,6 +260,10 @@ class Cache:
         if not self.has_cache:
             return
 
+        if self.cacheclean:
+            bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
+            return
+
         version_data = {}
         version_data['CACHE_VER'] = __cache_version__
         version_data['BITBAKE_VER'] = bb.__version__
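
cacheclean starts out True and is flipped to False (in the getVar hunk earlier) the first time a value has to be fetched from the live data store instead of the cache, so the sync path above can skip rewriting an unchanged pickle. A compact sketch of that dirty-flag pattern around a pickled dict (file name and structure are illustrative, not the BitBake cache layout):

import pickle

class DictCache:
    def __init__(self, path):
        self.path = path
        self.data = {}
        self.cacheclean = True          # becomes False on the first miss

    def get(self, key, compute):
        if key in self.data:
            return self.data[key]
        self.cacheclean = False         # we had to compute something new
        self.data[key] = compute(key)
        return self.data[key]

    def sync(self):
        if self.cacheclean:
            return                      # nothing changed; skip the write
        with open(self.path, "wb") as f:
            pickle.dump(self.data, f)

cache = DictCache("bb_cache_sketch.dat")
cache.get("PN", lambda k: "example")
cache.sync()                            # writes, because a value was computed
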
@@ -264,7 +290,6 @@ class Cache:
         packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
         rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
 
-        cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
         cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
 
         # build PackageName to FileName lookup table
@@ -328,14 +353,16 @@ class Cache:
         if not file_name in cacheData.runrecs:
             cacheData.runrecs[file_name] = {}
 
+        rdepends = bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")
+        rrecommends = bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")
         for package in packages + [pn]:
             if not package in cacheData.rundeps[file_name]:
                 cacheData.rundeps[file_name][package] = {}
             if not package in cacheData.runrecs[file_name]:
                 cacheData.runrecs[file_name][package] = {}
 
-            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
-            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
+            add_dep(cacheData.rundeps[file_name][package], rdepends)
+            add_dep(cacheData.runrecs[file_name][package], rrecommends)
             add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
             add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
 
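
The last hunk hoists the recipe-wide RDEPENDS/RRECOMMENDS parsing out of the per-package loop, so the dependency string is exploded once per recipe rather than once per package. Roughly, explode_deps turns a string such as "glibc (>= 2.5) update-rc.d" into bare package names; the stand-in below is only an illustration of that behaviour and of the loop hoisting, not bb.utils.explode_deps itself:

def explode_deps_sketch(depstr):
    """Very rough stand-in: keep bare package names, drop any (...)
    version constraints (illustrative only)."""
    deps = []
    depth = 0
    for token in depstr.split():
        if token.startswith("("):
            depth += 1
        if depth == 0:
            deps.append(token)
        if token.endswith(")"):
            depth -= 1
    return deps

packages = ["foo", "foo-dev", "foo-doc"]
rdepends_str = "glibc (>= 2.5) update-rc.d"

# Hoisted: parse the recipe-wide string once...
rdepends = explode_deps_sketch(rdepends_str)
# ...then reuse the result for every package instead of re-parsing in the loop.
rundeps = {pkg: list(rdepends) for pkg in packages}
print(rundeps)
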