Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r--  bitbake/lib/bb/cache.py | 207
1 file changed, 166 insertions, 41 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 921a9f7589..05c42518a7 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -33,15 +33,15 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
 import os, re
 import bb.data
 import bb.utils
+from sets import Set
 
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
-    print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
+    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
 
-# __cache_version__ = "123"
-__cache_version__ = "124" # changes the __depends structure
+__cache_version__ = "125"
 
 class Cache:
     """
@@ -58,14 +58,12 @@ class Cache:
 
         if self.cachedir in [None, '']:
             self.has_cache = False
-            if cooker.cb is not None:
-                print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
+            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
         else:
             self.has_cache = True
             self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
 
-            if cooker.cb is not None:
-                print "NOTE: Using cache in '%s'" % self.cachedir
+            bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
             try:
                 os.stat( self.cachedir )
             except OSError:
@@ -80,7 +78,7 @@ class Cache:
                 if version_data['BITBAKE_VER'] != bb.__version__:
                     raise ValueError, 'Bitbake Version Mismatch'
             except (ValueError, KeyError):
-                bb.note("Invalid cache found, rebuilding...")
+                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                 self.depends_cache = {}
 
         if self.depends_cache:
@@ -108,7 +106,7 @@ class Cache:
         if fn != self.data_fn:
             # We're trying to access data in the cache which doesn't exist
             # yet setData hasn't been called to setup the right access. Very bad.
-            bb.error("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
+            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
 
         result = bb.data.getVar(var, self.data, exp)
         self.depends_cache[fn][var] = result
@@ -127,15 +125,15 @@ class Cache:
         self.getVar("__depends", fn, True)
         self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
 
-    def loadDataFull(self, fn, cooker):
+    def loadDataFull(self, fn, cfgData):
         """
         Return a complete set of data for fn.
         To do this, we need to parse the file.
         """
-        bb_data, skipped = self.load_bbfile(fn, cooker)
+        bb_data, skipped = self.load_bbfile(fn, cfgData)
         return bb_data
 
-    def loadData(self, fn, cooker):
+    def loadData(self, fn, cfgData):
         """
         Load a subset of data for fn.
         If the cached data is valid we do nothing,
@@ -148,7 +146,7 @@ class Cache:
                 return True, True
             return True, False
 
-        bb_data, skipped = self.load_bbfile(fn, cooker)
+        bb_data, skipped = self.load_bbfile(fn, cfgData)
         self.setData(fn, bb_data)
         return False, skipped
 
@@ -175,32 +173,36 @@ class Cache:
 
         # Check file still exists
         if self.mtime(fn) == 0:
-            bb.debug(2, "Cache: %s not longer exists" % fn)
+            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
             self.remove(fn)
             return False
 
         # File isn't in depends_cache
         if not fn in self.depends_cache:
-            bb.debug(2, "Cache: %s is not cached" % fn)
+            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
             self.remove(fn)
             return False
 
         # Check the file's timestamp
         if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
-            bb.debug(2, "Cache: %s changed" % fn)
+            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
             self.remove(fn)
             return False
 
         # Check dependencies are still valid
         depends = self.getVar("__depends", fn, True)
         for f,old_mtime in depends:
+            # Check if file still exists
+            if self.mtime(f) == 0:
+                return False
+
             new_mtime = bb.parse.cached_mtime(f)
             if (new_mtime > old_mtime):
-                bb.debug(2, "Cache: %s's dependency %s changed" % (fn, f))
+                bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
                 self.remove(fn)
                 return False
 
-        bb.debug(2, "Depends Cache: %s is clean" % fn)
+        bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
         if not fn in self.clean:
             self.clean[fn] = ""
 
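The hunk above is the cache-validation path: an entry is treated as clean only if the recipe file still exists, is present in depends_cache, has not been modified since CACHETIMESTAMP, and none of its recorded __depends entries has disappeared or gained a newer mtime (the disappeared-file test is what this patch adds). A minimal standalone sketch of that rule, using os.stat directly instead of BitBake's cached-mtime helpers; entry_is_clean() is a hypothetical name, not part of the patch:

# Sketch only: the staleness rule cacheValid() applies, outside BitBake.
import os

def entry_is_clean(fn, cached_mtime, depends):
    """'depends' is a list of (path, old_mtime) pairs, like __depends."""
    def mtime(path):
        try:
            return os.stat(path).st_mtime
        except OSError:
            return 0                     # missing file, same convention as Cache.mtime()

    if mtime(fn) == 0:                   # recipe file no longer exists
        return False
    if mtime(fn) > cached_mtime:         # recipe changed since it was cached
        return False
    for path, old_mtime in depends:
        if mtime(path) == 0:             # dependency vanished (the new check in this hunk)
            return False
        if mtime(path) > old_mtime:      # dependency changed
            return False
    return True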
@@ -220,7 +222,7 @@ class Cache:
         Remove a fn from the cache
         Called from the parser in error cases
         """
-        bb.debug(1, "Removing %s from cache" % fn)
+        bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
         if fn in self.depends_cache:
             del self.depends_cache[fn]
         if fn in self.clean:
@@ -229,7 +231,7 @@ class Cache:
     def sync(self):
         """
         Save the cache
-        Called from the parser when complete (or exitting)
+        Called from the parser when complete (or exiting)
         """
 
         if not self.has_cache:
@@ -243,12 +245,103 @@ class Cache:
         p.dump([self.depends_cache, version_data])
 
     def mtime(self, cachefile):
-        try:
-            return os.stat(cachefile)[8]
-        except OSError:
-            return 0
+        return bb.parse.cached_mtime_noerror(cachefile)
 
-    def load_bbfile( self, bbfile , cooker):
+    def handle_data(self, file_name, cacheData):
+        """
+        Save data we need into the cache
+        """
+
+        pn = self.getVar('PN', file_name, True)
+        pv = self.getVar('PV', file_name, True)
+        pr = self.getVar('PR', file_name, True)
+        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
+        provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
+        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
+        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
+        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
+        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
+
+        cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
+        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
+
+        # build PackageName to FileName lookup table
+        if pn not in cacheData.pkg_pn:
+            cacheData.pkg_pn[pn] = []
+        cacheData.pkg_pn[pn].append(file_name)
+
+        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
+
+        # build FileName to PackageName lookup table
+        cacheData.pkg_fn[file_name] = pn
+        cacheData.pkg_pvpr[file_name] = (pv,pr)
+        cacheData.pkg_dp[file_name] = dp
+
+        # Build forward and reverse provider hashes
+        # Forward: virtual -> [filenames]
+        # Reverse: PN -> [virtuals]
+        if pn not in cacheData.pn_provides:
+            cacheData.pn_provides[pn] = Set()
+        cacheData.pn_provides[pn] |= provides
+
+        for provide in provides:
+            if provide not in cacheData.providers:
+                cacheData.providers[provide] = []
+            cacheData.providers[provide].append(file_name)
+
+        cacheData.deps[file_name] = Set()
+        for dep in depends:
+            cacheData.all_depends.add(dep)
+            cacheData.deps[file_name].add(dep)
+
+        # Build reverse hash for PACKAGES, so runtime dependencies
+        # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
+        for package in packages:
+            if not package in cacheData.packages:
+                cacheData.packages[package] = []
+            cacheData.packages[package].append(file_name)
+            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
+
+        for package in packages_dynamic:
+            if not package in cacheData.packages_dynamic:
+                cacheData.packages_dynamic[package] = []
+            cacheData.packages_dynamic[package].append(file_name)
+
+        for rprovide in rprovides:
+            if not rprovide in cacheData.rproviders:
+                cacheData.rproviders[rprovide] = []
+            cacheData.rproviders[rprovide].append(file_name)
+
+        # Build hash of runtime depends and rececommends
+
+        def add_dep(deplist, deps):
+            for dep in deps:
+                if not dep in deplist:
+                    deplist[dep] = ""
+
+        if not file_name in cacheData.rundeps:
+            cacheData.rundeps[file_name] = {}
+        if not file_name in cacheData.runrecs:
+            cacheData.runrecs[file_name] = {}
+
+        for package in packages + [pn]:
+            if not package in cacheData.rundeps[file_name]:
+                cacheData.rundeps[file_name][package] = {}
+            if not package in cacheData.runrecs[file_name]:
+                cacheData.runrecs[file_name][package] = {}
+
+            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
+            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
+            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
+            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
+
+        # Collect files we may need for possible world-dep
+        # calculations
+        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
+            cacheData.possible_world.append(file_name)
+
+
+    def load_bbfile( self, bbfile , config):
         """
         Load and parse one .bb build file
         Return the data and whether parsing resulted in the file being skipped
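The new handle_data() above flattens each parsed recipe into plain dictionaries on a CacheData object: pkg_pn maps PN to recipe files, pkg_fn and pkg_pvpr map a recipe file back to its PN and (PV, PR), providers maps each PROVIDES item to candidate files, and rundeps/runrecs hold per-package runtime dependencies and recommendations. As an illustration of how a consumer might query those tables, here is a hypothetical helper (not part of the patch; BitBake's real provider selection also weighs preferences and priorities):

# Sketch only: list every recipe that can provide 'item' from a populated CacheData.
def candidate_recipes(cacheData, item):
    """Return (recipe file, PN, (PV, PR)) for each recipe providing 'item'."""
    return [(fn, cacheData.pkg_fn[fn], cacheData.pkg_pvpr[fn])
            for fn in cacheData.providers.get(item, [])]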
@@ -257,25 +350,15 @@ class Cache:
         import bb
         from bb import utils, data, parse, debug, event, fatal
 
-        topdir = data.getVar('TOPDIR', cooker.configuration.data)
-        if not topdir:
-            topdir = os.path.abspath(os.getcwd())
-            # set topdir to here
-            data.setVar('TOPDIR', topdir, cooker.configuration)
-        bbfile = os.path.abspath(bbfile)
-        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
         # expand tmpdir to include this topdir
-        data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data)
-        # set topdir to location of .bb file
-        topdir = bbfile_loc
-        #data.setVar('TOPDIR', topdir, cfg)
-        # go there
+        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
+        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
         oldpath = os.path.abspath(os.getcwd())
-        if self.mtime(topdir):
-            os.chdir(topdir)
-        bb_data = data.init_db(cooker.configuration.data)
+        if self.mtime(bbfile_loc):
+            os.chdir(bbfile_loc)
+        bb_data = data.init_db(config)
         try:
-            parse.handle(bbfile, bb_data) # read .bb data
+            bb_data = parse.handle(bbfile, bb_data) # read .bb data
             os.chdir(oldpath)
             return bb_data, False
         except bb.parse.SkipPackage:
@@ -304,3 +387,45 @@ def init(cooker):
     """
     return Cache(cooker)
 
+
+
+#============================================================================#
+# CacheData
+#============================================================================#
+class CacheData:
+    """
+    The data structures we compile from the cached data
+    """
+
+    def __init__(self):
+        """
+        Direct cache variables
+        (from Cache.handle_data)
+        """
+        self.providers = {}
+        self.rproviders = {}
+        self.packages = {}
+        self.packages_dynamic = {}
+        self.possible_world = []
+        self.pkg_pn = {}
+        self.pkg_fn = {}
+        self.pkg_pvpr = {}
+        self.pkg_dp = {}
+        self.pn_provides = {}
+        self.all_depends = Set()
+        self.deps = {}
+        self.rundeps = {}
+        self.runrecs = {}
+        self.task_queues = {}
+        self.task_deps = {}
+        self.stamp = {}
+        self.preferred = {}
+
+        """
+        Indirect Cache variables
+        (set elsewhere)
+        """
+        self.ignored_dependencies = []
+        self.world_target = Set()
+        self.bbfile_priority = {}
+        self.bbfile_config_priorities = []
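Taken together, the patch leaves a two-part API: Cache decides per-file whether a reparse is needed and memoizes variable lookups, while the new CacheData holds the aggregated tables that handle_data() fills in. A rough, hypothetical driver loop showing how the pieces fit (parse_recipes() does not exist in BitBake; the call sequence is inferred from the functions in this diff):

# Sketch only: how a cooker-like caller could drive the Cache/CacheData split.
def parse_recipes(cooker, cfgData, bbfiles):
    cache = init(cooker)              # init() above returns a Cache instance
    cacheData = CacheData()

    for fn in bbfiles:
        cached, skipped = cache.loadData(fn, cfgData)   # parses only if the cache entry is stale
        if skipped:
            continue                  # recipe raised SkipPackage during parsing
        cache.handle_data(fn, cacheData)

    cache.sync()                      # pickle depends_cache to bb_cache.dat for the next run
    return cacheData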