diff options
author | Dongxiao Xu <dongxiao.xu@intel.com> | 2012-02-23 21:47:13 +0800 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-02-23 22:52:15 +0000 |
commit | 8e737db4fc2ab90850c2fe91733011dc4e0a24df (patch) | |
tree | 5e4c649a13f7a3fcfab6373b7305407b71d56881 /bitbake/lib/bb/cache.py | |
parent | 99d326a818a49faf457c707ceeec6163bf8c8e16 (diff) | |
download | poky-8e737db4fc2ab90850c2fe91733011dc4e0a24df.tar.gz |
cache: Use configuration's hash value to validate cache
Previously we used the file time stamp to judge whether a cache was valid.
This commit introduces a new method that calculates the total
hash value of a given configuration's key/value pairs and tags
it onto the cache filename, for example, bb_cache.dat.xxxyyyzzz.
This mechanism also ensures the cache's correctness if the user
dynamically sets variables from a frontend GUI, such as Hob.
(Bitbake rev: 1c1df03a6c4717bfd5faab144c4f8bbfcbae0b57)
Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r-- | bitbake/lib/bb/cache.py | 32 |
1 files changed, 12 insertions, 20 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 99e0f34956..3d89435211 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -42,10 +42,10 @@ except ImportError: | |||
42 | logger.info("Importing cPickle failed. " | 42 | logger.info("Importing cPickle failed. " |
43 | "Falling back to a very slow implementation.") | 43 | "Falling back to a very slow implementation.") |
44 | 44 | ||
45 | __cache_version__ = "142" | 45 | __cache_version__ = "143" |
46 | 46 | ||
47 | def getCacheFile(path, filename): | 47 | def getCacheFile(path, filename, data_hash): |
48 | return os.path.join(path, filename) | 48 | return os.path.join(path, filename + "." + data_hash) |
49 | 49 | ||
50 | # RecipeInfoCommon defines common data retrieving methods | 50 | # RecipeInfoCommon defines common data retrieving methods |
51 | # from meta data for caches. CoreRecipeInfo as well as other | 51 | # from meta data for caches. CoreRecipeInfo as well as other |
@@ -245,7 +245,7 @@ class Cache(object): | |||
245 | BitBake Cache implementation | 245 | BitBake Cache implementation |
246 | """ | 246 | """ |
247 | 247 | ||
248 | def __init__(self, data, caches_array): | 248 | def __init__(self, data, data_hash, caches_array): |
249 | # Pass caches_array information into Cache Constructor | 249 | # Pass caches_array information into Cache Constructor |
250 | # It will be used in later for deciding whether we | 250 | # It will be used in later for deciding whether we |
251 | # need extra cache file dump/load support | 251 | # need extra cache file dump/load support |
@@ -257,6 +257,7 @@ class Cache(object): | |||
257 | self.data = None | 257 | self.data = None |
258 | self.data_fn = None | 258 | self.data_fn = None |
259 | self.cacheclean = True | 259 | self.cacheclean = True |
260 | self.data_hash = data_hash | ||
260 | 261 | ||
261 | if self.cachedir in [None, '']: | 262 | if self.cachedir in [None, '']: |
262 | self.has_cache = False | 263 | self.has_cache = False |
@@ -265,26 +266,17 @@ class Cache(object): | |||
265 | return | 266 | return |
266 | 267 | ||
267 | self.has_cache = True | 268 | self.has_cache = True |
268 | self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat") | 269 | self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash) |
269 | 270 | ||
270 | logger.debug(1, "Using cache in '%s'", self.cachedir) | 271 | logger.debug(1, "Using cache in '%s'", self.cachedir) |
271 | bb.utils.mkdirhier(self.cachedir) | 272 | bb.utils.mkdirhier(self.cachedir) |
272 | 273 | ||
273 | # If any of configuration.data's dependencies are newer than the | ||
274 | # cache there isn't even any point in loading it... | ||
275 | newest_mtime = 0 | ||
276 | deps = data.getVar("__base_depends") | ||
277 | |||
278 | old_mtimes = [old_mtime for _, old_mtime in deps] | ||
279 | old_mtimes.append(newest_mtime) | ||
280 | newest_mtime = max(old_mtimes) | ||
281 | |||
282 | cache_ok = True | 274 | cache_ok = True |
283 | if self.caches_array: | 275 | if self.caches_array: |
284 | for cache_class in self.caches_array: | 276 | for cache_class in self.caches_array: |
285 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 277 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
286 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 278 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
287 | cache_ok = cache_ok and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime) | 279 | cache_ok = cache_ok and os.path.exists(cachefile) |
288 | cache_class.init_cacheData(self) | 280 | cache_class.init_cacheData(self) |
289 | if cache_ok: | 281 | if cache_ok: |
290 | self.load_cachefile() | 282 | self.load_cachefile() |
@@ -318,7 +310,7 @@ class Cache(object): | |||
318 | # Calculate the correct cachesize of all those cache files | 310 | # Calculate the correct cachesize of all those cache files |
319 | for cache_class in self.caches_array: | 311 | for cache_class in self.caches_array: |
320 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 312 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
321 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 313 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
322 | with open(cachefile, "rb") as cachefile: | 314 | with open(cachefile, "rb") as cachefile: |
323 | cachesize += os.fstat(cachefile.fileno()).st_size | 315 | cachesize += os.fstat(cachefile.fileno()).st_size |
324 | 316 | ||
@@ -326,7 +318,7 @@ class Cache(object): | |||
326 | 318 | ||
327 | for cache_class in self.caches_array: | 319 | for cache_class in self.caches_array: |
328 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 320 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
329 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 321 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
330 | with open(cachefile, "rb") as cachefile: | 322 | with open(cachefile, "rb") as cachefile: |
331 | pickled = pickle.Unpickler(cachefile) | 323 | pickled = pickle.Unpickler(cachefile) |
332 | while cachefile: | 324 | while cachefile: |
@@ -579,7 +571,7 @@ class Cache(object): | |||
579 | for cache_class in self.caches_array: | 571 | for cache_class in self.caches_array: |
580 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 572 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
581 | cache_class_name = cache_class.__name__ | 573 | cache_class_name = cache_class.__name__ |
582 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 574 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
583 | file_dict[cache_class_name] = open(cachefile, "wb") | 575 | file_dict[cache_class_name] = open(cachefile, "wb") |
584 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) | 576 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) |
585 | 577 | ||
@@ -684,7 +676,7 @@ def init(cooker): | |||
684 | Files causing parsing errors are evicted from the cache. | 676 | Files causing parsing errors are evicted from the cache. |
685 | 677 | ||
686 | """ | 678 | """ |
687 | return Cache(cooker.configuration.data) | 679 | return Cache(cooker.configuration.data, cooker.configuration.data_hash) |
688 | 680 | ||
689 | 681 | ||
690 | class CacheData(object): | 682 | class CacheData(object): |