From 8e737db4fc2ab90850c2fe91733011dc4e0a24df Mon Sep 17 00:00:00 2001 From: Dongxiao Xu Date: Thu, 23 Feb 2012 21:47:13 +0800 Subject: cache: Use configuration's hash value to validate cache Previously we used the file time stamp to judge if a cache was valid. This commit introduces a new method, which calculates the total hash value for a certain configuration's key/value pairs, and tags it onto the cache filename, for example, bb_cache.dat.xxxyyyzzz. This mechanism also ensures the cache's correctness when users dynamically set variables from some frontend GUI, like HOB. (Bitbake rev: 1c1df03a6c4717bfd5faab144c4f8bbfcbae0b57) Signed-off-by: Dongxiao Xu Signed-off-by: Richard Purdie --- bitbake/lib/bb/cache.py | 32 ++++++++++++-------------------- 1 file changed, 12 insertions(+), 20 deletions(-) (limited to 'bitbake/lib/bb/cache.py') diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 99e0f34956..3d89435211 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py @@ -42,10 +42,10 @@ except ImportError: logger.info("Importing cPickle failed. " "Falling back to a very slow implementation.") -__cache_version__ = "142" +__cache_version__ = "143" -def getCacheFile(path, filename): - return os.path.join(path, filename) +def getCacheFile(path, filename, data_hash): + return os.path.join(path, filename + "." + data_hash) # RecipeInfoCommon defines common data retrieving methods # from meta data for caches. 
CoreRecipeInfo as well as other @@ -245,7 +245,7 @@ class Cache(object): BitBake Cache implementation """ - def __init__(self, data, caches_array): + def __init__(self, data, data_hash, caches_array): # Pass caches_array information into Cache Constructor # It will be used in later for deciding whether we # need extra cache file dump/load support @@ -257,6 +257,7 @@ class Cache(object): self.data = None self.data_fn = None self.cacheclean = True + self.data_hash = data_hash if self.cachedir in [None, '']: self.has_cache = False @@ -265,26 +266,17 @@ class Cache(object): return self.has_cache = True - self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat") + self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash) logger.debug(1, "Using cache in '%s'", self.cachedir) bb.utils.mkdirhier(self.cachedir) - # If any of configuration.data's dependencies are newer than the - # cache there isn't even any point in loading it... - newest_mtime = 0 - deps = data.getVar("__base_depends") - - old_mtimes = [old_mtime for _, old_mtime in deps] - old_mtimes.append(newest_mtime) - newest_mtime = max(old_mtimes) - cache_ok = True if self.caches_array: for cache_class in self.caches_array: if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): - cachefile = getCacheFile(self.cachedir, cache_class.cachefile) - cache_ok = cache_ok and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime) + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) + cache_ok = cache_ok and os.path.exists(cachefile) cache_class.init_cacheData(self) if cache_ok: self.load_cachefile() @@ -318,7 +310,7 @@ class Cache(object): # Calculate the correct cachesize of all those cache files for cache_class in self.caches_array: if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): - cachefile = getCacheFile(self.cachedir, cache_class.cachefile) + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, 
self.data_hash) with open(cachefile, "rb") as cachefile: cachesize += os.fstat(cachefile.fileno()).st_size @@ -326,7 +318,7 @@ class Cache(object): for cache_class in self.caches_array: if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): - cachefile = getCacheFile(self.cachedir, cache_class.cachefile) + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) with open(cachefile, "rb") as cachefile: pickled = pickle.Unpickler(cachefile) while cachefile: @@ -579,7 +571,7 @@ class Cache(object): for cache_class in self.caches_array: if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): cache_class_name = cache_class.__name__ - cachefile = getCacheFile(self.cachedir, cache_class.cachefile) + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) file_dict[cache_class_name] = open(cachefile, "wb") pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) @@ -684,7 +676,7 @@ def init(cooker): Files causing parsing errors are evicted from the cache. """ - return Cache(cooker.configuration.data) + return Cache(cooker.configuration.data, cooker.configuration.data_hash) class CacheData(object): -- cgit v1.2.3-54-g00ecf