diff options
author | Dongxiao Xu <dongxiao.xu@intel.com> | 2012-02-23 21:47:13 +0800 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-02-23 22:52:15 +0000 |
commit | 8e737db4fc2ab90850c2fe91733011dc4e0a24df (patch) | |
tree | 5e4c649a13f7a3fcfab6373b7305407b71d56881 /bitbake | |
parent | 99d326a818a49faf457c707ceeec6163bf8c8e16 (diff) | |
download | poky-8e737db4fc2ab90850c2fe91733011dc4e0a24df.tar.gz |
cache: Use configuration's hash value to validate cache
Previously we used the file time stamp to judge whether a cache was valid.
This commit introduces a new method, which calculates the total
hash value for a certain configuration's key/value pairs, and tags
it onto the cache filename, for example, bb_cache.dat.xxxyyyzzz.
This mechanism also ensures the cache's correctness if the user
dynamically sets variables from some frontend GUI, like HOB.
(Bitbake rev: 1c1df03a6c4717bfd5faab144c4f8bbfcbae0b57)
Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake')
-rw-r--r-- | bitbake/lib/bb/cache.py | 32 | ||||
-rw-r--r-- | bitbake/lib/bb/cooker.py | 4 | ||||
-rw-r--r-- | bitbake/lib/bb/data_smart.py | 21 |
3 files changed, 36 insertions, 21 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 99e0f34956..3d89435211 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -42,10 +42,10 @@ except ImportError: | |||
42 | logger.info("Importing cPickle failed. " | 42 | logger.info("Importing cPickle failed. " |
43 | "Falling back to a very slow implementation.") | 43 | "Falling back to a very slow implementation.") |
44 | 44 | ||
45 | __cache_version__ = "142" | 45 | __cache_version__ = "143" |
46 | 46 | ||
47 | def getCacheFile(path, filename): | 47 | def getCacheFile(path, filename, data_hash): |
48 | return os.path.join(path, filename) | 48 | return os.path.join(path, filename + "." + data_hash) |
49 | 49 | ||
50 | # RecipeInfoCommon defines common data retrieving methods | 50 | # RecipeInfoCommon defines common data retrieving methods |
51 | # from meta data for caches. CoreRecipeInfo as well as other | 51 | # from meta data for caches. CoreRecipeInfo as well as other |
@@ -245,7 +245,7 @@ class Cache(object): | |||
245 | BitBake Cache implementation | 245 | BitBake Cache implementation |
246 | """ | 246 | """ |
247 | 247 | ||
248 | def __init__(self, data, caches_array): | 248 | def __init__(self, data, data_hash, caches_array): |
249 | # Pass caches_array information into Cache Constructor | 249 | # Pass caches_array information into Cache Constructor |
250 | # It will be used in later for deciding whether we | 250 | # It will be used in later for deciding whether we |
251 | # need extra cache file dump/load support | 251 | # need extra cache file dump/load support |
@@ -257,6 +257,7 @@ class Cache(object): | |||
257 | self.data = None | 257 | self.data = None |
258 | self.data_fn = None | 258 | self.data_fn = None |
259 | self.cacheclean = True | 259 | self.cacheclean = True |
260 | self.data_hash = data_hash | ||
260 | 261 | ||
261 | if self.cachedir in [None, '']: | 262 | if self.cachedir in [None, '']: |
262 | self.has_cache = False | 263 | self.has_cache = False |
@@ -265,26 +266,17 @@ class Cache(object): | |||
265 | return | 266 | return |
266 | 267 | ||
267 | self.has_cache = True | 268 | self.has_cache = True |
268 | self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat") | 269 | self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash) |
269 | 270 | ||
270 | logger.debug(1, "Using cache in '%s'", self.cachedir) | 271 | logger.debug(1, "Using cache in '%s'", self.cachedir) |
271 | bb.utils.mkdirhier(self.cachedir) | 272 | bb.utils.mkdirhier(self.cachedir) |
272 | 273 | ||
273 | # If any of configuration.data's dependencies are newer than the | ||
274 | # cache there isn't even any point in loading it... | ||
275 | newest_mtime = 0 | ||
276 | deps = data.getVar("__base_depends") | ||
277 | |||
278 | old_mtimes = [old_mtime for _, old_mtime in deps] | ||
279 | old_mtimes.append(newest_mtime) | ||
280 | newest_mtime = max(old_mtimes) | ||
281 | |||
282 | cache_ok = True | 274 | cache_ok = True |
283 | if self.caches_array: | 275 | if self.caches_array: |
284 | for cache_class in self.caches_array: | 276 | for cache_class in self.caches_array: |
285 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 277 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
286 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 278 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
287 | cache_ok = cache_ok and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime) | 279 | cache_ok = cache_ok and os.path.exists(cachefile) |
288 | cache_class.init_cacheData(self) | 280 | cache_class.init_cacheData(self) |
289 | if cache_ok: | 281 | if cache_ok: |
290 | self.load_cachefile() | 282 | self.load_cachefile() |
@@ -318,7 +310,7 @@ class Cache(object): | |||
318 | # Calculate the correct cachesize of all those cache files | 310 | # Calculate the correct cachesize of all those cache files |
319 | for cache_class in self.caches_array: | 311 | for cache_class in self.caches_array: |
320 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 312 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
321 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 313 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
322 | with open(cachefile, "rb") as cachefile: | 314 | with open(cachefile, "rb") as cachefile: |
323 | cachesize += os.fstat(cachefile.fileno()).st_size | 315 | cachesize += os.fstat(cachefile.fileno()).st_size |
324 | 316 | ||
@@ -326,7 +318,7 @@ class Cache(object): | |||
326 | 318 | ||
327 | for cache_class in self.caches_array: | 319 | for cache_class in self.caches_array: |
328 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 320 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
329 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 321 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
330 | with open(cachefile, "rb") as cachefile: | 322 | with open(cachefile, "rb") as cachefile: |
331 | pickled = pickle.Unpickler(cachefile) | 323 | pickled = pickle.Unpickler(cachefile) |
332 | while cachefile: | 324 | while cachefile: |
@@ -579,7 +571,7 @@ class Cache(object): | |||
579 | for cache_class in self.caches_array: | 571 | for cache_class in self.caches_array: |
580 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 572 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): |
581 | cache_class_name = cache_class.__name__ | 573 | cache_class_name = cache_class.__name__ |
582 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile) | 574 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
583 | file_dict[cache_class_name] = open(cachefile, "wb") | 575 | file_dict[cache_class_name] = open(cachefile, "wb") |
584 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) | 576 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) |
585 | 577 | ||
@@ -684,7 +676,7 @@ def init(cooker): | |||
684 | Files causing parsing errors are evicted from the cache. | 676 | Files causing parsing errors are evicted from the cache. |
685 | 677 | ||
686 | """ | 678 | """ |
687 | return Cache(cooker.configuration.data) | 679 | return Cache(cooker.configuration.data, cooker.configuration.data_hash) |
688 | 680 | ||
689 | 681 | ||
690 | class CacheData(object): | 682 | class CacheData(object): |
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py index bf25a8320e..b0b58a6fdb 100644 --- a/bitbake/lib/bb/cooker.py +++ b/bitbake/lib/bb/cooker.py | |||
@@ -849,6 +849,7 @@ class BBCooker: | |||
849 | bb.event.fire(bb.event.ConfigParsed(), data) | 849 | bb.event.fire(bb.event.ConfigParsed(), data) |
850 | bb.parse.init_parser(data) | 850 | bb.parse.init_parser(data) |
851 | self.configuration.data = data | 851 | self.configuration.data = data |
852 | self.configuration.data_hash = data.get_hash() | ||
852 | 853 | ||
853 | def handleCollections( self, collections ): | 854 | def handleCollections( self, collections ): |
854 | """Handle collections""" | 855 | """Handle collections""" |
@@ -1494,6 +1495,7 @@ class CookerParser(object): | |||
1494 | self.filelist = filelist | 1495 | self.filelist = filelist |
1495 | self.cooker = cooker | 1496 | self.cooker = cooker |
1496 | self.cfgdata = cooker.configuration.data | 1497 | self.cfgdata = cooker.configuration.data |
1498 | self.cfghash = cooker.configuration.data_hash | ||
1497 | 1499 | ||
1498 | # Accounting statistics | 1500 | # Accounting statistics |
1499 | self.parsed = 0 | 1501 | self.parsed = 0 |
@@ -1509,7 +1511,7 @@ class CookerParser(object): | |||
1509 | self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or | 1511 | self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or |
1510 | multiprocessing.cpu_count()) | 1512 | multiprocessing.cpu_count()) |
1511 | 1513 | ||
1512 | self.bb_cache = bb.cache.Cache(self.cfgdata, cooker.caches_array) | 1514 | self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array) |
1513 | self.fromcache = [] | 1515 | self.fromcache = [] |
1514 | self.willparse = [] | 1516 | self.willparse = [] |
1515 | for filename in self.filelist: | 1517 | for filename in self.filelist: |
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py index ea1347837c..24c7a8fd64 100644 --- a/bitbake/lib/bb/data_smart.py +++ b/bitbake/lib/bb/data_smart.py | |||
@@ -31,6 +31,7 @@ BitBake build tools. | |||
31 | import copy, re | 31 | import copy, re |
32 | from collections import MutableMapping | 32 | from collections import MutableMapping |
33 | import logging | 33 | import logging |
34 | import hashlib | ||
34 | import bb, bb.codeparser | 35 | import bb, bb.codeparser |
35 | from bb import utils | 36 | from bb import utils |
36 | from bb.COW import COWDictBase | 37 | from bb.COW import COWDictBase |
@@ -459,3 +460,23 @@ class DataSmart(MutableMapping): | |||
459 | 460 | ||
460 | def __delitem__(self, var): | 461 | def __delitem__(self, var): |
461 | self.delVar(var) | 462 | self.delVar(var) |
463 | |||
464 | def get_hash(self): | ||
465 | data = "" | ||
466 | keys = iter(self) | ||
467 | for key in keys: | ||
468 | if key in ["TIME", "DATE"]: | ||
469 | continue | ||
470 | if key == "__depends": | ||
471 | deps = list(self.getVar(key, False)) | ||
472 | deps.sort() | ||
473 | value = [deps[i][0] for i in range(len(deps))] | ||
474 | elif key == "PATH": | ||
475 | path = list(set(self.getVar(key, False).split(':'))) | ||
476 | path.sort() | ||
477 | value = " ".join(path) | ||
478 | else: | ||
479 | value = self.getVar(key, False) or "" | ||
480 | data = data + key + ': ' + str(value) + '\n' | ||
481 | |||
482 | return hashlib.md5(data).hexdigest() | ||