diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2016-07-22 11:27:51 +0100 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2016-07-26 08:10:35 +0100 |
commit | 40d45cf7a651bf60f684e3aab32195593ba1172b (patch) | |
tree | c98004f4974f8fd1a6219e90da92805ad16f9e71 /bitbake/lib/bb/cache.py | |
parent | c9e65c5d29afa5a9fc89b929e4f2a3d5954428c0 (diff) | |
download | poky-40d45cf7a651bf60f684e3aab32195593ba1172b.tar.gz |
bitbake: cache: Drop/simplify pointless type checking
Since we no longer have random data like version fields in these structures
and we can assume any extra cache data subclasses our class, simplify the
code.
This is mostly reindenting after removal of the pointless type checks.
(Bitbake rev: 5eb36278ac9975de1945f6da8161187320d90ba7)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r-- | bitbake/lib/bb/cache.py | 127 |
1 file changed, 60 insertions, 67 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index d0fb234f89..cce12d12e9 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -280,10 +280,9 @@ class Cache(object): | |||
280 | cache_ok = True | 280 | cache_ok = True |
281 | if self.caches_array: | 281 | if self.caches_array: |
282 | for cache_class in self.caches_array: | 282 | for cache_class in self.caches_array: |
283 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 283 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
284 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | 284 | cache_ok = cache_ok and os.path.exists(cachefile) |
285 | cache_ok = cache_ok and os.path.exists(cachefile) | 285 | cache_class.init_cacheData(self) |
286 | cache_class.init_cacheData(self) | ||
287 | if cache_ok: | 286 | if cache_ok: |
288 | self.load_cachefile() | 287 | self.load_cachefile() |
289 | elif os.path.isfile(self.cachefile): | 288 | elif os.path.isfile(self.cachefile): |
@@ -296,54 +295,52 @@ class Cache(object): | |||
296 | 295 | ||
297 | # Calculate the correct cachesize of all those cache files | 296 | # Calculate the correct cachesize of all those cache files |
298 | for cache_class in self.caches_array: | 297 | for cache_class in self.caches_array: |
299 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 298 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
300 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | 299 | with open(cachefile, "rb") as cachefile: |
301 | with open(cachefile, "rb") as cachefile: | 300 | cachesize += os.fstat(cachefile.fileno()).st_size |
302 | cachesize += os.fstat(cachefile.fileno()).st_size | ||
303 | 301 | ||
304 | bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data) | 302 | bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data) |
305 | 303 | ||
306 | for cache_class in self.caches_array: | 304 | for cache_class in self.caches_array: |
307 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 305 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
308 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | 306 | with open(cachefile, "rb") as cachefile: |
309 | with open(cachefile, "rb") as cachefile: | 307 | pickled = pickle.Unpickler(cachefile) |
310 | pickled = pickle.Unpickler(cachefile) | 308 | # Check cache version information |
311 | # Check cache version information | 309 | try: |
310 | cache_ver = pickled.load() | ||
311 | bitbake_ver = pickled.load() | ||
312 | except Exception: | ||
313 | logger.info('Invalid cache, rebuilding...') | ||
314 | return | ||
315 | |||
316 | if cache_ver != __cache_version__: | ||
317 | logger.info('Cache version mismatch, rebuilding...') | ||
318 | return | ||
319 | elif bitbake_ver != bb.__version__: | ||
320 | logger.info('Bitbake version mismatch, rebuilding...') | ||
321 | return | ||
322 | |||
323 | # Load the rest of the cache file | ||
324 | current_progress = 0 | ||
325 | while cachefile: | ||
312 | try: | 326 | try: |
313 | cache_ver = pickled.load() | 327 | key = pickled.load() |
314 | bitbake_ver = pickled.load() | 328 | value = pickled.load() |
315 | except Exception: | 329 | except Exception: |
316 | logger.info('Invalid cache, rebuilding...') | 330 | break |
317 | return | 331 | if key in self.depends_cache: |
318 | 332 | self.depends_cache[key].append(value) | |
319 | if cache_ver != __cache_version__: | 333 | else: |
320 | logger.info('Cache version mismatch, rebuilding...') | 334 | self.depends_cache[key] = [value] |
321 | return | 335 | # only fire events on even percentage boundaries |
322 | elif bitbake_ver != bb.__version__: | 336 | current_progress = cachefile.tell() + previous_progress |
323 | logger.info('Bitbake version mismatch, rebuilding...') | 337 | current_percent = 100 * current_progress / cachesize |
324 | return | 338 | if current_percent > previous_percent: |
325 | 339 | previous_percent = current_percent | |
326 | # Load the rest of the cache file | 340 | bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize), |
327 | current_progress = 0 | 341 | self.data) |
328 | while cachefile: | 342 | |
329 | try: | 343 | previous_progress += current_progress |
330 | key = pickled.load() | ||
331 | value = pickled.load() | ||
332 | except Exception: | ||
333 | break | ||
334 | if key in self.depends_cache: | ||
335 | self.depends_cache[key].append(value) | ||
336 | else: | ||
337 | self.depends_cache[key] = [value] | ||
338 | # only fire events on even percentage boundaries | ||
339 | current_progress = cachefile.tell() + previous_progress | ||
340 | current_percent = 100 * current_progress / cachesize | ||
341 | if current_percent > previous_percent: | ||
342 | previous_percent = current_percent | ||
343 | bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize), | ||
344 | self.data) | ||
345 | |||
346 | previous_progress += current_progress | ||
347 | 344 | ||
348 | # Note: depends cache number is corresponding to the parsing file numbers. | 345 | # Note: depends cache number is corresponding to the parsing file numbers. |
349 | # The same file has several caches, still regarded as one item in the cache | 346 | # The same file has several caches, still regarded as one item in the cache |
@@ -407,9 +404,8 @@ class Cache(object): | |||
407 | 404 | ||
408 | info_array = [] | 405 | info_array = [] |
409 | for cache_class in caches_array: | 406 | for cache_class in caches_array: |
410 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 407 | info = cache_class(filename, data) |
411 | info = cache_class(filename, data) | 408 | info_array.append(info) |
412 | info_array.append(info) | ||
413 | infos.append((virtualfn, info_array)) | 409 | infos.append((virtualfn, info_array)) |
414 | 410 | ||
415 | return infos | 411 | return infos |
@@ -601,26 +597,23 @@ class Cache(object): | |||
601 | file_dict = {} | 597 | file_dict = {} |
602 | pickler_dict = {} | 598 | pickler_dict = {} |
603 | for cache_class in self.caches_array: | 599 | for cache_class in self.caches_array: |
604 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 600 | cache_class_name = cache_class.__name__ |
605 | cache_class_name = cache_class.__name__ | 601 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) |
606 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | 602 | file_dict[cache_class_name] = open(cachefile, "wb") |
607 | file_dict[cache_class_name] = open(cachefile, "wb") | 603 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) |
608 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) | 604 | pickler_dict[cache_class_name].dump(__cache_version__) |
609 | pickler_dict[cache_class_name].dump(__cache_version__) | 605 | pickler_dict[cache_class_name].dump(bb.__version__) |
610 | pickler_dict[cache_class_name].dump(bb.__version__) | ||
611 | 606 | ||
612 | try: | 607 | try: |
613 | for key, info_array in self.depends_cache.items(): | 608 | for key, info_array in self.depends_cache.items(): |
614 | for info in info_array: | 609 | for info in info_array: |
615 | if isinstance(info, RecipeInfoCommon): | 610 | cache_class_name = info.__class__.__name__ |
616 | cache_class_name = info.__class__.__name__ | 611 | pickler_dict[cache_class_name].dump(key) |
617 | pickler_dict[cache_class_name].dump(key) | 612 | pickler_dict[cache_class_name].dump(info) |
618 | pickler_dict[cache_class_name].dump(info) | ||
619 | finally: | 613 | finally: |
620 | for cache_class in self.caches_array: | 614 | for cache_class in self.caches_array: |
621 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 615 | cache_class_name = cache_class.__name__ |
622 | cache_class_name = cache_class.__name__ | 616 | file_dict[cache_class_name].close() |
623 | file_dict[cache_class_name].close() | ||
624 | 617 | ||
625 | del self.depends_cache | 618 | del self.depends_cache |
626 | 619 | ||
@@ -652,8 +645,7 @@ class Cache(object): | |||
652 | 645 | ||
653 | info_array = [] | 646 | info_array = [] |
654 | for cache_class in self.caches_array: | 647 | for cache_class in self.caches_array: |
655 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 648 | info_array.append(cache_class(realfn, data)) |
656 | info_array.append(cache_class(realfn, data)) | ||
657 | self.add_info(file_name, info_array, cacheData, parsed) | 649 | self.add_info(file_name, info_array, cacheData, parsed) |
658 | 650 | ||
659 | @staticmethod | 651 | @staticmethod |
@@ -721,8 +713,9 @@ class CacheData(object): | |||
721 | def __init__(self, caches_array): | 713 | def __init__(self, caches_array): |
722 | self.caches_array = caches_array | 714 | self.caches_array = caches_array |
723 | for cache_class in self.caches_array: | 715 | for cache_class in self.caches_array: |
724 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | 716 | if not issubclass(cache_class, RecipeInfoCommon): |
725 | cache_class.init_cacheData(self) | 717 | bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class) |
718 | cache_class.init_cacheData(self) | ||
726 | 719 | ||
727 | # Direct cache variables | 720 | # Direct cache variables |
728 | self.task_queues = {} | 721 | self.task_queues = {} |