author     Liping Ke <liping.ke@intel.com>                       2011-06-03 08:22:40 +0800
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2011-06-07 22:40:01 +0100
commit     5af197b55a4b779f1ec93186f0723026949ba2b5 (patch)
tree       935a789f32e2f284cd71359461785f1f6eaa751a /bitbake/lib/bb/cache.py
parent     b3c41b1f469a1d4d558e5dbef827322444d3ba54 (diff)
download   poky-5af197b55a4b779f1ec93186f0723026949ba2b5.tar.gz
cache: Implement multiple extra cache fields request support
This patch adds support for extra caches. If a client needs extra cache fields beyond those in CoreRecipeInfo, it simply defines a new XXXRecipeInfo class, as Hob does. The requested extra RecipeInfo names are passed in as an array, so multiple extra caches can be used at the same time alongside CoreRecipeInfo, which every client needs.

(Bitbake rev: 077657e50ad032c0fa876bf54e9802af2686e0fb)

Signed-off-by: Liping Ke <liping.ke@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
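As an illustration of that pattern, here is a minimal sketch of what such an extra cache class might look like. The class name ExampleExtraRecipeInfo, its cache file name and the fields it records are hypothetical; only the hooks it implements (the cachefile class attribute, __init__, init_cacheData() and add_cacheData()) follow the convention that CoreRecipeInfo and this patch rely on.

# Illustrative sketch only -- not part of this patch.
from bb.cache import RecipeInfoCommon

class ExampleExtraRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    # each extra cache class is pickled into its own file alongside bb_cache.dat
    cachefile = "bb_extracache_example.dat"

    def __init__(self, filename, metadata):
        # record whatever extra fields this client is interested in
        self.summary = self.getvar('SUMMARY', metadata)
        self.license = self.getvar('LICENSE', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # per-class storage created on the shared CacheData object
        cachedata.summary = {}
        cachedata.license = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.summary[fn] = self.summary
        cachedata.license[fn] = self.license

Because each class carries its own cachefile name, the save and load paths in the diff below can keep one pickle file per requested cache.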
Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r--  bitbake/lib/bb/cache.py | 177
1 file changed, 119 insertions(+), 58 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 0620621d0b..5b8e3ee8f4 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -43,8 +43,10 @@ except ImportError:
     logger.info("Importing cPickle failed. "
                 "Falling back to a very slow implementation.")
 
-__cache_version__ = "138"
+__cache_version__ = "139"
 
+def getCacheFile(path, filename):
+    return os.path.join(path, filename)
 
 # RecipeInfoCommon defines common data retrieving methods
 # from meta data for caches. CoreRecipeInfo as well as other
@@ -86,12 +88,9 @@ class RecipeInfoCommon(object):
 class CoreRecipeInfo(RecipeInfoCommon):
     __slots__ = ()
 
-    def __init__(self, filename, metadata):
-        self.name = "base"
-        # please override this member with the correct data cache file
-        # such as (bb_cache.dat, bb_extracache_hob.dat)
-        self.cachefile = "bb_cache.dat"
+    cachefile = "bb_cache.dat"
 
+    def __init__(self, filename, metadata):
         self.file_depends = metadata.getVar('__depends', False)
         self.timestamp = bb.parse.cached_mtime(filename)
         self.variants = self.listvar('__VARIANTS', metadata) + ['']
@@ -265,7 +264,7 @@ class Cache(object):
             return
 
         self.has_cache = True
-        self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
+        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat")
 
         logger.debug(1, "Using cache in '%s'", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
@@ -279,12 +278,21 @@
         old_mtimes.append(newest_mtime)
         newest_mtime = max(old_mtimes)
 
-        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
+        bNeedUpdate = True
+        if self.caches_array:
+            for cache_class in self.caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
+                    bNeedUpdate = bNeedUpdate and (bb.parse.cached_mtime_noerror(cachefile) >= newest_mtime)
+                    cache_class.init_cacheData(self)
+        if bNeedUpdate:
             self.load_cachefile()
         elif os.path.isfile(self.cachefile):
             logger.info("Out of date cache found, rebuilding...")
 
     def load_cachefile(self):
+        # Firstly, using core cache file information for
+        # valid checking
         with open(self.cachefile, "rb") as cachefile:
             pickled = pickle.Unpickler(cachefile)
             try:
@@ -301,31 +309,52 @@ class Cache(object):
                 logger.info('Bitbake version mismatch, rebuilding...')
                 return
 
-            cachesize = os.fstat(cachefile.fileno()).st_size
-            bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
 
-            previous_percent = 0
-            while cachefile:
-                try:
-                    key = pickled.load()
-                    value = pickled.load()
-                except Exception:
-                    break
+        cachesize = 0
+        previous_progress = 0
+        previous_percent = 0
 
-                self.depends_cache[key] = value
-
-                # only fire events on even percentage boundaries
-                current_progress = cachefile.tell()
-                current_percent = 100 * current_progress / cachesize
-                if current_percent > previous_percent:
-                    previous_percent = current_percent
-                    bb.event.fire(bb.event.CacheLoadProgress(current_progress),
-                                  self.data)
-
-            bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
-                                                      len(self.depends_cache)),
-                          self.data)
+        # Calculate the correct cachesize of all those cache files
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
+                with open(cachefile, "rb") as cachefile:
+                    cachesize += os.fstat(cachefile.fileno()).st_size
 
+        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
+                with open(cachefile, "rb") as cachefile:
+                    pickled = pickle.Unpickler(cachefile)
+                    while cachefile:
+                        try:
+                            key = pickled.load()
+                            value = pickled.load()
+                        except Exception:
+                            break
+                        if self.depends_cache.has_key(key):
+                            self.depends_cache[key].append(value)
+                        else:
+                            self.depends_cache[key] = [value]
+                        # only fire events on even percentage boundaries
+                        current_progress = cachefile.tell() + previous_progress
+                        current_percent = 100 * current_progress / cachesize
+                        if current_percent > previous_percent:
+                            previous_percent = current_percent
+                            bb.event.fire(bb.event.CacheLoadProgress(current_progress),
+                                          self.data)
+
+            previous_progress += current_progress
+
+        # Note: depends cache number is corresponding to the parsing file numbers.
+        # The same file has several caches, still regarded as one item in the cache
+        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
+                                                  len(self.depends_cache)),
+                      self.data)
+
+
     @staticmethod
     def virtualfn2realfn(virtualfn):
         """
@@ -376,8 +405,14 @@ class Cache(object):
             depends |= (data.getVar("__depends", False) or set())
             if depends and not variant:
                 data.setVar("__depends", depends)
-            info = CoreRecipeInfo(filename, data)
-            infos.append((virtualfn, info))
+
+            info_array = []
+            for cache_class in caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    info = cache_class(filename, data)
+                    info_array.append(info)
+            infos.append((virtualfn, info_array))
+
         return infos
 
     def load(self, filename, appends, configdata):
@@ -391,8 +426,9 @@ class Cache(object):
         cached = self.cacheValid(filename)
         if cached:
             infos = []
-            info = self.depends_cache[filename]
-            for variant in info.variants:
+            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
+            info_array = self.depends_cache[filename]
+            for variant in info_array[0].variants:
                 virtualfn = self.realfn2virtual(filename, variant)
                 infos.append((virtualfn, self.depends_cache[virtualfn]))
         else:
@@ -408,12 +444,12 @@ class Cache(object):
         skipped, virtuals = 0, 0
 
         cached, infos = self.load(fn, appends, cfgData)
-        for virtualfn, info in infos:
-            if info.skipped:
+        for virtualfn, info_array in infos:
+            if info_array[0].skipped:
                 logger.debug(1, "Skipping %s", virtualfn)
                 skipped += 1
             else:
-                self.add_info(virtualfn, info, cacheData, not cached)
+                self.add_info(virtualfn, info_array, cacheData, not cached)
                 virtuals += 1
 
         return cached, skipped, virtuals
@@ -457,15 +493,15 @@ class Cache(object):
             self.remove(fn)
             return False
 
-        info = self.depends_cache[fn]
+        info_array = self.depends_cache[fn]
         # Check the file's timestamp
-        if mtime != info.timestamp:
+        if mtime != info_array[0].timestamp:
             logger.debug(2, "Cache: %s changed", fn)
             self.remove(fn)
             return False
 
         # Check dependencies are still valid
-        depends = info.file_depends
+        depends = info_array[0].file_depends
         if depends:
             for f, old_mtime in depends:
                 fmtime = bb.parse.cached_mtime_noerror(f)
@@ -483,7 +519,7 @@ class Cache(object):
                     return False
 
         invalid = False
-        for cls in info.variants:
+        for cls in info_array[0].variants:
             virtualfn = self.realfn2virtual(fn, cls)
             self.clean.add(virtualfn)
             if virtualfn not in self.depends_cache:
@@ -530,13 +566,30 @@ class Cache(object):
             logger.debug(2, "Cache is clean, not saving.")
             return
 
-        with open(self.cachefile, "wb") as cachefile:
-            pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
-            pickler.dump(__cache_version__)
-            pickler.dump(bb.__version__)
-            for key, value in self.depends_cache.iteritems():
-                pickler.dump(key)
-                pickler.dump(value)
+        file_dict = {}
+        pickler_dict = {}
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cache_class_name = cache_class.__name__
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile)
+                file_dict[cache_class_name] = open(cachefile, "wb")
+                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
+
+        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
+        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
+
+        try:
+            for key, info_array in self.depends_cache.iteritems():
+                for info in info_array:
+                    if isinstance(info, RecipeInfoCommon):
+                        cache_class_name = info.__class__.__name__
+                        pickler_dict[cache_class_name].dump(key)
+                        pickler_dict[cache_class_name].dump(info)
+        finally:
+            for cache_class in self.caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    cache_class_name = cache_class.__name__
+                    file_dict[cache_class_name].close()
 
         del self.depends_cache
 
@@ -544,17 +597,17 @@ class Cache(object):
     def mtime(cachefile):
         return bb.parse.cached_mtime_noerror(cachefile)
 
-    def add_info(self, filename, info, cacheData, parsed=None):
-        if not info.skipped:
-            cacheData.add_from_recipeinfo(filename, info)
+    def add_info(self, filename, info_array, cacheData, parsed=None):
+        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
+            cacheData.add_from_recipeinfo(filename, info_array)
 
         if not self.has_cache:
            return
 
-        if (info.skipped or 'SRCREVINACTION' not in info.pv) and not info.nocache:
+        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
-            self.depends_cache[filename] = info
+            self.depends_cache[filename] = info_array
 
     def add(self, file_name, data, cacheData, parsed=None):
         """
@@ -562,8 +615,12 @@ class Cache(object):
         """
 
         realfn = self.virtualfn2realfn(file_name)[0]
-        info = CoreRecipeInfo(realfn, data)
-        self.add_info(file_name, info, cacheData, parsed)
+
+        info_array = []
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                info_array.append(cache_class(realfn, data))
+        self.add_info(file_name, info_array, cacheData, parsed)
 
     @staticmethod
     def load_bbfile(bbfile, appends, config):
@@ -629,7 +686,10 @@ class CacheData(object):
 
     def __init__(self, caches_array):
         self.caches_array = caches_array
-        CoreRecipeInfo.init_cacheData(self)
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cache_class.init_cacheData(self)
+
         # Direct cache variables
         self.task_queues = {}
         self.preferred = {}
@@ -640,7 +700,8 @@ class CacheData(object):
         self.bbfile_priority = {}
         self.bbfile_config_priorities = []
 
-    def add_from_recipeinfo(self, fn, info):
-        info.add_cacheData(self, fn)
+    def add_from_recipeinfo(self, fn, info_array):
+        for info in info_array:
+            info.add_cacheData(self, fn)
 
 
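For completeness, a rough usage sketch of how a client could assemble the caches_array that the code above iterates over. ExampleExtraRecipeInfo is the hypothetical class from the earlier sketch, and the way the cooker actually passes the array around is outside this diff.

# Illustrative sketch only -- not part of this patch.
from bb.cache import CacheData, CoreRecipeInfo, RecipeInfoCommon

# CoreRecipeInfo comes first: the code above reads info_array[0] for the core
# fields (skipped, timestamp, variants, ...), so every client needs it.
caches_array = [CoreRecipeInfo, ExampleExtraRecipeInfo]

# The same guard the patch uses everywhere: entries that are not
# RecipeInfoCommon subclasses are silently ignored.
for cache_class in caches_array:
    if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
        print("%s is stored in %s" % (cache_class.__name__, cache_class.cachefile))

# CacheData gives every requested class a chance to create its own storage
# attributes via init_cacheData(); add_from_recipeinfo() then fans each parsed
# recipe out to every info object in its info_array.
cachedata = CacheData(caches_array)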