class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches. Each process records
    its additions in a private "extras" store; save_extras() dumps those
    extras to a per-process side file (cachefile-<n>) under a shared
    lock, and save_merge() later folds all side files back into the main
    cache file under an exclusive lock.
    """

    def __init__(self):
        # Path of the on-disk cache; stays None until init_cache()
        # finds a usable cache directory.
        self.cachefile = None
        # Baseline data loaded from disk (shared across processes).
        self.cachedata = self.create_cachedata()
        # Entries added by this process only; written by save_extras().
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        """Locate the cache directory from the datastore *d* and load any
        previously saved cache data into self.cachedata."""
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if not cachedir:
            return
        bb.utils.mkdirhier(cachedir)
        # cache_file_name is expected to be declared by the subclass.
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        try:
            # 'with' ensures the file handle is closed (the original
            # leaked it). A missing or unreadable/corrupt cache simply
            # means we start empty, but a bare 'except:' would also have
            # swallowed KeyboardInterrupt/SystemExit, so catch Exception.
            with open(self.cachefile, "rb") as f:
                data, version = pickle.Unpickler(f).load()
        except Exception:
            return

        # Discard caches written by an incompatible version of the code.
        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def internSet(self, items):
        """Return a new set with every string in *items* interned, so the
        many duplicate strings held in the cache share storage."""
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        """Hook for subclasses to compact *data* before save_merge()
        writes it out; the default implementation does nothing."""
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        """Return an empty cache structure: a list of dicts (one dict per
        logical sub-cache)."""
        return [{}]

    def save_extras(self, d):
        """Dump this process's extra entries to a unique side file named
        cachefile-<n>, so concurrent processes never collide."""
        if not self.cachefile:
            return

        # Shared lock: many writers may dump extras at the same time;
        # save_merge() takes this same lock exclusively.
        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Start numbering at our pid to make collisions unlikely, then
        # probe upwards until we claim a number whose lock and data file
        # are both free.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            # 'with' closes the handle promptly (the original leaked it).
            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Fold *source* into *dest* in place; on key conflicts the
        existing *dest* entry wins."""
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self, d):
        """Merge every side file written by save_extras() back into the
        main cache file, deleting each side file once merged."""
        if not self.cachefile:
            return

        # Exclusive lock: no process may be writing extras while we merge.
        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                data, version = pickle.Unpickler(f).load()
        except (IOError, EOFError):
            data, version = None, None

        # Missing, truncated or version-mismatched main cache: rebuild
        # from scratch out of the side files alone.
        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        dirname = os.path.dirname(self.cachefile)
        basename = os.path.basename(self.cachefile)
        for f in [y for y in os.listdir(dirname) if y.startswith(basename + '-')]:
            f = os.path.join(dirname, f)
            try:
                with open(f, "rb") as fd:
                    extradata, version = pickle.Unpickler(fd).load()
            except (IOError, EOFError):
                extradata, version = self.create_cachedata(), None

            # Skip (and keep) side files from an incompatible version.
            if version != self.__class__.CACHE_VERSION:
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        with open(self.cachefile, "wb") as fd:
            p = pickle.Pickler(fd, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)