author     Chris Larson <chris_larson@mentor.com>    2010-11-17 20:27:25 -0700
committer  Richard Purdie <rpurdie@linux.intel.com>  2011-01-04 14:46:40 +0000
commit     c4d939079e19659540145c55ad44cc23fa254c27 (patch)
tree       851786be512a8506290b09f2031e91e486906c07 /bitbake
parent     77d52cb1b8310251e7b33cf16eb00dab7d8cc7ad (diff)
download   poky-c4d939079e19659540145c55ad44cc23fa254c27.tar.gz
cache: pyflakes/pep8/pylint cleanup
(Bitbake rev: 06420ff839ffc37de3e42474b8b0e47c4608a985)

Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/lib/bb/cache.py  69
1 file changed, 40 insertions(+), 29 deletions(-)
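
The diff below is a style-only cleanup: long logger messages are split with implicit string concatenation, unused loop variables become "_", keyword defaults lose the spaces around "=", and methods that never touch self are converted to static methods. A minimal standalone sketch of those patterns follows; the class and names here are illustrative only and are not taken from cache.py.

import logging
import os

logger = logging.getLogger("Example")


class Example:
    # Illustrative only; these names are not part of bitbake's cache.py.

    def __init__(self, cachedir, deps):
        if cachedir in [None, '']:
            # Implicit string concatenation keeps a long log message
            # inside the pep8 line-length limit.
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
        # "_" marks the unused first element of each (file, mtime) pair,
        # so pyflakes stops flagging an unused variable.
        self.old_mtimes = [old_mtime for _, old_mtime in deps]

    def getVar(self, var, exp=0):
        # pep8: no spaces around "=" in a keyword default (was "exp = 0").
        return (var, exp)

    @staticmethod
    def mtime(cachefile):
        # pylint: a method that never touches "self" becomes a staticmethod.
        return os.path.getmtime(cachefile)
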
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index ef8af4e0da..05d8b1e47e 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -40,7 +40,8 @@ try:
     import cPickle as pickle
 except ImportError:
     import pickle
-    logger.info("Importing cPickle failed. Falling back to a very slow implementation.")
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")

 __cache_version__ = "132"

@@ -48,9 +49,8 @@ class Cache:
48 """ 49 """
49 BitBake Cache implementation 50 BitBake Cache implementation
50 """ 51 """
51 def __init__(self, data):
52
53 52
53 def __init__(self, data):
54 self.cachedir = bb.data.getVar("CACHE", data, True) 54 self.cachedir = bb.data.getVar("CACHE", data, True)
55 self.clean = set() 55 self.clean = set()
56 self.checked = set() 56 self.checked = set()
@@ -61,7 +61,8 @@ class Cache:

         if self.cachedir in [None, '']:
             self.has_cache = False
-            logger.info("Not using a cache. Set CACHE = <directory> to enable.")
+            logger.info("Not using a cache. "
+                        "Set CACHE = <directory> to enable.")
             return

         self.has_cache = True
@@ -75,7 +76,7 @@ class Cache:
         newest_mtime = 0
         deps = bb.data.getVar("__depends", data)

-        old_mtimes = [old_mtime for f, old_mtime in deps]
+        old_mtimes = [old_mtime for _, old_mtime in deps]
         old_mtimes.append(newest_mtime)
         newest_mtime = max(old_mtimes)

@@ -97,7 +98,7 @@ class Cache:
             if os.path.isfile(self.cachefile):
                 logger.info("Out of date cache found, rebuilding...")

-    def getVar(self, var, fn, exp = 0):
+    def getVar(self, var, fn, exp=0):
         """
         Gets the value of a variable
         (similar to getVar in the data class)
@@ -114,7 +115,7 @@ class Cache:

         if fn != self.data_fn:
             # We're trying to access data in the cache which doesn't exist
-            # yet setData hasn't been called to setup the right access. Very bad.
+            # yet setData hasn't been called to setup the right access
             logger.error("data_fn %s and fn %s don't match", self.data_fn, fn)

         self.cacheclean = False
@@ -133,8 +134,8 @@ class Cache:
         self.data = data

         # Make sure __depends makes the depends_cache
-        # If we're a virtual class we need to make sure all our depends are appended
-        # to the depends of fn.
+        # If we're a virtual class we need to make sure all our depends are
+        # appended to the depends of fn.
         depends = self.getVar("__depends", virtualfn) or set()
         self.depends_cache.setdefault(fn, {})
         if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
@@ -147,7 +148,8 @@ class Cache:

         self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)

-    def virtualfn2realfn(self, virtualfn):
+    @staticmethod
+    def virtualfn2realfn(virtualfn):
         """
         Convert a virtual file name to a real one + the associated subclass keyword
         """
@@ -159,7 +161,8 @@ class Cache:
         fn = virtualfn.replace('virtual:' + cls + ':', '')
         return (fn, cls)

-    def realfn2virtual(self, realfn, cls):
+    @staticmethod
+    def realfn2virtual(realfn, cls):
         """
         Convert a real filename + the associated subclass keyword to a virtual filename
         """
@@ -278,7 +281,8 @@ class Cache:
                     return False

                 if (fmtime != old_mtime):
-                    logger.debug(2, "Cache: %s's dependency %s changed", fn, f)
+                    logger.debug(2, "Cache: %s's dependency %s changed",
+                                 fn, f)
                     self.remove(fn)
                     return False

@@ -293,7 +297,7 @@ class Cache:
                 logger.debug(2, "Cache: %s is not cached", virtualfn)
                 invalid = True

-        # If any one of the varients is not present, mark cache as invalid for all
+        # If any one of the variants is not present, mark as invalid for all
         if invalid:
             for cls in (multi or "").split():
                 virtualfn = self.realfn2virtual(fn, cls)
@@ -342,15 +346,18 @@ class Cache:
             if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
                 logger.debug(2, "Not caching %s, marked as not cacheable", fn)
                 del cache_data[fn]
-            elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
-                logger.error("Not caching %s as it had SRCREVINACTION in PV. Please report this bug", fn)
+            elif ('PV' in self.depends_cache[fn] and
+                  'SRCREVINACTION' in self.depends_cache[fn]['PV']):
+                logger.error("Not caching %s as it had SRCREVINACTION in PV. "
+                             "Please report this bug", fn)
                 del cache_data[fn]

-        p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
+        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
         p.dump([cache_data, version_data])
         del self.depends_cache

-    def mtime(self, cachefile):
+    @staticmethod
+    def mtime(cachefile):
         return bb.parse.cached_mtime_noerror(cachefile)

     def handle_data(self, file_name, cacheData):
@@ -358,15 +365,15 @@ class Cache:
         Save data we need into the cache
         """

-        pn        = self.getVar('PN', file_name, True)
-        pe        = self.getVar('PE', file_name, True) or "0"
-        pv        = self.getVar('PV', file_name, True)
+        pn = self.getVar('PN', file_name, True)
+        pe = self.getVar('PE', file_name, True) or "0"
+        pv = self.getVar('PV', file_name, True)
         if 'SRCREVINACTION' in pv:
             logger.info("Found SRCREVINACTION in PV (%s) or %s. Please report this bug.", pv, file_name)
-        pr        = self.getVar('PR', file_name, True)
-        dp        = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
-        depends   = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
-        packages  = (self.getVar('PACKAGES', file_name, True) or "").split()
+        pr = self.getVar('PR', file_name, True)
+        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
+        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
+        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
         packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
         rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

@@ -438,7 +445,8 @@ class Cache:
         self.getVar('__BB_DONT_CACHE', file_name, True)
         self.getVar('__VARIANTS', file_name, True)

-    def load_bbfile(self, bbfile, appends, config):
+    @staticmethod
+    def load_bbfile(bbfile, appends, config):
         """
         Load and parse one .bb build file
         Return the data and whether parsing resulted in the file being skipped
@@ -464,13 +472,16 @@ class Cache:
         try:
             if appends:
                 data.setVar('__BBAPPEND', " ".join(appends), bb_data)
-            bb_data = parse.handle(bbfile, bb_data) # read .bb data
-            if chdir_back: os.chdir(oldpath)
+            bb_data = parse.handle(bbfile, bb_data)
+            if chdir_back:
+                os.chdir(oldpath)
             return bb_data
         except:
-            if chdir_back: os.chdir(oldpath)
+            if chdir_back:
+                os.chdir(oldpath)
             raise

+
 def init(cooker):
     """
     The Objective: Cache the minimum amount of data possible yet get to the
@@ -505,7 +516,7 @@ class CacheData:
         Direct cache variables
         (from Cache.handle_data)
         """
-        self.providers   = defaultdict(list)
+        self.providers = defaultdict(list)
         self.rproviders = defaultdict(list)
         self.packages = defaultdict(list)
         self.packages_dynamic = defaultdict(list)
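
A usage note on the @staticmethod conversions above: virtualfn2realfn and realfn2virtual no longer need a Cache instance. The sketch below reproduces their behaviour from the lines visible in the hunks; the 'virtual:' prefix check and the split(':', 2) parsing are filled in as assumptions, so treat this as illustrative rather than a copy of the bitbake implementation.

def virtualfn2realfn(virtualfn):
    # Convert a virtual file name to a real one + the associated subclass keyword.
    # Only the replace() call and the return value appear verbatim in the diff;
    # the prefix handling here is an assumption.
    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        cls = virtualfn.split(':', 2)[1]
        fn = virtualfn.replace('virtual:' + cls + ':', '')
    return (fn, cls)


def realfn2virtual(realfn, cls):
    # Convert a real filename + the associated subclass keyword to a virtual filename.
    if cls == "":
        return realfn
    return "virtual:" + cls + ":" + realfn


# Round trip: the class extension keyword is encoded into, then recovered
# from, the virtual filename.
assert realfn2virtual("foo.bb", "native") == "virtual:native:foo.bb"
assert virtualfn2realfn("virtual:native:foo.bb") == ("foo.bb", "native")
assert virtualfn2realfn("foo.bb") == ("foo.bb", "")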