Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r-- bitbake/lib/bb/cache.py | 837
1 files changed, 837 insertions, 0 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000000..a1dde96425
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,837 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import logging
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "148"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)

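# Illustrative sketch (arguments are hypothetical): cache files are suffixed
# with the configuration hash, so
#
#   getCacheFile("/build/cache", "bb_cache.dat", "abc123")
#
# returns "/build/cache/bb_cache.dat.abc123", giving each configuration its
# own cache file.
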
# RecipeInfoCommon defines the common methods that caches use to retrieve
# data from recipe metadata. CoreRecipeInfo and any extra RecipeInfo
# classes must inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


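# Illustrative sketch: given a parsed datastore 'd', the helpers above turn
# raw variable values into plain Python structures. The variable values
# shown are assumptions for illustration only:
#
#   RecipeInfoCommon.listvar('PACKAGES', d)          # -> ['foo', 'foo-dev']
#   RecipeInfoCommon.depvar('DEPENDS', d)            # -> ['zlib', 'openssl']
#   RecipeInfoCommon.pkgvar('RDEPENDS', ['foo'], d)  # -> {'foo': ['libbar']}
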
class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData fields set up by CoreRecipeInfo
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build a hash of the runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # Create a collection of all targets, used by sanity-checking
        # tasks (e.g. checking upstream versions, licenses, and the
        # tools needed for task and image creation).
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs


class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # caches_array is passed in so the constructor (and
        # load_cachefile/sync below) can decide whether extra cache
        # files beyond the core one need dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First, validate the cache using the version information
        # stored in the core cache file
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the total size of all the cache files so progress
        # can be reported against it
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if key in self.depends_cache:
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on even percentage boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                previous_progress = current_progress

        # Note: the number of entries in depends_cache corresponds to the
        # number of parsed files; a file that contributes to several cache
        # classes is still a single entry here.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn

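    # Illustrative sketch of the virtual filename convention handled by the
    # two helpers above (the recipe path is hypothetical):
    #
    #   fn, cls = Cache.virtualfn2realfn("virtual:native:/meta/foo_1.0.bb")
    #   # fn == "/meta/foo_1.0.bb", cls == "native"
    #   Cache.realfn2virtual(fn, cls)  # -> "virtual:native:/meta/foo_1.0.bb"
    #   Cache.realfn2virtual(fn, "")   # -> "/meta/foo_1.0.bb"
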
    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            infos = self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

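    # Illustrative sketch: load() returns (cached, infos) where infos is a
    # list of (virtualfn, info_array) pairs and each info_array holds one
    # entry per registered cache class (CoreRecipeInfo first). The recipe
    # path below is hypothetical:
    #
    #   cached, infos = cache.load("/meta/foo_1.0.bb", [], cfgdata)
    #   for virtualfn, info_array in infos:
    #       print virtualfn, info_array[0].pn
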
    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                for f in fl.split():
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

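    # Illustrative sketch: each file-checksums flag value checked above is a
    # space-separated list of "path:exists" entries recorded at parse time,
    # e.g. (hypothetical paths):
    #
    #   "/meta/foo/files/fix.patch:True /meta/foo/files/opt.cfg:False"
    #
    # so the cache is invalidated when a file appears or disappears relative
    # to the recorded True/False state.
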
    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

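    # Illustrative sketch of the on-disk stream that sync() writes and
    # load_cachefile() reads back: the core cache file starts with two
    # version records, then alternates key/value records until EOF, while
    # extra cache files carry key/value records only:
    #
    #   bb_cache.dat.<hash>:   __cache_version__, bb.__version__,
    #                          virtualfn, CoreRecipeInfo, virtualfn, ...
    #   <extra file>.<hash>:   virtualfn, XXXRecipeInfo, virtualfn, ...
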
    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file, returning the dict of
        datastores produced by parsing, keyed by variant
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first checks whether there is a TOPDIR and,
        # if not, calls getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise

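    # Illustrative sketch: for a recipe extended with a "native" variant,
    # the returned dict of datastores would look like (hypothetical recipe):
    #
    #   datastores = Cache.load_bbfile("/meta/foo_1.0.bb", [], config)
    #   # {'': <base datastore>, 'native': <datastore for the native variant>}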

def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)

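# Illustrative sketch: Cache.__init__ above also takes a caches_array
# argument, so a caller constructing the cache directly would do something
# like the following (the caches_array value is an assumption):
#
#   cache = Cache(cooker.configuration.data,
#                 cooker.configuration.data_hash,
#                 [CoreRecipeInfo])
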

class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

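# Illustrative sketch: a cooker-side caller would combine the classes above
# roughly as follows (the path and caches_array value are assumptions):
#
#   caches_array = [CoreRecipeInfo]
#   cachedata = CacheData(caches_array)
#   cache = Cache(cfgdata, data_hash, caches_array)
#   cached, skipped, virtuals = cache.loadData("/meta/foo_1.0.bb", [],
#                                              cfgdata, cachedata)
#   cache.sync()
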
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)

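# Illustrative sketch of the multi-process protocol above: each worker
# process dumps its extra entries to "<cachefile>-<pid>" in save_extras(),
# and the server later folds every such file back into the main cache in
# save_merge(). A concrete cache (class and attribute values below are
# assumptions) would look like:
#
#   class HypotheticalChecksumCache(MultiProcessCache):
#       cache_file_name = "checksum_cache.dat"
#       CACHE_VERSION = 1
#
#   cache = HypotheticalChecksumCache()
#   cache.init_cache(d)            # load shared data under the lock
#   cache.cachedata_extras[0]['/some/file'] = 'deadbeef'
#   cache.save_extras(d)           # worker: dump extras to cachefile-<pid>
#   cache.save_merge(d)            # server: merge cachefile-* and rewrite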