author:    Tudor Florea <tudor.florea@enea.com>  2014-10-16 03:05:19 +0200
committer: Tudor Florea <tudor.florea@enea.com>  2014-10-16 03:05:19 +0200
commit:    c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch)
tree:      bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /bitbake/lib/bb/cache.py
download:  poky-daisy-140929.tar.gz

initial commit for Enea Linux 4.0-140929 (daisy-140929)

Migrated from the internal git server on the daisy-enea-point-release branch

Signed-off-by: Tudor Florea <tudor.florea@enea.com>

Diffstat (limited to 'bitbake/lib/bb/cache.py')

-rw-r--r--  bitbake/lib/bb/cache.py | 847
1 file changed, 847 insertions, 0 deletions

diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000000..431fc079e4
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,847 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import logging
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "147"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)

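# For illustration (values hypothetical): with path="/build/cache",
# filename="bb_cache.dat" and data_hash="deadbeef", this returns
# "/build/cache/bb_cache.dat.deadbeef". Embedding the configuration hash
# in the file name means a changed configuration simply selects a
# different cache file instead of clobbering the old one.
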
# RecipeInfoCommon defines common methods for retrieving data
# from recipe metadata for the caches. CoreRecipeInfo and any
# extra RecipeInfo classes must inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''

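    # Name-expansion sketch (package and task names hypothetical):
    #   pkgvar('RDEPENDS', ['foo', 'foo-dev'], metadata)
    #       -> reads RDEPENDS_foo and RDEPENDS_foo-dev,
    #          returning {'foo': [...], 'foo-dev': [...]}
    #   taskvar('BB_BASEHASH', ['do_compile'], metadata)
    #       -> reads BB_BASEHASH_task-do_compile,
    #          returning {'do_compile': '<hash>'}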

class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs


class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into the Cache constructor;
        # it is used later to decide whether extra cache file
        # dump/load support is needed.
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First, check validity using the version information in the
        # core cache file
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the total size of all the cache files
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    # The file object is always truthy; the loop exits via
                    # the break when the pickle stream is exhausted.
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if key in self.depends_cache:
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on whole-percent boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                    previous_progress += current_progress

        # Note: the depends_cache entry count corresponds to the number of
        # parsed files; a file may contribute several cache objects but
        # still counts as one item in the cache.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)
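
    # On-disk layout read above (it mirrors what sync() writes below):
    # the core cache file begins with two pickled version records,
    # __cache_version__ and bb.__version__, followed by alternating
    # key/value records of recipe filename and RecipeInfo object; the
    # extra cache files carry key/value records only.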

    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
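
    # The result is a list of (virtual filename, info_array) pairs, one
    # per variant, with CoreRecipeInfo first in each info_array, e.g.
    # (paths hypothetical):
    #   [("virtual:native:/meta/foo_1.0.bb", [CoreRecipeInfo, ...]),
    #    ("/meta/foo_1.0.bb", [CoreRecipeInfo, ...])]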

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            # Parse and fall through so that both branches return the same
            # (cached, infos) shape, matching the unpacking in loadData()
            infos = self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                for f in fl.split():
                    if not ('*' in f or os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s was removed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file, returning the dictionary of
        datastores keyed by variant
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

    """
    # Cache() also needs the list of RecipeInfo cache classes; this is
    # assumed to be available as cooker.caches_array.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash,
                 cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def internSet(self, items):
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)
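
    # Lock and segment naming sketch (cache file name hypothetical): with
    # cachefile "/persist/bb_codeparser.dat", a writer process holds the
    # shared "/persist/bb_codeparser.dat.lock", then dumps its extras to
    # "/persist/bb_codeparser.dat-<n>" guarded by
    # "/persist/bb_codeparser.dat.lock.<n>", where <n> starts at the
    # process PID and increases until a free slot is found.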

    def merge_data(self, source, dest):
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
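
    # Merge semantics sketch: entries already present in dest win, e.g.
    #   source = [{'x': 1, 'y': 2}], dest = [{'x': 0}]
    # leaves dest as [{'x': 0, 'y': 2}].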

    def save_merge(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                extradata, version = self.create_cachedata(), None

            if version != self.__class__.CACHE_VERSION:
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)