Diffstat (limited to 'bitbake-dev/lib/bb/cache.py')
-rw-r--r-- | bitbake-dev/lib/bb/cache.py | 533
1 file changed, 0 insertions(+), 533 deletions(-)

diff --git a/bitbake-dev/lib/bb/cache.py b/bitbake-dev/lib/bb/cache.py
deleted file mode 100644
index 2f1b8fa601..0000000000
--- a/bitbake-dev/lib/bb/cache.py
+++ /dev/null
@@ -1,533 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake 'Cache' implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os, re
import bb.data
import bb.utils

try:
    import cPickle as pickle
except ImportError:
    import pickle
    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")

__cache_version__ = "130"

class Cache:
    """
    BitBake Cache implementation
    """
    def __init__(self, cooker):

        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        self.clean = {}
        self.checked = {}
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True

        if self.cachedir in [None, '']:
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")

        bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
        try:
            os.stat(self.cachedir)
        except OSError:
            bb.mkdirhier(self.cachedir)

        # If any of configuration.data's dependencies are newer than the
        # cache there isn't even any point in loading it...
        newest_mtime = 0
        deps = bb.data.getVar("__depends", cooker.configuration.data, True)
        for f, old_mtime in deps:
            if old_mtime > newest_mtime:
                newest_mtime = old_mtime

        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
            try:
                p = pickle.Unpickler(file(self.cachefile, "rb"))
                self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError('Cache Version Mismatch')
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError('Bitbake Version Mismatch')
            except EOFError:
                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except:
                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                self.depends_cache = {}
        else:
            try:
                os.stat(self.cachefile)
                bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
            except OSError:
                pass

    def getVar(self, var, fn, exp=0):
        """
        Gets the value of a variable
        (similar to getVar in the data class)

        There are two scenarios:
          1. We have cached data - serve from depends_cache[fn]
          2. We're learning what data to cache - serve from data
             backend but add a copy of the data to the cache.
        """
        if fn in self.clean:
            return self.depends_cache[fn][var]

        if fn not in self.depends_cache:
            self.depends_cache[fn] = {}

        if fn != self.data_fn:
            # We're trying to access data in the cache which doesn't exist
            # yet, and setData hasn't been called to set up the right access.
            # Very bad.
            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))

        self.cacheclean = False
        result = bb.data.getVar(var, self.data, exp)
        self.depends_cache[fn][var] = result
        return result
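
    # A hedged illustration of the two getVar paths above (the file name and
    # values are hypothetical, not taken from a real build):
    #
    #   cache.setData("foo_1.0.bb", "foo_1.0.bb", parsed_data)
    #   cache.getVar("PN", "foo_1.0.bb")  # learning: read from self.data,
    #                                     # record a copy in depends_cache
    #   ...
    #   # once fn has been marked clean by cacheValidUpdate():
    #   cache.getVar("PN", "foo_1.0.bb")  # served from depends_cache[fn]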

    def setData(self, virtualfn, fn, data):
        """
        Called to prime bb_cache ready to learn which variables to cache.
        Will be followed by calls to self.getVar which aren't cached
        but can be fulfilled from self.data.
        """
        self.data_fn = virtualfn
        self.data = data

        # Make sure __depends makes it into the depends_cache.
        # If we're a virtual class we need to make sure all our depends are
        # appended to the depends of fn.
        depends = self.getVar("__depends", virtualfn, True) or []
        if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
            self.depends_cache[fn]["__depends"] = depends
        for dep in depends:
            if dep not in self.depends_cache[fn]["__depends"]:
                self.depends_cache[fn]["__depends"].append(dep)

        # Make sure BBCLASSEXTEND always makes the cache too
        self.getVar('BBCLASSEXTEND', virtualfn, True)

        self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)

    def virtualfn2realfn(self, virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            cls = virtualfn.split(':', 2)[1]
            fn = virtualfn.replace('virtual:' + cls + ':', '')
        #bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
        return (fn, cls)

    def realfn2virtual(self, realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
            return realfn
        #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
        return "virtual:" + cls + ":" + realfn
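
    # For example (hypothetical recipe path), the two helpers above are
    # inverses of each other:
    #   realfn2virtual("foo_1.0.bb", "native")        -> "virtual:native:foo_1.0.bb"
    #   virtualfn2realfn("virtual:native:foo_1.0.bb") -> ("foo_1.0.bb", "native")
    #   realfn2virtual("foo_1.0.bb", "")              -> "foo_1.0.bb"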

    def loadDataFull(self, virtualfn, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, cls) = self.virtualfn2realfn(virtualfn)

        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)

        bb_data = self.load_bbfile(fn, cfgData)
        return bb_data[cls]

    def loadData(self, fn, cfgData, cacheData):
        """
        Load a subset of data for fn.
        If the cached data is valid we do nothing; otherwise we need to parse
        the file and set the system to record the variables accessed.
        Return the cache status and whether the file was skipped when parsed.
        """
        skipped = 0
        virtuals = 0

        if fn not in self.checked:
            self.cacheValidUpdate(fn)

        if self.cacheValid(fn):
            multi = self.getVar('BBCLASSEXTEND', fn, True)
            for cls in (multi or "").split() + [""]:
                virtualfn = self.realfn2virtual(fn, cls)
                if self.depends_cache[virtualfn]["__SKIPPED"]:
                    skipped += 1
                    bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
                    continue
                self.handle_data(virtualfn, cacheData)
                virtuals += 1
            return True, skipped, virtuals

        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)

        bb_data = self.load_bbfile(fn, cfgData)

        for data in bb_data:
            virtualfn = self.realfn2virtual(fn, data)
            self.setData(virtualfn, fn, bb_data[data])
            if self.getVar("__SKIPPED", virtualfn, True):
                skipped += 1
                bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
            else:
                self.handle_data(virtualfn, cacheData)
                virtuals += 1
        return False, skipped, virtuals
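
    # Sketch of a caller's view of loadData (hypothetical file name):
    #   cached, skipped, virtuals = cache.loadData("foo_1.0.bb", cfgData, cacheData)
    # 'cached' is True when every variant was served from the cache, 'skipped'
    # counts variants marked __SKIPPED at parse time, and 'virtuals' counts
    # the variants (base recipe plus each BBCLASSEXTEND class) handled.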


    def cacheValid(self, fn):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked[fn] = ""

        # Pretend we're clean so getVar works
        self.clean[fn] = ""

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
            self.remove(fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
            self.remove(fn)
            return False

        # Check the file's timestamp
        if mtime != self.getVar("CACHETIMESTAMP", fn, True):
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = self.getVar("__depends", fn, True)
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
                    self.remove(fn)
                    return False

        #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
        if fn not in self.clean:
            self.clean[fn] = ""

        # Mark extended class data as clean too
        multi = self.getVar('BBCLASSEXTEND', fn, True)
        for cls in (multi or "").split():
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean[virtualfn] = ""

        return True
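
    # Summarising the checks above: a cached fn is only considered clean if
    # it exists in depends_cache, its current mtime matches the recorded
    # CACHETIMESTAMP, and every (file, mtime) pair in __depends is unchanged
    # on disk. Any failure evicts fn via self.remove().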

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
        if fn in self.depends_cache:
            del self.depends_cache[fn]
        if fn in self.clean:
            del self.clean[fn]

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        import copy

        if not self.has_cache:
            return

        if self.cacheclean:
            bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
            return

        version_data = {}
        version_data['CACHE_VER'] = __cache_version__
        version_data['BITBAKE_VER'] = bb.__version__

        cache_data = copy.deepcopy(self.depends_cache)
        for fn in self.depends_cache:
            if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
                bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
                del cache_data[fn]
            elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
                bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
                del cache_data[fn]

        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
        p.dump([cache_data, version_data])

    def mtime(self, cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def handle_data(self, file_name, cacheData):
        """
        Save data we need into the cache
        """

        pn = self.getVar('PN', file_name, True)
        pe = self.getVar('PE', file_name, True) or "0"
        pv = self.getVar('PV', file_name, True)
        if 'SRCREVINACTION' in pv:
            bb.note("Found SRCREVINACTION in PV (%s) of %s. Please report this bug." % (pv, file_name))
        pr = self.getVar('PR', file_name, True)
        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)

        # build PackageName to FileName lookup table
        if pn not in cacheData.pkg_pn:
            cacheData.pkg_pn[pn] = []
        cacheData.pkg_pn[pn].append(file_name)

        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)

        # build FileName to PackageName lookup table
        cacheData.pkg_fn[file_name] = pn
        cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
        cacheData.pkg_dp[file_name] = dp

        provides = [pn]
        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
            if provide not in provides:
                provides.append(provide)

        # Build forward and reverse provider hashes
        # Forward: virtual -> [filenames]
        # Reverse: PN -> [virtuals]
        if pn not in cacheData.pn_provides:
            cacheData.pn_provides[pn] = []

        cacheData.fn_provides[file_name] = provides
        for provide in provides:
            if provide not in cacheData.providers:
                cacheData.providers[provide] = []
            cacheData.providers[provide].append(file_name)
            if provide not in cacheData.pn_provides[pn]:
                cacheData.pn_provides[pn].append(provide)

        cacheData.deps[file_name] = []
        for dep in depends:
            if dep not in cacheData.deps[file_name]:
                cacheData.deps[file_name].append(dep)
            if dep not in cacheData.all_depends:
                cacheData.all_depends.append(dep)

        # Build reverse hash for PACKAGES, so runtime dependencies
        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
        for package in packages:
            if package not in cacheData.packages:
                cacheData.packages[package] = []
            cacheData.packages[package].append(file_name)
            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, True) or "").split()

        for package in packages_dynamic:
            if package not in cacheData.packages_dynamic:
                cacheData.packages_dynamic[package] = []
            cacheData.packages_dynamic[package].append(file_name)

        for rprovide in rprovides:
            if rprovide not in cacheData.rproviders:
                cacheData.rproviders[rprovide] = []
            cacheData.rproviders[rprovide].append(file_name)

        # Build hash of runtime depends and recommends

        if file_name not in cacheData.rundeps:
            cacheData.rundeps[file_name] = {}
        if file_name not in cacheData.runrecs:
            cacheData.runrecs[file_name] = {}

        rdepends = self.getVar('RDEPENDS', file_name, True) or ""
        rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
        for package in packages + [pn]:
            if package not in cacheData.rundeps[file_name]:
                cacheData.rundeps[file_name][package] = []
            if package not in cacheData.runrecs[file_name]:
                cacheData.runrecs[file_name][package] = []

            cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
            cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")

        # Collect files we may need for possible world-dep
        # calculations
        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
            cacheData.possible_world.append(file_name)

        # Touch this to make sure it's in the cache
        self.getVar('__BB_DONT_CACHE', file_name, True)
        self.getVar('BBCLASSEXTEND', file_name, True)

    def load_bbfile(self, bbfile, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        import bb
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        if bb.parse.cached_mtime_noerror(bbfile_loc):
            os.chdir(bbfile_loc)
        bb_data = data.init_db(config)
        try:
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            os.chdir(oldpath)
            return bb_data
        except:
            os.chdir(oldpath)
            raise

def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker)
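
# A minimal usage sketch under the assumptions above ('cooker' is the cooker
# object with parsed configuration.data; 'fn', 'cfgData' and 'cacheData'
# mirror the parameter names used in this file):
#
#   cache = bb.cache.init(cooker)
#   cached, skipped, virtuals = cache.loadData(fn, cfgData, cacheData)
#   data = cache.loadDataFull(virtualfn, cfgData)   # full reparse when needed
#   cache.sync()                                    # persist on exit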


#============================================================================#
# CacheData
#============================================================================#
class CacheData:
    """
    The data structures we compile from the cached data
    """

    def __init__(self):
        """
        Direct cache variables
        (from Cache.handle_data)
        """
        self.providers = {}
        self.rproviders = {}
        self.packages = {}
        self.packages_dynamic = {}
        self.possible_world = []
        self.pkg_pn = {}
        self.pkg_fn = {}
        self.pkg_pepvpr = {}
        self.pkg_dp = {}
        self.pn_provides = {}
        self.fn_provides = {}
        self.all_depends = []
        self.deps = {}
        self.rundeps = {}
        self.runrecs = {}
        self.task_queues = {}
        self.task_deps = {}
        self.stamp = {}
        self.preferred = {}

        """
        Indirect Cache variables
        (set elsewhere)
        """
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []
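
# Example of how the forward and reverse maps above relate once populated by
# Cache.handle_data (hypothetical recipe foo_1.0.bb with PROVIDES = "virtual/foo"):
#   pkg_pn["foo"]             == ["foo_1.0.bb"]
#   pkg_fn["foo_1.0.bb"]      == "foo"
#   fn_provides["foo_1.0.bb"] == ["foo", "virtual/foo"]
#   providers["virtual/foo"]  == ["foo_1.0.bb"]
#   pn_provides["foo"]        == ["foo", "virtual/foo"]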