Diffstat (limited to 'bitbake-dev/lib/bb/cache.py')
-rw-r--r--   bitbake-dev/lib/bb/cache.py   465
1 file changed, 465 insertions, 0 deletions

diff --git a/bitbake-dev/lib/bb/cache.py b/bitbake-dev/lib/bb/cache.py
new file mode 100644
index 0000000000..bcf393a578
--- /dev/null
+++ b/bitbake-dev/lib/bb/cache.py
@@ -0,0 +1,465 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake 'Cache' implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os, re
import bb.data
import bb.utils
from sets import Set

try:
    import cPickle as pickle
except ImportError:
    import pickle
    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")

__cache_version__ = "128"

class Cache:
    """
    BitBake Cache implementation
    """
    def __init__(self, cooker):

        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        self.clean = {}
        self.checked = {}
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True

        if self.cachedir in [None, '']:
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
        else:
            self.has_cache = True
            self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")

            bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
            try:
                os.stat(self.cachedir)
            except OSError:
                bb.mkdirhier(self.cachedir)

        if not self.has_cache:
            return

        # If any of configuration.data's dependencies are newer than the
        # cache there isn't even any point in loading it...
        newest_mtime = 0
        deps = bb.data.getVar("__depends", cooker.configuration.data, True)
        for f, old_mtime in deps:
            if old_mtime > newest_mtime:
                newest_mtime = old_mtime

        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
            try:
                p = pickle.Unpickler(file(self.cachefile, "rb"))
                self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError('Cache Version Mismatch')
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError('Bitbake Version Mismatch')
            except EOFError:
                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except:
                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                self.depends_cache = {}
        else:
            bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")

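    # Illustrative sketch, not part of the original file: the short-circuit
    # above in isolation. The pickled cache is only worth opening if it is
    # at least as new as every configuration dependency (the deps list and
    # path here are made up):
    #
    #   deps = [("conf/bitbake.conf", 1210000000), ("conf/local.conf", 1210001234)]
    #   newest = max([mtime for (f, mtime) in deps] + [0])
    #   if bb.parse.cached_mtime_noerror("/tmp/cache/bb_cache.dat") >= newest:
    #       print "cache file may be current; worth unpickling"
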
    def getVar(self, var, fn, exp = 0):
        """
        Gets the value of a variable
        (similar to getVar in the data class)

        There are two scenarios:
          1. We have cached data - serve from depends_cache[fn]
          2. We're learning what data to cache - serve from data
             backend but add a copy of the data to the cache.
        """
        if fn in self.clean:
            return self.depends_cache[fn][var]

        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}

        if fn != self.data_fn:
            # We're trying to access data in the cache which doesn't exist
            # yet; setData hasn't been called to set up the right access. Very bad.
            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))

        self.cacheclean = False
        result = bb.data.getVar(var, self.data, exp)
        self.depends_cache[fn][var] = result
        return result

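    # Illustrative sketch, not part of the original file: the two getVar
    # scenarios above as a hypothetical session (recipe path and values
    # are invented):
    #
    #   cache.setData("/recipes/foo_1.0.bb", parsed_data)
    #   cache.getVar("PN", "/recipes/foo_1.0.bb")  # scenario 2: read from
    #                                              # parsed_data, record "PN"
    #   # ...on a later run, once cacheValidUpdate() marks the file clean:
    #   cache.getVar("PN", "/recipes/foo_1.0.bb")  # scenario 1: served purely
    #                                              # from depends_cache
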
    def setData(self, fn, data):
        """
        Called to prime bb_cache ready to learn which variables to cache.
        Will be followed by calls to self.getVar which aren't cached
        but can be fulfilled from self.data.
        """
        self.data_fn = fn
        self.data = data

        # Make sure __depends makes it into the depends_cache
        self.getVar("__depends", fn, True)
        self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)

    def loadDataFull(self, fn, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)

        bb_data, skipped = self.load_bbfile(fn, cfgData)
        return bb_data

    def loadData(self, fn, cfgData):
        """
        Load a subset of data for fn.
        If the cached data is valid we do nothing; otherwise we parse the
        file and set the system up to record the variables accessed.
        Return the cache status and whether the file was skipped when parsed.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn)
        if self.cacheValid(fn):
            if "SKIPPED" in self.depends_cache[fn]:
                return True, True
            return True, False

        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)

        bb_data, skipped = self.load_bbfile(fn, cfgData)
        self.setData(fn, bb_data)
        return False, skipped

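    # Illustrative sketch, not part of the original file: the meaning of
    # the (cached, skipped) pair returned above:
    #
    #   (True,  True)  -> valid cache entry, recipe was skipped at parse time
    #   (True,  False) -> valid cache entry, normal recipe
    #   (False, True)  -> freshly parsed, parser raised SkipPackage
    #   (False, False) -> freshly parsed; the caller can now extract data
    #                     (see the session sketch at the end of this file)
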
    def cacheValid(self, fn):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked[fn] = ""

        # Pretend we're clean so getVar works
        self.clean[fn] = ""

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
            self.remove(fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check the file still exists
        if mtime == 0:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
            self.remove(fn)
            return False

        # Check the file's timestamp
        if mtime != self.getVar("CACHETIMESTAMP", fn, True):
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = self.getVar("__depends", fn, True)
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if the dependency still exists
                if fmtime == 0:
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
                    self.remove(fn)
                    return False

        #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
        if not fn in self.clean:
            self.clean[fn] = ""

        return True

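    # Illustrative sketch, not part of the original file: the timestamp
    # logic above reduced to a standalone model. An entry is current only
    # if the recipe and every recorded (file, mtime) dependency still
    # match on disk:
    #
    #   import os
    #   def entry_is_current(fn, stamp, depends):
    #       if os.path.getmtime(fn) != stamp:
    #           return False
    #       for f, old_mtime in depends:
    #           if os.path.getmtime(f) != old_mtime:
    #               return False
    #       return True
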
    def skip(self, fn):
        """
        Mark a fn as skipped
        Called from the parser
        """
        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}
        self.depends_cache[fn]["SKIPPED"] = "1"

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
        if fn in self.depends_cache:
            del self.depends_cache[fn]
        if fn in self.clean:
            del self.clean[fn]

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
            return

        version_data = {}
        version_data['CACHE_VER'] = __cache_version__
        version_data['BITBAKE_VER'] = bb.__version__

        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
        p.dump([self.depends_cache, version_data])

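    # Illustrative sketch, not part of the original file: the on-disk
    # format written above is one pickled [depends_cache, version_data]
    # list, so a round-trip looks like this (path and entry are made up):
    #
    #   import cPickle as pickle
    #   p = pickle.Pickler(file("/tmp/bb_cache.dat", "wb"), -1)
    #   p.dump([{"/recipes/foo_1.0.bb": {"PN": "foo"}},
    #           {"CACHE_VER": "128", "BITBAKE_VER": bb.__version__}])
    #   cached, versions = pickle.load(file("/tmp/bb_cache.dat", "rb"))
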
    def mtime(self, cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def handle_data(self, file_name, cacheData):
        """
        Save the data we need into the cache
        """

        pn = self.getVar('PN', file_name, True)
        pe = self.getVar('PE', file_name, True) or "0"
        pv = self.getVar('PV', file_name, True)
        pr = self.getVar('PR', file_name, True)
        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)

        # build PackageName to FileName lookup table
        if pn not in cacheData.pkg_pn:
            cacheData.pkg_pn[pn] = []
        cacheData.pkg_pn[pn].append(file_name)

        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)

        # build FileName to PackageName lookup table
        cacheData.pkg_fn[file_name] = pn
        cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
        cacheData.pkg_dp[file_name] = dp

        provides = [pn]
        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
            if provide not in provides:
                provides.append(provide)

        # Build forward and reverse provider hashes
        # Forward: virtual -> [filenames]
        # Reverse: PN -> [virtuals]
        if pn not in cacheData.pn_provides:
            cacheData.pn_provides[pn] = []

        cacheData.fn_provides[file_name] = provides
        for provide in provides:
            if provide not in cacheData.providers:
                cacheData.providers[provide] = []
            cacheData.providers[provide].append(file_name)
            if not provide in cacheData.pn_provides[pn]:
                cacheData.pn_provides[pn].append(provide)

        cacheData.deps[file_name] = []
        for dep in depends:
            if not dep in cacheData.deps[file_name]:
                cacheData.deps[file_name].append(dep)
            if not dep in cacheData.all_depends:
                cacheData.all_depends.append(dep)

        # Build reverse hash for PACKAGES, so runtime dependencies
        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
        for package in packages:
            if not package in cacheData.packages:
                cacheData.packages[package] = []
            cacheData.packages[package].append(file_name)
            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()

        for package in packages_dynamic:
            if not package in cacheData.packages_dynamic:
                cacheData.packages_dynamic[package] = []
            cacheData.packages_dynamic[package].append(file_name)

        for rprovide in rprovides:
            if not rprovide in cacheData.rproviders:
                cacheData.rproviders[rprovide] = []
            cacheData.rproviders[rprovide].append(file_name)

        # Build hash of runtime dependencies and recommends

        if not file_name in cacheData.rundeps:
            cacheData.rundeps[file_name] = {}
        if not file_name in cacheData.runrecs:
            cacheData.runrecs[file_name] = {}

        rdepends = self.getVar('RDEPENDS', file_name, True) or ""
        rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
        for package in packages + [pn]:
            if not package in cacheData.rundeps[file_name]:
                cacheData.rundeps[file_name][package] = []
            if not package in cacheData.runrecs[file_name]:
                cacheData.runrecs[file_name][package] = []

            cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
            cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")

        # Collect files we may need for possible world-dep
        # calculations
        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
            cacheData.possible_world.append(file_name)

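    # Illustrative sketch, not part of the original file: what handle_data()
    # leaves behind for one invented recipe /recipes/foo_1.0.bb with
    # PN = "foo" and PROVIDES = "virtual/foo":
    #
    #   cacheData.pkg_fn["/recipes/foo_1.0.bb"] == "foo"
    #   cacheData.pkg_pn["foo"] == ["/recipes/foo_1.0.bb"]
    #   cacheData.providers["virtual/foo"] == ["/recipes/foo_1.0.bb"]
    #   cacheData.pn_provides["foo"] == ["foo", "virtual/foo"]
    #   cacheData.rundeps["/recipes/foo_1.0.bb"]["foo"]  # flattened RDEPENDS string
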
    def load_bbfile(self, bbfile, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        import bb
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        if bb.parse.cached_mtime_noerror(bbfile_loc):
            os.chdir(bbfile_loc)
        bb_data = data.init_db(config)
        try:
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            os.chdir(oldpath)
            return bb_data, False
        except bb.parse.SkipPackage:
            os.chdir(oldpath)
            return bb_data, True
        except:
            os.chdir(oldpath)
            raise

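    # Illustrative sketch, not part of the original file: the chdir
    # discipline above reduced to its pattern. A try/finally restores the
    # old working directory on success, skip and error alike:
    #
    #   oldpath = os.path.abspath(os.getcwd())
    #   os.chdir(bbfile_loc)
    #   try:
    #       bb_data = parse.handle(bbfile, bb_data)
    #   finally:
    #       os.chdir(oldpath)
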
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker)

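# Illustrative sketch, not part of the original file: given the tracking
# described above, a depends_cache entry ends up shaped roughly like this
# (path, mtimes and variable values are invented):
#
#   {"/recipes/foo_1.0.bb": {
#       "CACHETIMESTAMP": 1210000000,
#       "__depends": [("classes/base.bbclass", 1209999000)],
#       "SKIPPED": "1",            # only present if parsing raised SkipPackage
#       "PN": "foo", "PV": "1.0",  # ...plus every other variable getVar saw
#   }}
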

#============================================================================#
# CacheData
#============================================================================#
class CacheData:
    """
    The data structures we compile from the cached data
    """

    def __init__(self):
        """
        Direct cache variables
        (from Cache.handle_data)
        """
        self.providers = {}
        self.rproviders = {}
        self.packages = {}
        self.packages_dynamic = {}
        self.possible_world = []
        self.pkg_pn = {}
        self.pkg_fn = {}
        self.pkg_pepvpr = {}
        self.pkg_dp = {}
        self.pn_provides = {}
        self.fn_provides = {}
        self.all_depends = []
        self.deps = {}
        self.rundeps = {}
        self.runrecs = {}
        self.task_queues = {}
        self.task_deps = {}
        self.stamp = {}
        self.preferred = {}

        """
        Indirect Cache variables
        (set elsewhere)
        """
        self.ignored_dependencies = []
        self.world_target = Set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []
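
# Illustrative sketch, not part of the original file: the life cycle the
# two classes support, as a hypothetical cooker-side session:
#
#   bb_cache = bb.cache.init(cooker)
#   cacheData = bb.cache.CacheData()
#   for fn in bbfiles:
#       cached, skipped = bb_cache.loadData(fn, cfgData)
#       if skipped:
#           bb_cache.skip(fn)
#       elif not cached:
#           bb_cache.handle_data(fn, cacheData)
#   bb_cache.sync()   # persist the learned variables for the next run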