 bitbake/bin/bitbake-diffsigs |  12 +
 bitbake/lib/bb/siggen.py     | 201 +
 2 files changed, 213 insertions(+), 0 deletions(-)
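For context, the handler this patch wires in is chosen by name: init() in the second hunk matches BB_SIGNATURE_HANDLER against each generator's name attribute. A one-line configuration sketch (the variable name comes from the patch; "basic" is the non-default generator it introduces):

    BB_SIGNATURE_HANDLER = "basic"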
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
new file mode 100755
index 0000000000..e5595cdf58
--- /dev/null
+++ b/bitbake/bin/bitbake-diffsigs
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+import sys
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+
+import bb.siggen
+
+if len(sys.argv) != 3:
+    print "Usage: %s <sigdatafile1> <sigdatafile2>" % os.path.basename(sys.argv[0])
+    sys.exit(1)
+
+bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
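For reference, the script is only a thin wrapper and the comparison can be driven from Python directly. A minimal sketch, assuming bitbake's lib directory is on sys.path; the file names are hypothetical, since dump_sigtask() in the second hunk writes <stampbase>.<task>.sigdata.<taskhash>, with the real task hash as the suffix:

    import bb.siggen

    # Hypothetical sigdata paths; real files end in the actual task hash.
    bb.siggen.compare_sigfiles("stamps/foo.do_compile.sigdata.0123abcd",
                               "stamps/foo.do_compile.sigdata.4567cdef")

Differences between the two pickled dictionaries are printed to stdout, one line per changed whitelist entry, variable value, or dependent task hash.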
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
new file mode 100644
index 0000000000..3cb67042b8
--- /dev/null
+++ b/bitbake/lib/bb/siggen.py
@@ -0,0 +1,201 @@
+import hashlib
+import re
+import bb.data
+import bb.msg
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+
+def init(d):
+    siggens = [obj for obj in globals().itervalues()
+               if type(obj) is type and issubclass(obj, SignatureGenerator)]
+
+    desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
+    for sg in siggens:
+        if desired == sg.name:
+            return sg(d)
+
+    bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired)
+    bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens))
+    return SignatureGenerator(d)
+
+class SignatureGenerator(object):
+    """Dummy signature generator which computes no hashes"""
+    name = "noop"
+
+    def __init__(self, data):
+        return
+
+    def finalise(self, fn, d, variant):
+        return
+
+class SignatureGeneratorBasic(SignatureGenerator):
+    """Signature generator based on hashes of each task's variable dependencies"""
+    name = "basic"
+
+    def __init__(self, data):
+        self.basehash = {}
+        self.taskhash = {}
+        self.taskdeps = {}
+        self.runtaskdeps = {}
+        self.gendeps = {}
+        self.lookupcache = {}
+        self.basewhitelist = (data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()
+        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
+
+        if self.taskwhitelist:
+            self.twl = re.compile(self.taskwhitelist)
+        else:
+            self.twl = None
+
+    def _build_data(self, fn, d):
+
+        self.taskdeps[fn], self.gendeps[fn] = bb.data.generate_dependencies(d)
+
+        basehash = {}
+        lookupcache = {}
+
+        for task in self.taskdeps[fn]:
+            data = d.getVar(task, False)
+            lookupcache[task] = data
+            for dep in sorted(self.taskdeps[fn][task]):
+                if dep in self.basewhitelist:
+                    continue
+                if dep in lookupcache:
+                    var = lookupcache[dep]
+                else:
+                    var = d.getVar(dep, False)
+                    lookupcache[dep] = var
+                if var:
+                    data = data + var
+            self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
+            #bb.note("Hash for %s is %s" % (task, self.basehash[fn + "." + task]))
+
+        self.lookupcache[fn] = lookupcache
+
+    def finalise(self, fn, d, variant):
+
+        if variant:
+            fn = "virtual:" + variant + ":" + fn
+
+        self._build_data(fn, d)
+
+        #Slow but can be useful for debugging mismatched basehashes
+        #for task in self.taskdeps[fn]:
+        #    self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
+
+        for task in self.taskdeps[fn]:
+            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
+
+    def get_taskhash(self, fn, task, deps, dataCache):
+        k = fn + "." + task
+        data = dataCache.basetaskhash[k]
+        self.runtaskdeps[k] = deps
+        for dep in sorted(deps):
+            if self.twl and self.twl.search(dataCache.pkg_fn[fn]):
+                #bb.note("Skipping %s" % dep)
+                continue
+            if dep not in self.taskhash:
+                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
+            data = data + self.taskhash[dep]
+        h = hashlib.md5(data).hexdigest()
+        self.taskhash[k] = h
+        #d.setVar("BB_TASKHASH_task-%s" % task, self.taskhash[k])
+        return h
+
+    def dump_sigtask(self, fn, task, stampbase, runtime):
+        k = fn + "." + task
+        if runtime:
+            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
+        else:
+            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
+        data = {}
+        data['basewhitelist'] = self.basewhitelist
+        data['taskwhitelist'] = self.taskwhitelist
+        data['taskdeps'] = self.taskdeps[fn][task]
+        data['basehash'] = self.basehash[k]
+        data['gendeps'] = {}
+        data['varvals'] = {}
+        data['varvals'][task] = self.lookupcache[fn][task]
+        for dep in self.taskdeps[fn][task]:
+            data['gendeps'][dep] = self.gendeps[fn][dep]
+            data['varvals'][dep] = self.lookupcache[fn][dep]
+
+        if runtime:
+            data['runtaskdeps'] = self.runtaskdeps[k]
+            data['runtaskhashes'] = {}
+            for dep in data['runtaskdeps']:
+                data['runtaskhashes'][dep] = self.taskhash[dep]
+
+        p = pickle.Pickler(file(sigfile, "wb"), -1)
+        p.dump(data)
+
+    def dump_sigs(self, dataCache):
+        for fn in self.taskdeps:
+            for task in self.taskdeps[fn]:
+                k = fn + "." + task
+                if k not in self.taskhash:
+                    continue
+                if dataCache.basetaskhash[k] != self.basehash[k]:
+                    bb.error("Bitbake's cached basehash does not match the one we just generated!")
+                    bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
+                self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
+
+def compare_sigfiles(a, b):
+    p1 = pickle.Unpickler(file(a, "rb"))
+    a_data = p1.load()
+    p2 = pickle.Unpickler(file(b, "rb"))
+    b_data = p2.load()
+
+    #print "Checking"
+    #print str(a_data)
+    #print str(b_data)
+
+    def dict_diff(a, b):
+        sa = set(a.keys())
+        sb = set(b.keys())
+        common = sa & sb
+        changed = set()
+        for i in common:
+            if a[i] != b[i]:
+                changed.add(i)
+        added = sa - sb
+        removed = sb - sa
+        return changed, added, removed
+
+    if a_data['basewhitelist'] != b_data['basewhitelist']:
+        print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
+
+    if a_data['taskwhitelist'] != b_data['taskwhitelist']:
+        print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
+
+    if a_data['taskdeps'] != b_data['taskdeps']:
+        print "Task dependencies changed from %s to %s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))
+
+    if a_data['basehash'] != b_data['basehash']:
+        print "basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])
+
+    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'])
+    if changed:
+        for dep in changed:
+            print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
+    #for dep in added:
+    #    print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
+    #for dep in removed:
+    #    print "Dependency on variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
+
+    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
+    if changed:
+        for dep in changed:
+            print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
+
+    if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and a_data['runtaskdeps'] != b_data['runtaskdeps']:
+        print "Tasks this task depends on changed from %s to %s" % (a_data['runtaskdeps'], b_data['runtaskdeps'])
+
+    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
+        for dep in a_data['runtaskhashes']:
+            if a_data['runtaskhashes'][dep] != b_data['runtaskhashes'].get(dep):
+                print "Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'].get(dep))
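Since init() discovers generators by scanning this module's globals() for SignatureGenerator subclasses and matching their name attribute, an additional generator only needs to subclass one of the classes above and set name; as written, it must be defined in (or injected into) bb.siggen itself. A minimal sketch, not part of the patch, that reuses the "basic" hashing but logs each task hash as it is computed:

    class SignatureGeneratorVerbose(SignatureGeneratorBasic):
        """Illustrative generator: hashes like 'basic' but logs each task hash"""
        name = "verbose"

        def get_taskhash(self, fn, task, deps, dataCache):
            # Delegate the actual hashing to the basic generator, then log.
            h = SignatureGeneratorBasic.get_taskhash(self, fn, task, deps, dataCache)
            bb.note("taskhash for %s.%s is %s" % (fn, task, h))
            return h

Selecting it would then be a matter of setting BB_SIGNATURE_HANDLER = "verbose" in the configuration, exactly as with the "basic" generator above.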