author | Richard Purdie <richard@openedhand.com> | 2006-11-16 15:02:15 +0000
committer | Richard Purdie <richard@openedhand.com> | 2006-11-16 15:02:15 +0000
commit | 306b7c7a9757ead077363074e7bbac2e5c03e7c5 (patch)
tree | 6935017a9af749c46816881c86258f514384ba1c /bitbake/lib
parent | 65930a38e415ae4a0182e1cea1be838e0ada50ee (diff)
download | poky-306b7c7a9757ead077363074e7bbac2e5c03e7c5.tar.gz
bitbake: Upgrade from 1.4 -> 1.7.4ish
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@863 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib')
33 files changed, 3343 insertions, 1186 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000000..826d435f98
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,305 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | This is a copy-on-write dictionary and set which abuses classes to try and be nice and fast. | ||
5 | |||
6 | Please Note: | ||
7 | Be careful when using mutable types (i.e. Dicts and Lists) - operations involving these are SLOW. | ||
8 | Assign a file to __warn__ to get warnings about slow operations. | ||
9 | """ | ||
10 | |||
11 | from inspect import getmro | ||
12 | |||
13 | import copy | ||
14 | import types, sets | ||
15 | types.ImmutableTypes = tuple([ \ | ||
16 | types.BooleanType, \ | ||
17 | types.ComplexType, \ | ||
18 | types.FloatType, \ | ||
19 | types.IntType, \ | ||
20 | types.LongType, \ | ||
21 | types.NoneType, \ | ||
22 | types.TupleType, \ | ||
23 | sets.ImmutableSet] + \ | ||
24 | list(types.StringTypes)) | ||
25 | |||
26 | MUTABLE = "__mutable__" | ||
27 | |||
28 | class COWMeta(type): | ||
29 | pass | ||
30 | |||
31 | class COWDictMeta(COWMeta): | ||
32 | __warn__ = False | ||
33 | __hasmutable__ = False | ||
34 | __marker__ = tuple() | ||
35 | |||
36 | def __str__(cls): | ||
37 | # FIXME: I have magic numbers! | ||
38 | return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3) | ||
39 | __repr__ = __str__ | ||
40 | |||
41 | def cow(cls): | ||
42 | class C(cls): | ||
43 | __count__ = cls.__count__ + 1 | ||
44 | return C | ||
45 | copy = cow | ||
46 | __call__ = cow | ||
47 | |||
48 | def __setitem__(cls, key, value): | ||
49 | if not isinstance(value, types.ImmutableTypes): | ||
50 | if not isinstance(value, COWMeta): | ||
51 | cls.__hasmutable__ = True | ||
52 | key += MUTABLE | ||
53 | setattr(cls, key, value) | ||
54 | |||
55 | def __getmutable__(cls, key, readonly=False): | ||
56 | nkey = key + MUTABLE | ||
57 | try: | ||
58 | return cls.__dict__[nkey] | ||
59 | except KeyError: | ||
60 | pass | ||
61 | |||
62 | value = getattr(cls, nkey) | ||
63 | if readonly: | ||
64 | return value | ||
65 | |||
66 | if not cls.__warn__ is False and not isinstance(value, COWMeta): | ||
67 | print >> cls.__warn__, "Warning: Doing a copy because %s is a mutable type." % key | ||
68 | try: | ||
69 | value = value.copy() | ||
70 | except AttributeError, e: | ||
71 | value = copy.copy(value) | ||
72 | setattr(cls, nkey, value) | ||
73 | return value | ||
74 | |||
75 | __getmarker__ = [] | ||
76 | def __getreadonly__(cls, key, default=__getmarker__): | ||
77 | """\ | ||
78 | Get a value (even if mutable) which you promise not to change. | ||
79 | """ | ||
80 | return cls.__getitem__(key, default, True) | ||
81 | |||
82 | def __getitem__(cls, key, default=__getmarker__, readonly=False): | ||
83 | try: | ||
84 | try: | ||
85 | value = getattr(cls, key) | ||
86 | except AttributeError: | ||
87 | value = cls.__getmutable__(key, readonly) | ||
88 | |||
89 | # This is for values which have been deleted | ||
90 | if value is cls.__marker__: | ||
91 | raise AttributeError("key %s does not exist." % key) | ||
92 | |||
93 | return value | ||
94 | except AttributeError, e: | ||
95 | if not default is cls.__getmarker__: | ||
96 | return default | ||
97 | |||
98 | raise KeyError(str(e)) | ||
99 | |||
100 | def __delitem__(cls, key): | ||
101 | cls.__setitem__(key, cls.__marker__) | ||
102 | |||
103 | def __revertitem__(cls, key): | ||
104 | if not cls.__dict__.has_key(key): | ||
105 | key += MUTABLE | ||
106 | delattr(cls, key) | ||
107 | |||
108 | def has_key(cls, key): | ||
109 | value = cls.__getreadonly__(key, cls.__marker__) | ||
110 | if value is cls.__marker__: | ||
111 | return False | ||
112 | return True | ||
113 | |||
114 | def iter(cls, type, readonly=False): | ||
115 | for key in dir(cls): | ||
116 | if key.startswith("__"): | ||
117 | continue | ||
118 | |||
119 | if key.endswith(MUTABLE): | ||
120 | key = key[:-len(MUTABLE)] | ||
121 | |||
122 | if type == "keys": | ||
123 | yield key | ||
124 | |||
125 | try: | ||
126 | if readonly: | ||
127 | value = cls.__getreadonly__(key) | ||
128 | else: | ||
129 | value = cls[key] | ||
130 | except KeyError: | ||
131 | continue | ||
132 | |||
133 | if type == "values": | ||
134 | yield value | ||
135 | if type == "items": | ||
136 | yield (key, value) | ||
137 | raise StopIteration() | ||
138 | |||
139 | def iterkeys(cls): | ||
140 | return cls.iter("keys") | ||
141 | def itervalues(cls, readonly=False): | ||
142 | if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: | ||
143 | print >> cls.__warn__, "Warning: If you aren't going to change any of the values, call with True." | ||
144 | return cls.iter("values", readonly) | ||
145 | def iteritems(cls, readonly=False): | ||
146 | if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: | ||
147 | print >> cls.__warn__, "Warning: If you aren't going to change any of the values, call with True." | ||
148 | return cls.iter("items", readonly) | ||
149 | |||
150 | class COWSetMeta(COWDictMeta): | ||
151 | def __str__(cls): | ||
152 | # FIXME: I have magic numbers! | ||
153 | return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3) | ||
154 | __repr__ = __str__ | ||
155 | |||
156 | def cow(cls): | ||
157 | class C(cls): | ||
158 | __count__ = cls.__count__ + 1 | ||
159 | return C | ||
160 | |||
161 | def add(cls, value): | ||
162 | COWDictMeta.__setitem__(cls, repr(hash(value)), value) | ||
163 | |||
164 | def remove(cls, value): | ||
165 | COWDictMeta.__delitem__(cls, repr(hash(value))) | ||
166 | |||
167 | def __in__(cls, value): | ||
168 | return COWDictMeta.has_key(cls, repr(hash(value))) | ||
169 | |||
170 | def iterkeys(cls): | ||
171 | raise TypeError("sets don't have keys") | ||
172 | |||
173 | def iteritems(cls): | ||
174 | raise TypeError("sets don't have 'items'") | ||
175 | |||
176 | # These are the actual classes you use! | ||
177 | class COWDictBase(object): | ||
178 | __metaclass__ = COWDictMeta | ||
179 | __count__ = 0 | ||
180 | |||
181 | class COWSetBase(object): | ||
182 | __metaclass__ = COWSetMeta | ||
183 | __count__ = 0 | ||
184 | |||
185 | if __name__ == "__main__": | ||
186 | import sys | ||
187 | COWDictBase.__warn__ = sys.stderr | ||
188 | a = COWDictBase() | ||
189 | print "a", a | ||
190 | |||
191 | a['a'] = 'a' | ||
192 | a['b'] = 'b' | ||
193 | a['dict'] = {} | ||
194 | |||
195 | b = a.copy() | ||
196 | print "b", b | ||
197 | b['c'] = 'b' | ||
198 | |||
199 | |||
200 | |||
201 | print "a", a | ||
202 | for x in a.iteritems(): | ||
203 | print x | ||
204 | print "--" | ||
205 | print "b", b | ||
206 | for x in b.iteritems(): | ||
207 | print x | ||
208 | |||
209 | |||
210 | b['dict']['a'] = 'b' | ||
211 | b['a'] = 'c' | ||
212 | |||
213 | print "a", a | ||
214 | for x in a.iteritems(): | ||
215 | print x | ||
216 | print "--" | ||
217 | print "b", b | ||
218 | for x in b.iteritems(): | ||
219 | print x | ||
220 | |||
221 | |||
222 | try: | ||
223 | b['dict2'] | ||
224 | except KeyError, e: | ||
225 | print "Okay!" | ||
226 | |||
227 | a['set'] = COWSetBase() | ||
228 | a['set'].add("o1") | ||
229 | a['set'].add("o1") | ||
230 | a['set'].add("o2") | ||
231 | |||
232 | print "a", a | ||
233 | for x in a['set'].itervalues(): | ||
234 | print x | ||
235 | print "--" | ||
236 | print "b", b | ||
237 | for x in b['set'].itervalues(): | ||
238 | print x | ||
239 | |||
240 | |||
241 | b['set'].add('o3') | ||
242 | |||
243 | print "a", a | ||
244 | for x in a['set'].itervalues(): | ||
245 | print x | ||
246 | print "--" | ||
247 | print "b", b | ||
248 | for x in b['set'].itervalues(): | ||
249 | print x | ||
250 | |||
251 | |||
252 | a['set2'] = set() | ||
253 | a['set2'].add("o1") | ||
254 | a['set2'].add("o1") | ||
255 | a['set2'].add("o2") | ||
256 | |||
257 | print "a", a | ||
258 | for x in a.iteritems(): | ||
259 | print x | ||
260 | print "--" | ||
261 | print "b", b | ||
262 | for x in b.iteritems(readonly=True): | ||
263 | print x | ||
264 | |||
265 | |||
266 | del b['b'] | ||
267 | try: | ||
268 | print b['b'] | ||
269 | except KeyError: | ||
270 | print "Yay! deleted key raises error" | ||
271 | |||
272 | if b.has_key('b'): | ||
273 | print "Boo!" | ||
274 | else: | ||
275 | print "Yay - has_key with delete works!" | ||
276 | |||
277 | print "a", a | ||
278 | for x in a.iteritems(): | ||
279 | print x | ||
280 | print "--" | ||
281 | print "b", b | ||
282 | for x in b.iteritems(readonly=True): | ||
283 | print x | ||
284 | |||
285 | |||
286 | b.__revertitem__('b') | ||
287 | |||
288 | print "a", a | ||
289 | for x in a.iteritems(): | ||
290 | print x | ||
291 | print "--" | ||
292 | print "b", b | ||
293 | for x in b.iteritems(readonly=True): | ||
294 | print x | ||
295 | |||
296 | |||
297 | b.__revertitem__('dict') | ||
298 | print "a", a | ||
299 | for x in a.iteritems(): | ||
300 | print x | ||
301 | print "--" | ||
302 | print "b", b | ||
303 | for x in b.iteritems(readonly=True): | ||
304 | print x | ||
305 | |||
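
The trick behind COW.py is that a "copy" never duplicates the stored data: cow() just manufactures an empty subclass, so reads fall through Python's normal attribute lookup to the parent level, while writes land only on the child class. __revertitem__ works by deleting the child's shadowing attribute. A minimal standalone sketch of the same mechanism (the class names here are illustrative, not part of the module):

    # Copy-on-write via subclassing: a "copy" is an empty subclass,
    # so lookups fall back to the parent until the child is written to.
    class Level0(object):
        pass

    Level0.greeting = "hello"                 # write at the base level

    class Level1(Level0):                     # an O(1) "copy" of Level0
        pass

    print Level1.greeting                     # -> hello (falls through)
    Level1.greeting = "goodbye"               # shadows; parent untouched
    print Level0.greeting, Level1.greeting    # -> hello goodbye
    del Level1.greeting                       # the "revert" operation
    print Level1.greeting                     # -> hello again
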
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index c3e7a16658..61eb5f3db8 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -23,7 +23,7 @@ this program; if not, write to the Free Software Foundation, Inc., 59 Temple | |||
23 | Place, Suite 330, Boston, MA 02111-1307 USA. | 23 | Place, Suite 330, Boston, MA 02111-1307 USA. |
24 | """ | 24 | """ |
25 | 25 | ||
26 | __version__ = "1.4.3" | 26 | __version__ = "1.7.4" |
27 | 27 | ||
28 | __all__ = [ | 28 | __all__ = [ |
29 | 29 | ||
@@ -63,24 +63,24 @@ __all__ = [ | |||
63 | "manifest", | 63 | "manifest", |
64 | "methodpool", | 64 | "methodpool", |
65 | "cache", | 65 | "cache", |
66 | "runqueue", | ||
67 | "taskdata", | ||
68 | "providers", | ||
66 | ] | 69 | ] |
67 | 70 | ||
68 | whitespace = '\t\n\x0b\x0c\r ' | 71 | whitespace = '\t\n\x0b\x0c\r ' |
69 | lowercase = 'abcdefghijklmnopqrstuvwxyz' | 72 | lowercase = 'abcdefghijklmnopqrstuvwxyz' |
70 | 73 | ||
71 | import sys, os, types, re, string | 74 | import sys, os, types, re, string, bb |
75 | from bb import msg | ||
72 | 76 | ||
73 | #projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) | 77 | #projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) |
74 | projectdir = os.getcwd() | 78 | projectdir = os.getcwd() |
75 | 79 | ||
76 | debug_level = 0 | ||
77 | |||
78 | if "BBDEBUG" in os.environ: | 80 | if "BBDEBUG" in os.environ: |
79 | level = int(os.environ["BBDEBUG"]) | 81 | level = int(os.environ["BBDEBUG"]) |
80 | if level: | 82 | if level: |
81 | debug_level = level | 83 | bb.msg.set_debug_level(level) |
82 | else: | ||
83 | debug_level = 0 | ||
84 | 84 | ||
85 | class VarExpandError(Exception): | 85 | class VarExpandError(Exception): |
86 | pass | 86 | pass |
@@ -99,22 +99,17 @@ class MalformedUrl(Exception): | |||
99 | ####################################################################### | 99 | ####################################################################### |
100 | ####################################################################### | 100 | ####################################################################### |
101 | 101 | ||
102 | debug_prepend = '' | ||
103 | |||
104 | |||
105 | def debug(lvl, *args): | 102 | def debug(lvl, *args): |
106 | if debug_level >= lvl: | 103 | bb.msg.std_debug(lvl, ''.join(args)) |
107 | print debug_prepend + 'DEBUG:', ''.join(args) | ||
108 | 104 | ||
109 | def note(*args): | 105 | def note(*args): |
110 | print debug_prepend + 'NOTE:', ''.join(args) | 106 | bb.msg.std_note(''.join(args)) |
111 | 107 | ||
112 | def error(*args): | 108 | def error(*args): |
113 | print debug_prepend + 'ERROR:', ''.join(args) | 109 | bb.msg.std_error(''.join(args)) |
114 | 110 | ||
115 | def fatal(*args): | 111 | def fatal(*args): |
116 | print debug_prepend + 'ERROR:', ''.join(args) | 112 | bb.msg.std_fatal(''.join(args)) |
117 | sys.exit(1) | ||
118 | 113 | ||
119 | 114 | ||
120 | ####################################################################### | 115 | ####################################################################### |
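
The module-level debug_level integer and print-based helpers give way to a central bb.msg module with per-domain logging (bb.msg.domain.Build, .Cache, .Data and .Fetcher appear throughout this commit). bb/msg.py itself is not included in this excerpt; the sketch below only mirrors the call signatures visible in these diffs and is an illustrative stand-in, not the real implementation:

    # Illustrative stand-in for the bb.msg interface used above; the
    # real bb/msg.py added by this commit differs internally.
    import sys

    class domain:
        Build = 'Build'; Cache = 'Cache'; Data = 'Data'; Fetcher = 'Fetcher'

    debug_level = {'default': 0}

    def set_debug_level(level):
        debug_level['default'] = level

    def debug(lvl, dom, msg):
        if debug_level.get(dom, debug_level['default']) >= lvl:
            print 'DEBUG: ' + msg

    def note(lvl, dom, msg):
        print 'NOTE: ' + msg

    def error(dom, msg):
        print >> sys.stderr, 'ERROR: ' + msg

    def std_debug(lvl, msg):
        debug(lvl, 'default', msg)

    def std_note(msg):
        note(1, 'default', msg)

    def std_error(msg):
        error('default', msg)

    def std_fatal(msg):
        print >> sys.stderr, 'FATAL: ' + msg
        sys.exit(1)
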
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 8e169e002a..942bdc1a39 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -25,18 +25,9 @@ You should have received a copy of the GNU General Public License along with | |||
25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | 25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig |
26 | """ | 26 | """ |
27 | 27 | ||
28 | from bb import debug, data, fetch, fatal, error, note, event, mkdirhier, utils | 28 | from bb import data, fetch, event, mkdirhier, utils |
29 | import bb, os | 29 | import bb, os |
30 | 30 | ||
31 | # data holds flags and function name for a given task | ||
32 | _task_data = data.init() | ||
33 | |||
34 | # graph represents task interdependencies | ||
35 | _task_graph = bb.digraph() | ||
36 | |||
37 | # stack represents execution order, excepting dependencies | ||
38 | _task_stack = [] | ||
39 | |||
40 | # events | 31 | # events |
41 | class FuncFailed(Exception): | 32 | class FuncFailed(Exception): |
42 | """Executed function failed""" | 33 | """Executed function failed""" |
@@ -76,13 +67,6 @@ class InvalidTask(TaskBase): | |||
76 | 67 | ||
77 | # functions | 68 | # functions |
78 | 69 | ||
79 | def init(data): | ||
80 | global _task_data, _task_graph, _task_stack | ||
81 | _task_data = data.init() | ||
82 | _task_graph = bb.digraph() | ||
83 | _task_stack = [] | ||
84 | |||
85 | |||
86 | def exec_func(func, d, dirs = None): | 70 | def exec_func(func, d, dirs = None): |
87 | """Execute an BB 'function'""" | 71 | """Execute an BB 'function'""" |
88 | 72 | ||
@@ -163,7 +147,7 @@ def exec_func_shell(func, d): | |||
163 | 147 | ||
164 | f = open(runfile, "w") | 148 | f = open(runfile, "w") |
165 | f.write("#!/bin/sh -e\n") | 149 | f.write("#!/bin/sh -e\n") |
166 | if bb.debug_level > 0: f.write("set -x\n") | 150 | if bb.msg.debug_level['default'] > 0: f.write("set -x\n") |
167 | data.emit_env(f, d) | 151 | data.emit_env(f, d) |
168 | 152 | ||
169 | f.write("cd %s\n" % os.getcwd()) | 153 | f.write("cd %s\n" % os.getcwd()) |
@@ -171,18 +155,18 @@ def exec_func_shell(func, d): | |||
171 | f.close() | 155 | f.close() |
172 | os.chmod(runfile, 0775) | 156 | os.chmod(runfile, 0775) |
173 | if not func: | 157 | if not func: |
174 | error("Function not specified") | 158 | bb.msg.error(bb.msg.domain.Build, "Function not specified") |
175 | raise FuncFailed() | 159 | raise FuncFailed() |
176 | 160 | ||
177 | # open logs | 161 | # open logs |
178 | si = file('/dev/null', 'r') | 162 | si = file('/dev/null', 'r') |
179 | try: | 163 | try: |
180 | if bb.debug_level > 0: | 164 | if bb.msg.debug_level['default'] > 0: |
181 | so = os.popen("tee \"%s\"" % logfile, "w") | 165 | so = os.popen("tee \"%s\"" % logfile, "w") |
182 | else: | 166 | else: |
183 | so = file(logfile, 'w') | 167 | so = file(logfile, 'w') |
184 | except OSError, e: | 168 | except OSError, e: |
185 | bb.error("opening log file: %s" % e) | 169 | bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e) |
186 | pass | 170 | pass |
187 | 171 | ||
188 | se = so | 172 | se = so |
@@ -205,7 +189,10 @@ def exec_func_shell(func, d): | |||
205 | else: | 189 | else: |
206 | maybe_fakeroot = '' | 190 | maybe_fakeroot = '' |
207 | ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile)) | 191 | ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile)) |
208 | os.chdir(prevdir) | 192 | try: |
193 | os.chdir(prevdir) | ||
194 | except: | ||
195 | pass | ||
209 | 196 | ||
210 | if not interact: | 197 | if not interact: |
211 | # restore the backups | 198 | # restore the backups |
@@ -224,14 +211,14 @@ def exec_func_shell(func, d): | |||
224 | os.close(ose[0]) | 211 | os.close(ose[0]) |
225 | 212 | ||
226 | if ret==0: | 213 | if ret==0: |
227 | if bb.debug_level > 0: | 214 | if bb.msg.debug_level['default'] > 0: |
228 | os.remove(runfile) | 215 | os.remove(runfile) |
229 | # os.remove(logfile) | 216 | # os.remove(logfile) |
230 | return | 217 | return |
231 | else: | 218 | else: |
232 | error("function %s failed" % func) | 219 | bb.msg.error(bb.msg.domain.Build, "function %s failed" % func) |
233 | if data.getVar("BBINCLUDELOGS", d): | 220 | if data.getVar("BBINCLUDELOGS", d): |
234 | error("log data follows (%s)" % logfile) | 221 | bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile) |
235 | f = open(logfile, "r") | 222 | f = open(logfile, "r") |
236 | while True: | 223 | while True: |
237 | l = f.readline() | 224 | l = f.readline() |
@@ -241,7 +228,7 @@ def exec_func_shell(func, d): | |||
241 | print '| %s' % l | 228 | print '| %s' % l |
242 | f.close() | 229 | f.close() |
243 | else: | 230 | else: |
244 | error("see log in %s" % logfile) | 231 | bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile) |
245 | raise FuncFailed( logfile ) | 232 | raise FuncFailed( logfile ) |
246 | 233 | ||
247 | 234 | ||
@@ -281,7 +268,7 @@ def exec_task(task, d): | |||
281 | return 1 | 268 | return 1 |
282 | 269 | ||
283 | try: | 270 | try: |
284 | debug(1, "Executing task %s" % item) | 271 | bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item) |
285 | old_overrides = data.getVar('OVERRIDES', d, 0) | 272 | old_overrides = data.getVar('OVERRIDES', d, 0) |
286 | localdata = data.createCopy(d) | 273 | localdata = data.createCopy(d) |
287 | data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) | 274 | data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) |
@@ -292,21 +279,63 @@ def exec_task(task, d): | |||
292 | task_cache.append(item) | 279 | task_cache.append(item) |
293 | data.setVar('_task_cache', task_cache, d) | 280 | data.setVar('_task_cache', task_cache, d) |
294 | except FuncFailed, reason: | 281 | except FuncFailed, reason: |
295 | note( "Task failed: %s" % reason ) | 282 | bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason ) |
296 | failedevent = TaskFailed(item, d) | 283 | failedevent = TaskFailed(item, d) |
297 | event.fire(failedevent) | 284 | event.fire(failedevent) |
298 | raise EventException("Function failed in task: %s" % reason, failedevent) | 285 | raise EventException("Function failed in task: %s" % reason, failedevent) |
299 | 286 | ||
300 | # execute | 287 | if data.getVarFlag(task, 'dontrundeps', d): |
301 | task_graph.walkdown(task, execute) | 288 | execute(None, task) |
289 | else: | ||
290 | task_graph.walkdown(task, execute) | ||
302 | 291 | ||
303 | # make stamp, or cause event and raise exception | 292 | # make stamp, or cause event and raise exception |
304 | if not data.getVarFlag(task, 'nostamp', d): | 293 | if not data.getVarFlag(task, 'nostamp', d): |
305 | mkstamp(task, d) | 294 | mkstamp(task, d) |
306 | 295 | ||
296 | def stamp_is_current_cache(dataCache, file_name, task, checkdeps = 1): | ||
297 | """ | ||
298 | Check status of a given task's stamp. | ||
299 | Returns 0 if it is not current and needs updating. | ||
300 | Same as stamp_is_current but works against the dataCache instead of d | ||
301 | """ | ||
302 | task_graph = dataCache.task_queues[file_name] | ||
303 | |||
304 | if not dataCache.stamp[file_name]: | ||
305 | return 0 | ||
306 | |||
307 | stampfile = "%s.%s" % (dataCache.stamp[file_name], task) | ||
308 | if not os.access(stampfile, os.F_OK): | ||
309 | return 0 | ||
310 | |||
311 | if checkdeps == 0: | ||
312 | return 1 | ||
313 | |||
314 | import stat | ||
315 | tasktime = os.stat(stampfile)[stat.ST_MTIME] | ||
316 | |||
317 | _deps = [] | ||
318 | def checkStamp(graph, task): | ||
319 | # check for existence | ||
320 | if 'nostamp' in dataCache.task_deps[file_name] and task in dataCache.task_deps[file_name]['nostamp']: | ||
321 | return 1 | ||
322 | |||
323 | if not stamp_is_current_cache(dataCache, file_name, task, 0): | ||
324 | return 0 | ||
325 | |||
326 | depfile = "%s.%s" % (dataCache.stamp[file_name], task) | ||
327 | deptime = os.stat(depfile)[stat.ST_MTIME] | ||
328 | if deptime > tasktime: | ||
329 | return 0 | ||
330 | return 1 | ||
331 | |||
332 | return task_graph.walkdown(task, checkStamp) | ||
307 | 333 | ||
308 | def stamp_is_current(task, d, checkdeps = 1): | 334 | def stamp_is_current(task, d, checkdeps = 1): |
309 | """Check status of a given task's stamp. returns 0 if it is not current and needs updating.""" | 335 | """ |
336 | Check status of a given task's stamp. | ||
337 | Returns 0 if it is not current and needs updating. | ||
338 | """ | ||
310 | task_graph = data.getVar('_task_graph', d) | 339 | task_graph = data.getVar('_task_graph', d) |
311 | if not task_graph: | 340 | if not task_graph: |
312 | task_graph = bb.digraph() | 341 | task_graph = bb.digraph() |
@@ -360,7 +389,6 @@ def mkstamp(task, d): | |||
360 | f = open(stamp, "w") | 389 | f = open(stamp, "w") |
361 | f.close() | 390 | f.close() |
362 | 391 | ||
363 | |||
364 | def add_task(task, deps, d): | 392 | def add_task(task, deps, d): |
365 | task_graph = data.getVar('_task_graph', d) | 393 | task_graph = data.getVar('_task_graph', d) |
366 | if not task_graph: | 394 | if not task_graph: |
@@ -374,6 +402,21 @@ def add_task(task, deps, d): | |||
374 | # don't assume holding a reference | 402 | # don't assume holding a reference |
375 | data.setVar('_task_graph', task_graph, d) | 403 | data.setVar('_task_graph', task_graph, d) |
376 | 404 | ||
405 | task_deps = data.getVar('_task_deps', d) | ||
406 | if not task_deps: | ||
407 | task_deps = {} | ||
408 | def getTask(name): | ||
409 | deptask = data.getVarFlag(task, name, d) | ||
410 | if deptask: | ||
411 | if not name in task_deps: | ||
412 | task_deps[name] = {} | ||
413 | task_deps[name][task] = deptask | ||
414 | getTask('deptask') | ||
415 | getTask('rdeptask') | ||
416 | getTask('recrdeptask') | ||
417 | getTask('nostamp') | ||
418 | |||
419 | data.setVar('_task_deps', task_deps, d) | ||
377 | 420 | ||
378 | def remove_task(task, kill, d): | 421 | def remove_task(task, kill, d): |
379 | """Remove an BB 'task'. | 422 | """Remove an BB 'task'. |
@@ -399,6 +442,3 @@ def task_exists(task, d): | |||
399 | task_graph = bb.digraph() | 442 | task_graph = bb.digraph() |
400 | data.setVar('_task_graph', task_graph, d) | 443 | data.setVar('_task_graph', task_graph, d) |
401 | return task_graph.hasnode(task) | 444 | return task_graph.hasnode(task) |
402 | |||
403 | def get_task_data(): | ||
404 | return _task_data | ||
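
The new stamp_is_current_cache() above answers "is this task up to date?" from the parse cache alone: the task's stamp file must exist, and, walking the task graph, every dependency's stamp must be no newer than the task's own. A self-contained sketch of that mtime rule (the stamp paths are invented for illustration):

    # Stamp-checking rule in miniature: a task is current if its stamp
    # exists and no dependency stamp is newer. Paths are made up.
    import os, stat

    def stamp_mtime(stampfile):
        try:
            return os.stat(stampfile)[stat.ST_MTIME]
        except OSError:
            return 0                # missing stamp: task never ran

    def task_is_current(task_stamp, dep_stamps):
        tasktime = stamp_mtime(task_stamp)
        if tasktime == 0:
            return False
        for dep in dep_stamps:
            deptime = stamp_mtime(dep)
            if deptime == 0 or deptime > tasktime:
                return False        # dependency missing or newer: rerun
        return True

    print task_is_current('stamps/foo.do_compile',
                          ['stamps/foo.do_configure'])
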
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 921a9f7589..05c42518a7 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -33,15 +33,15 @@ Place, Suite 330, Boston, MA 02111-1307 USA. | |||
33 | import os, re | 33 | import os, re |
34 | import bb.data | 34 | import bb.data |
35 | import bb.utils | 35 | import bb.utils |
36 | from sets import Set | ||
36 | 37 | ||
37 | try: | 38 | try: |
38 | import cPickle as pickle | 39 | import cPickle as pickle |
39 | except ImportError: | 40 | except ImportError: |
40 | import pickle | 41 | import pickle |
41 | print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." | 42 | bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") |
42 | 43 | ||
43 | # __cache_version__ = "123" | 44 | __cache_version__ = "125" |
44 | __cache_version__ = "124" # changes the __depends structure | ||
45 | 45 | ||
46 | class Cache: | 46 | class Cache: |
47 | """ | 47 | """ |
@@ -58,14 +58,12 @@ class Cache: | |||
58 | 58 | ||
59 | if self.cachedir in [None, '']: | 59 | if self.cachedir in [None, '']: |
60 | self.has_cache = False | 60 | self.has_cache = False |
61 | if cooker.cb is not None: | 61 | bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.") |
62 | print "NOTE: Not using a cache. Set CACHE = <directory> to enable." | ||
63 | else: | 62 | else: |
64 | self.has_cache = True | 63 | self.has_cache = True |
65 | self.cachefile = os.path.join(self.cachedir,"bb_cache.dat") | 64 | self.cachefile = os.path.join(self.cachedir,"bb_cache.dat") |
66 | 65 | ||
67 | if cooker.cb is not None: | 66 | bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir) |
68 | print "NOTE: Using cache in '%s'" % self.cachedir | ||
69 | try: | 67 | try: |
70 | os.stat( self.cachedir ) | 68 | os.stat( self.cachedir ) |
71 | except OSError: | 69 | except OSError: |
@@ -80,7 +78,7 @@ class Cache: | |||
80 | if version_data['BITBAKE_VER'] != bb.__version__: | 78 | if version_data['BITBAKE_VER'] != bb.__version__: |
81 | raise ValueError, 'Bitbake Version Mismatch' | 79 | raise ValueError, 'Bitbake Version Mismatch' |
82 | except (ValueError, KeyError): | 80 | except (ValueError, KeyError): |
83 | bb.note("Invalid cache found, rebuilding...") | 81 | bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...") |
84 | self.depends_cache = {} | 82 | self.depends_cache = {} |
85 | 83 | ||
86 | if self.depends_cache: | 84 | if self.depends_cache: |
@@ -108,7 +106,7 @@ class Cache: | |||
108 | if fn != self.data_fn: | 106 | if fn != self.data_fn: |
109 | # We're trying to access data in the cache which doesn't exist | 107 | # We're trying to access data in the cache which doesn't exist |
110 | # yet setData hasn't been called to setup the right access. Very bad. | 108 | # yet setData hasn't been called to setup the right access. Very bad. |
111 | bb.error("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn)) | 109 | bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn)) |
112 | 110 | ||
113 | result = bb.data.getVar(var, self.data, exp) | 111 | result = bb.data.getVar(var, self.data, exp) |
114 | self.depends_cache[fn][var] = result | 112 | self.depends_cache[fn][var] = result |
@@ -127,15 +125,15 @@ class Cache: | |||
127 | self.getVar("__depends", fn, True) | 125 | self.getVar("__depends", fn, True) |
128 | self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn) | 126 | self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn) |
129 | 127 | ||
130 | def loadDataFull(self, fn, cooker): | 128 | def loadDataFull(self, fn, cfgData): |
131 | """ | 129 | """ |
132 | Return a complete set of data for fn. | 130 | Return a complete set of data for fn. |
133 | To do this, we need to parse the file. | 131 | To do this, we need to parse the file. |
134 | """ | 132 | """ |
135 | bb_data, skipped = self.load_bbfile(fn, cooker) | 133 | bb_data, skipped = self.load_bbfile(fn, cfgData) |
136 | return bb_data | 134 | return bb_data |
137 | 135 | ||
138 | def loadData(self, fn, cooker): | 136 | def loadData(self, fn, cfgData): |
139 | """ | 137 | """ |
140 | Load a subset of data for fn. | 138 | Load a subset of data for fn. |
141 | If the cached data is valid we do nothing, | 139 | If the cached data is valid we do nothing, |
@@ -148,7 +146,7 @@ class Cache: | |||
148 | return True, True | 146 | return True, True |
149 | return True, False | 147 | return True, False |
150 | 148 | ||
151 | bb_data, skipped = self.load_bbfile(fn, cooker) | 149 | bb_data, skipped = self.load_bbfile(fn, cfgData) |
152 | self.setData(fn, bb_data) | 150 | self.setData(fn, bb_data) |
153 | return False, skipped | 151 | return False, skipped |
154 | 152 | ||
@@ -175,32 +173,36 @@ class Cache: | |||
175 | 173 | ||
176 | # Check file still exists | 174 | # Check file still exists |
177 | if self.mtime(fn) == 0: | 175 | if self.mtime(fn) == 0: |
178 | bb.debug(2, "Cache: %s no longer exists" % fn) | 176 | bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
179 | self.remove(fn) | 177 | self.remove(fn) |
180 | return False | 178 | return False |
181 | 179 | ||
182 | # File isn't in depends_cache | 180 | # File isn't in depends_cache |
183 | if not fn in self.depends_cache: | 181 | if not fn in self.depends_cache: |
184 | bb.debug(2, "Cache: %s is not cached" % fn) | 182 | bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn) |
185 | self.remove(fn) | 183 | self.remove(fn) |
186 | return False | 184 | return False |
187 | 185 | ||
188 | # Check the file's timestamp | 186 | # Check the file's timestamp |
189 | if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True): | 187 | if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True): |
190 | bb.debug(2, "Cache: %s changed" % fn) | 188 | bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn) |
191 | self.remove(fn) | 189 | self.remove(fn) |
192 | return False | 190 | return False |
193 | 191 | ||
194 | # Check dependencies are still valid | 192 | # Check dependencies are still valid |
195 | depends = self.getVar("__depends", fn, True) | 193 | depends = self.getVar("__depends", fn, True) |
196 | for f,old_mtime in depends: | 194 | for f,old_mtime in depends: |
195 | # Check if file still exists | ||
196 | if self.mtime(f) == 0: | ||
197 | return False | ||
198 | |||
197 | new_mtime = bb.parse.cached_mtime(f) | 199 | new_mtime = bb.parse.cached_mtime(f) |
198 | if (new_mtime > old_mtime): | 200 | if (new_mtime > old_mtime): |
199 | bb.debug(2, "Cache: %s's dependency %s changed" % (fn, f)) | 201 | bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f)) |
200 | self.remove(fn) | 202 | self.remove(fn) |
201 | return False | 203 | return False |
202 | 204 | ||
203 | bb.debug(2, "Depends Cache: %s is clean" % fn) | 205 | bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn) |
204 | if not fn in self.clean: | 206 | if not fn in self.clean: |
205 | self.clean[fn] = "" | 207 | self.clean[fn] = "" |
206 | 208 | ||
@@ -220,7 +222,7 @@ class Cache: | |||
220 | Remove a fn from the cache | 222 | Remove a fn from the cache |
221 | Called from the parser in error cases | 223 | Called from the parser in error cases |
222 | """ | 224 | """ |
223 | bb.debug(1, "Removing %s from cache" % fn) | 225 | bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn) |
224 | if fn in self.depends_cache: | 226 | if fn in self.depends_cache: |
225 | del self.depends_cache[fn] | 227 | del self.depends_cache[fn] |
226 | if fn in self.clean: | 228 | if fn in self.clean: |
@@ -229,7 +231,7 @@ class Cache: | |||
229 | def sync(self): | 231 | def sync(self): |
230 | """ | 232 | """ |
231 | Save the cache | 233 | Save the cache |
232 | Called from the parser when complete (or exitting) | 234 | Called from the parser when complete (or exiting) |
233 | """ | 235 | """ |
234 | 236 | ||
235 | if not self.has_cache: | 237 | if not self.has_cache: |
@@ -243,12 +245,103 @@ class Cache: | |||
243 | p.dump([self.depends_cache, version_data]) | 245 | p.dump([self.depends_cache, version_data]) |
244 | 246 | ||
245 | def mtime(self, cachefile): | 247 | def mtime(self, cachefile): |
246 | try: | 248 | return bb.parse.cached_mtime_noerror(cachefile) |
247 | return os.stat(cachefile)[8] | ||
248 | except OSError: | ||
249 | return 0 | ||
250 | 249 | ||
251 | def load_bbfile( self, bbfile , cooker): | 250 | def handle_data(self, file_name, cacheData): |
251 | """ | ||
252 | Save data we need into the cache | ||
253 | """ | ||
254 | |||
255 | pn = self.getVar('PN', file_name, True) | ||
256 | pv = self.getVar('PV', file_name, True) | ||
257 | pr = self.getVar('PR', file_name, True) | ||
258 | dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0") | ||
259 | provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split()) | ||
260 | depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "") | ||
261 | packages = (self.getVar('PACKAGES', file_name, True) or "").split() | ||
262 | packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split() | ||
263 | rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split() | ||
264 | |||
265 | cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True) | ||
266 | cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True) | ||
267 | |||
268 | # build PackageName to FileName lookup table | ||
269 | if pn not in cacheData.pkg_pn: | ||
270 | cacheData.pkg_pn[pn] = [] | ||
271 | cacheData.pkg_pn[pn].append(file_name) | ||
272 | |||
273 | cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True) | ||
274 | |||
275 | # build FileName to PackageName lookup table | ||
276 | cacheData.pkg_fn[file_name] = pn | ||
277 | cacheData.pkg_pvpr[file_name] = (pv,pr) | ||
278 | cacheData.pkg_dp[file_name] = dp | ||
279 | |||
280 | # Build forward and reverse provider hashes | ||
281 | # Forward: virtual -> [filenames] | ||
282 | # Reverse: PN -> [virtuals] | ||
283 | if pn not in cacheData.pn_provides: | ||
284 | cacheData.pn_provides[pn] = Set() | ||
285 | cacheData.pn_provides[pn] |= provides | ||
286 | |||
287 | for provide in provides: | ||
288 | if provide not in cacheData.providers: | ||
289 | cacheData.providers[provide] = [] | ||
290 | cacheData.providers[provide].append(file_name) | ||
291 | |||
292 | cacheData.deps[file_name] = Set() | ||
293 | for dep in depends: | ||
294 | cacheData.all_depends.add(dep) | ||
295 | cacheData.deps[file_name].add(dep) | ||
296 | |||
297 | # Build reverse hash for PACKAGES, so runtime dependencies | ||
298 | # can be resolved (RDEPENDS, RRECOMMENDS etc.) | ||
299 | for package in packages: | ||
300 | if not package in cacheData.packages: | ||
301 | cacheData.packages[package] = [] | ||
302 | cacheData.packages[package].append(file_name) | ||
303 | rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split() | ||
304 | |||
305 | for package in packages_dynamic: | ||
306 | if not package in cacheData.packages_dynamic: | ||
307 | cacheData.packages_dynamic[package] = [] | ||
308 | cacheData.packages_dynamic[package].append(file_name) | ||
309 | |||
310 | for rprovide in rprovides: | ||
311 | if not rprovide in cacheData.rproviders: | ||
312 | cacheData.rproviders[rprovide] = [] | ||
313 | cacheData.rproviders[rprovide].append(file_name) | ||
314 | |||
315 | # Build hash of runtime depends and recommends | ||
316 | |||
317 | def add_dep(deplist, deps): | ||
318 | for dep in deps: | ||
319 | if not dep in deplist: | ||
320 | deplist[dep] = "" | ||
321 | |||
322 | if not file_name in cacheData.rundeps: | ||
323 | cacheData.rundeps[file_name] = {} | ||
324 | if not file_name in cacheData.runrecs: | ||
325 | cacheData.runrecs[file_name] = {} | ||
326 | |||
327 | for package in packages + [pn]: | ||
328 | if not package in cacheData.rundeps[file_name]: | ||
329 | cacheData.rundeps[file_name][package] = {} | ||
330 | if not package in cacheData.runrecs[file_name]: | ||
331 | cacheData.runrecs[file_name][package] = {} | ||
332 | |||
333 | add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")) | ||
334 | add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")) | ||
335 | add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or "")) | ||
336 | add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")) | ||
337 | |||
338 | # Collect files we may need for possible world-dep | ||
339 | # calculations | ||
340 | if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True): | ||
341 | cacheData.possible_world.append(file_name) | ||
342 | |||
343 | |||
344 | def load_bbfile( self, bbfile , config): | ||
252 | """ | 345 | """ |
253 | Load and parse one .bb build file | 346 | Load and parse one .bb build file |
254 | Return the data and whether parsing resulted in the file being skipped | 347 | Return the data and whether parsing resulted in the file being skipped |
@@ -257,25 +350,15 @@ class Cache: | |||
257 | import bb | 350 | import bb |
258 | from bb import utils, data, parse, debug, event, fatal | 351 | from bb import utils, data, parse, debug, event, fatal |
259 | 352 | ||
260 | topdir = data.getVar('TOPDIR', cooker.configuration.data) | ||
261 | if not topdir: | ||
262 | topdir = os.path.abspath(os.getcwd()) | ||
263 | # set topdir to here | ||
264 | data.setVar('TOPDIR', topdir, cooker.configuration) | ||
265 | bbfile = os.path.abspath(bbfile) | ||
266 | bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) | ||
267 | # expand tmpdir to include this topdir | 353 | # expand tmpdir to include this topdir |
268 | data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data) | 354 | data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) |
269 | # set topdir to location of .bb file | 355 | bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) |
270 | topdir = bbfile_loc | ||
271 | #data.setVar('TOPDIR', topdir, cfg) | ||
272 | # go there | ||
273 | oldpath = os.path.abspath(os.getcwd()) | 356 | oldpath = os.path.abspath(os.getcwd()) |
274 | if self.mtime(topdir): | 357 | if self.mtime(bbfile_loc): |
275 | os.chdir(topdir) | 358 | os.chdir(bbfile_loc) |
276 | bb_data = data.init_db(cooker.configuration.data) | 359 | bb_data = data.init_db(config) |
277 | try: | 360 | try: |
278 | parse.handle(bbfile, bb_data) # read .bb data | 361 | bb_data = parse.handle(bbfile, bb_data) # read .bb data |
279 | os.chdir(oldpath) | 362 | os.chdir(oldpath) |
280 | return bb_data, False | 363 | return bb_data, False |
281 | except bb.parse.SkipPackage: | 364 | except bb.parse.SkipPackage: |
@@ -304,3 +387,45 @@ def init(cooker): | |||
304 | """ | 387 | """ |
305 | return Cache(cooker) | 388 | return Cache(cooker) |
306 | 389 | ||
390 | |||
391 | |||
392 | #============================================================================# | ||
393 | # CacheData | ||
394 | #============================================================================# | ||
395 | class CacheData: | ||
396 | """ | ||
397 | The data structures we compile from the cached data | ||
398 | """ | ||
399 | |||
400 | def __init__(self): | ||
401 | """ | ||
402 | Direct cache variables | ||
403 | (from Cache.handle_data) | ||
404 | """ | ||
405 | self.providers = {} | ||
406 | self.rproviders = {} | ||
407 | self.packages = {} | ||
408 | self.packages_dynamic = {} | ||
409 | self.possible_world = [] | ||
410 | self.pkg_pn = {} | ||
411 | self.pkg_fn = {} | ||
412 | self.pkg_pvpr = {} | ||
413 | self.pkg_dp = {} | ||
414 | self.pn_provides = {} | ||
415 | self.all_depends = Set() | ||
416 | self.deps = {} | ||
417 | self.rundeps = {} | ||
418 | self.runrecs = {} | ||
419 | self.task_queues = {} | ||
420 | self.task_deps = {} | ||
421 | self.stamp = {} | ||
422 | self.preferred = {} | ||
423 | |||
424 | """ | ||
425 | Indirect Cache variables | ||
426 | (set elsewhere) | ||
427 | """ | ||
428 | self.ignored_dependencies = [] | ||
429 | self.world_target = Set() | ||
430 | self.bbfile_priority = {} | ||
431 | self.bbfile_config_priorities = [] | ||
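
handle_data() flattens each parsed recipe into the lookup tables a dependency resolver needs: pkg_pn maps a package name to its candidate recipe files, providers is the forward map from each PROVIDES entry to candidate files, and pn_provides is the reverse map. The bookkeeping in miniature (the recipe names and PROVIDES values below are invented):

    # Miniature of the provider bookkeeping in handle_data();
    # the recipes and PROVIDES values are invented.
    from sets import Set                # same pre-2.4 Set the module uses

    providers = {}                      # provide name -> [recipe files]
    pn_provides = {}                    # PN -> Set of provides

    def add_recipe(file_name, pn, extra_provides):
        provides = Set([pn] + extra_provides)
        if pn not in pn_provides:
            pn_provides[pn] = Set()
        pn_provides[pn] |= provides
        for provide in provides:
            if provide not in providers:
                providers[provide] = []
            providers[provide].append(file_name)

    add_recipe('gcc_4.1.bb', 'gcc', ['virtual/gcc'])
    add_recipe('gcc-csl_4.1.bb', 'gcc-csl', ['virtual/gcc'])

    print providers['virtual/gcc']      # both candidate files
    print pn_provides['gcc']            # e.g. Set(['gcc', 'virtual/gcc'])
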
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 55d1cc9053..819dff9679 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -45,7 +45,8 @@ else: | |||
45 | path = os.path.dirname(os.path.dirname(sys.argv[0])) | 45 | path = os.path.dirname(os.path.dirname(sys.argv[0])) |
46 | sys.path.insert(0,path) | 46 | sys.path.insert(0,path) |
47 | 47 | ||
48 | from bb import note, debug, data_smart | 48 | from bb import data_smart |
49 | import bb | ||
49 | 50 | ||
50 | _dict_type = data_smart.DataSmart | 51 | _dict_type = data_smart.DataSmart |
51 | 52 | ||
@@ -362,10 +363,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): | |||
362 | val.rstrip() | 363 | val.rstrip() |
363 | if not val: | 364 | if not val: |
364 | return 0 | 365 | return 0 |
366 | |||
367 | varExpanded = expand(var, d) | ||
365 | 368 | ||
366 | if getVarFlag(var, "func", d): | 369 | if getVarFlag(var, "func", d): |
367 | # NOTE: should probably check for unbalanced {} within the var | 370 | # NOTE: should probably check for unbalanced {} within the var |
368 | o.write("%s() {\n%s\n}\n" % (var, val)) | 371 | o.write("%s() {\n%s\n}\n" % (varExpanded, val)) |
369 | else: | 372 | else: |
370 | if getVarFlag(var, "export", d): | 373 | if getVarFlag(var, "export", d): |
371 | o.write('export ') | 374 | o.write('export ') |
@@ -375,7 +378,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): | |||
375 | # if we're going to output this within doublequotes, | 378 | # if we're going to output this within doublequotes, |
376 | # to a shell, we need to escape the quotes in the var | 379 | # to a shell, we need to escape the quotes in the var |
377 | alter = re.sub('"', '\\"', val.strip()) | 380 | alter = re.sub('"', '\\"', val.strip()) |
378 | o.write('%s="%s"\n' % (var, alter)) | 381 | o.write('%s="%s"\n' % (varExpanded, alter)) |
379 | return 1 | 382 | return 1 |
380 | 383 | ||
381 | 384 | ||
@@ -430,8 +433,38 @@ def update_data(d): | |||
430 | >>> update_data(d) | 433 | >>> update_data(d) |
431 | >>> print getVar('TEST', d) | 434 | >>> print getVar('TEST', d) |
432 | local | 435 | local |
436 | |||
437 | CopyMonster: | ||
438 | >>> e = d.createCopy() | ||
439 | >>> setVar('TEST_foo', 'foo', e) | ||
440 | >>> update_data(e) | ||
441 | >>> print getVar('TEST', e) | ||
442 | local | ||
443 | |||
444 | >>> setVar('OVERRIDES', 'arm:ramses:local:foo', e) | ||
445 | >>> update_data(e) | ||
446 | >>> print getVar('TEST', e) | ||
447 | foo | ||
448 | |||
449 | >>> f = d.createCopy() | ||
450 | >>> setVar('TEST_moo', 'something', f) | ||
451 | >>> setVar('OVERRIDES', 'moo:arm:ramses:local:foo', e) | ||
452 | >>> update_data(e) | ||
453 | >>> print getVar('TEST', e) | ||
454 | foo | ||
455 | |||
456 | |||
457 | >>> h = init() | ||
458 | >>> setVar('SRC_URI', 'file://append.foo;patch=1 ', h) | ||
459 | >>> g = h.createCopy() | ||
460 | >>> setVar('SRC_URI_append_arm', 'file://other.foo;patch=1', g) | ||
461 | >>> setVar('OVERRIDES', 'arm:moo', g) | ||
462 | >>> update_data(g) | ||
463 | >>> print getVar('SRC_URI', g) | ||
464 | file://append.foo;patch=1 file://other.foo;patch=1 | ||
465 | |||
433 | """ | 466 | """ |
434 | debug(2, "update_data()") | 467 | bb.msg.debug(2, bb.msg.domain.Data, "update_data()") |
435 | 468 | ||
436 | # now ask the cookie monster for help | 469 | # now ask the cookie monster for help |
437 | #print "Cookie Monster" | 470 | #print "Cookie Monster" |
@@ -460,7 +493,7 @@ def update_data(d): | |||
460 | l = len(o)+1 | 493 | l = len(o)+1 |
461 | 494 | ||
462 | # see if one should even try | 495 | # see if one should even try |
463 | if not o in d._seen_overrides: | 496 | if not d._seen_overrides.has_key(o): |
464 | continue | 497 | continue |
465 | 498 | ||
466 | vars = d._seen_overrides[o] | 499 | vars = d._seen_overrides[o] |
@@ -469,10 +502,10 @@ def update_data(d): | |||
469 | try: | 502 | try: |
470 | d[name] = d[var] | 503 | d[name] = d[var] |
471 | except: | 504 | except: |
472 | note ("Untracked delVar") | 505 | bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar") |
473 | 506 | ||
474 | # now on to the appends and prepends | 507 | # now on to the appends and prepends |
475 | if '_append' in d._special_values: | 508 | if d._special_values.has_key('_append'): |
476 | appends = d._special_values['_append'] or [] | 509 | appends = d._special_values['_append'] or [] |
477 | for append in appends: | 510 | for append in appends: |
478 | for (a, o) in getVarFlag(append, '_append', d) or []: | 511 | for (a, o) in getVarFlag(append, '_append', d) or []: |
@@ -487,7 +520,7 @@ def update_data(d): | |||
487 | setVar(append, sval, d) | 520 | setVar(append, sval, d) |
488 | 521 | ||
489 | 522 | ||
490 | if '_prepend' in d._special_values: | 523 | if d._special_values.has_key('_prepend'): |
491 | prepends = d._special_values['_prepend'] or [] | 524 | prepends = d._special_values['_prepend'] or [] |
492 | 525 | ||
493 | for prepend in prepends: | 526 | for prepend in prepends: |
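
The emit_var() change above expands the variable name itself (varExpanded) before writing it to the run script, so a function or variable whose name embeds a reference is emitted under its final, expanded name. A standalone illustration of why that matters (the toy expand() below stands in for bb.data's real expansion):

    # Why the emitted *name* must be expanded too; expand() here is a
    # toy stand-in for the real variable expansion in bb.data.
    def expand(s, env):
        for k, v in env.items():
            s = s.replace('${%s}' % k, v)
        return s

    env = {'PN': 'busybox'}
    var = 'pkg_postinst_${PN}'          # a name containing a reference
    val = 'echo configured'

    print '%s() {\n%s\n}\n' % (var, val)               # old: broken shell name
    print '%s() {\n%s\n}\n' % (expand(var, env), val)  # new: pkg_postinst_busybox
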
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index fbd4167fe4..054b852200 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -29,14 +29,12 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig | |||
29 | """ | 29 | """ |
30 | 30 | ||
31 | import copy, os, re, sys, time, types | 31 | import copy, os, re, sys, time, types |
32 | from bb import note, debug, error, fatal, utils, methodpool | 32 | import bb |
33 | from bb import utils, methodpool | ||
34 | from COW import COWDictBase | ||
33 | from sets import Set | 35 | from sets import Set |
36 | from new import classobj | ||
34 | 37 | ||
35 | try: | ||
36 | import cPickle as pickle | ||
37 | except ImportError: | ||
38 | import pickle | ||
39 | print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." | ||
40 | 38 | ||
41 | __setvar_keyword__ = ["_append","_prepend"] | 39 | __setvar_keyword__ = ["_append","_prepend"] |
42 | __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') | 40 | __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') |
@@ -45,12 +43,14 @@ __expand_python_regexp__ = re.compile(r"\${@.+?}") | |||
45 | 43 | ||
46 | 44 | ||
47 | class DataSmart: | 45 | class DataSmart: |
48 | def __init__(self): | 46 | def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ): |
49 | self.dict = {} | 47 | self.dict = {} |
50 | 48 | ||
51 | # cookie monster tribute | 49 | # cookie monster tribute |
52 | self._special_values = {} | 50 | self._special_values = special |
53 | self._seen_overrides = {} | 51 | self._seen_overrides = seen |
52 | |||
53 | self.expand_cache = {} | ||
54 | 54 | ||
55 | def expand(self,s, varname): | 55 | def expand(self,s, varname): |
56 | def var_sub(match): | 56 | def var_sub(match): |
@@ -75,6 +75,9 @@ class DataSmart: | |||
75 | if type(s) is not types.StringType: # sanity check | 75 | if type(s) is not types.StringType: # sanity check |
76 | return s | 76 | return s |
77 | 77 | ||
78 | if varname and varname in self.expand_cache: | ||
79 | return self.expand_cache[varname] | ||
80 | |||
78 | while s.find('$') != -1: | 81 | while s.find('$') != -1: |
79 | olds = s | 82 | olds = s |
80 | try: | 83 | try: |
@@ -82,15 +85,20 @@ class DataSmart: | |||
82 | s = __expand_python_regexp__.sub(python_sub, s) | 85 | s = __expand_python_regexp__.sub(python_sub, s) |
83 | if s == olds: break | 86 | if s == olds: break |
84 | if type(s) is not types.StringType: # sanity check | 87 | if type(s) is not types.StringType: # sanity check |
85 | error('expansion of %s returned non-string %s' % (olds, s)) | 88 | bb.msg.error(bb.msg.domain.Data, 'expansion of %s returned non-string %s' % (olds, s)) |
86 | except KeyboardInterrupt: | 89 | except KeyboardInterrupt: |
87 | raise | 90 | raise |
88 | except: | 91 | except: |
89 | note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) | 92 | bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) |
90 | raise | 93 | raise |
94 | |||
95 | if varname: | ||
96 | self.expand_cache[varname] = s | ||
97 | |||
91 | return s | 98 | return s |
92 | 99 | ||
93 | def initVar(self, var): | 100 | def initVar(self, var): |
101 | self.expand_cache = {} | ||
94 | if not var in self.dict: | 102 | if not var in self.dict: |
95 | self.dict[var] = {} | 103 | self.dict[var] = {} |
96 | 104 | ||
@@ -119,6 +127,7 @@ class DataSmart: | |||
119 | self.initVar(var) | 127 | self.initVar(var) |
120 | 128 | ||
121 | def setVar(self,var,value): | 129 | def setVar(self,var,value): |
130 | self.expand_cache = {} | ||
122 | match = __setvar_regexp__.match(var) | 131 | match = __setvar_regexp__.match(var) |
123 | if match and match.group("keyword") in __setvar_keyword__: | 132 | if match and match.group("keyword") in __setvar_keyword__: |
124 | base = match.group('base') | 133 | base = match.group('base') |
@@ -128,6 +137,7 @@ class DataSmart: | |||
128 | l.append([value, override]) | 137 | l.append([value, override]) |
129 | self.setVarFlag(base, keyword, l) | 138 | self.setVarFlag(base, keyword, l) |
130 | 139 | ||
140 | # todo make sure keyword is not __doc__ or __module__ | ||
131 | # pay the cookie monster | 141 | # pay the cookie monster |
132 | try: | 142 | try: |
133 | self._special_values[keyword].add( base ) | 143 | self._special_values[keyword].add( base ) |
@@ -135,10 +145,6 @@ class DataSmart: | |||
135 | self._special_values[keyword] = Set() | 145 | self._special_values[keyword] = Set() |
136 | self._special_values[keyword].add( base ) | 146 | self._special_values[keyword].add( base ) |
137 | 147 | ||
138 | # SRC_URI_append_simpad is both a flag and a override | ||
139 | #if not override in self._seen_overrides: | ||
140 | # self._seen_overrides[override] = Set() | ||
141 | #self._seen_overrides[override].add( base ) | ||
142 | return | 148 | return |
143 | 149 | ||
144 | if not var in self.dict: | 150 | if not var in self.dict: |
@@ -150,7 +156,7 @@ class DataSmart: | |||
150 | # more cookies for the cookie monster | 156 | # more cookies for the cookie monster |
151 | if '_' in var: | 157 | if '_' in var: |
152 | override = var[var.rfind('_')+1:] | 158 | override = var[var.rfind('_')+1:] |
153 | if not override in self._seen_overrides: | 159 | if not self._seen_overrides.has_key(override): |
154 | self._seen_overrides[override] = Set() | 160 | self._seen_overrides[override] = Set() |
155 | self._seen_overrides[override].add( var ) | 161 | self._seen_overrides[override].add( var ) |
156 | 162 | ||
@@ -165,6 +171,7 @@ class DataSmart: | |||
165 | return value | 171 | return value |
166 | 172 | ||
167 | def delVar(self,var): | 173 | def delVar(self,var): |
174 | self.expand_cache = {} | ||
168 | self.dict[var] = {} | 175 | self.dict[var] = {} |
169 | 176 | ||
170 | def setVarFlag(self,var,flag,flagvalue): | 177 | def setVarFlag(self,var,flag,flagvalue): |
@@ -234,10 +241,8 @@ class DataSmart: | |||
234 | Create a copy of self by setting _data to self | 241 | Create a copy of self by setting _data to self |
235 | """ | 242 | """ |
236 | # we really want this to be a DataSmart... | 243 | # we really want this to be a DataSmart... |
237 | data = DataSmart() | 244 | data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy()) |
238 | data.dict["_data"] = self.dict | 245 | data.dict["_data"] = self.dict |
239 | data._seen_overrides = copy.deepcopy(self._seen_overrides) | ||
240 | data._special_values = copy.deepcopy(self._special_values) | ||
241 | 246 | ||
242 | return data | 247 | return data |
243 | 248 | ||
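
Two cost savings land in data_smart.py: createCopy() now shares _seen_overrides and _special_values through the COWDictBase from COW.py above instead of deep-copying them, and expand() memoises its result per variable name in expand_cache, which every mutator (initVar, setVar, delVar) flushes. The cache discipline in miniature:

    # The expand_cache discipline in miniature: memoise per-name
    # expansion results; any write flushes the whole cache.
    class TinyData(object):
        def __init__(self):
            self.dict = {}
            self.expand_cache = {}

        def setVar(self, var, value):
            self.expand_cache = {}          # writes invalidate everything
            self.dict[var] = value

        def expand(self, s, varname=None):
            if varname and varname in self.expand_cache:
                return self.expand_cache[varname]
            olds = None
            while s != olds:                # substitute until stable
                olds = s
                for k, v in self.dict.items():
                    s = s.replace('${%s}' % k, v)
            if varname:
                self.expand_cache[varname] = s
            return s

    d = TinyData()
    d.setVar('PN', 'bitbake')
    print d.expand('${PN}-image', 'IMAGE')  # computed and cached
    d.setVar('PN', 'poky')                  # flushes the cache
    print d.expand('${PN}-image', 'IMAGE')  # recomputed: poky-image
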
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 7ab0590765..24aebc41ca 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -38,13 +38,16 @@ class NoMethodError(Exception): | |||
38 | class MissingParameterError(Exception): | 38 | class MissingParameterError(Exception): |
39 | """Exception raised when a fetch method is missing a critical parameter in the url""" | 39 | """Exception raised when a fetch method is missing a critical parameter in the url""" |
40 | 40 | ||
41 | class ParameterError(Exception): | ||
42 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" | ||
43 | |||
41 | class MD5SumError(Exception): | 44 | class MD5SumError(Exception): |
42 | """Exception raised when a MD5SUM of a file does not match the expected one""" | 45 | """Exception raised when a MD5SUM of a file does not match the expected one""" |
43 | 46 | ||
44 | def uri_replace(uri, uri_find, uri_replace, d): | 47 | def uri_replace(uri, uri_find, uri_replace, d): |
45 | # bb.note("uri_replace: operating on %s" % uri) | 48 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri) |
46 | if not uri or not uri_find or not uri_replace: | 49 | if not uri or not uri_find or not uri_replace: |
47 | bb.debug(1, "uri_replace: passed an undefined value, not replacing") | 50 | bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing") |
48 | uri_decoded = list(bb.decodeurl(uri)) | 51 | uri_decoded = list(bb.decodeurl(uri)) |
49 | uri_find_decoded = list(bb.decodeurl(uri_find)) | 52 | uri_find_decoded = list(bb.decodeurl(uri_find)) |
50 | uri_replace_decoded = list(bb.decodeurl(uri_replace)) | 53 | uri_replace_decoded = list(bb.decodeurl(uri_replace)) |
@@ -62,9 +65,9 @@ def uri_replace(uri, uri_find, uri_replace, d): | |||
62 | localfn = bb.fetch.localpath(uri, d) | 65 | localfn = bb.fetch.localpath(uri, d) |
63 | if localfn: | 66 | if localfn: |
64 | result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) | 67 | result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) |
65 | # bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc])) | 68 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc])) |
66 | else: | 69 | else: |
67 | # bb.note("uri_replace: no match") | 70 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match") |
68 | return uri | 71 | return uri |
69 | # else: | 72 | # else: |
70 | # for j in i.keys(): | 73 | # for j in i.keys(): |
@@ -72,62 +75,94 @@ def uri_replace(uri, uri_find, uri_replace, d): | |||
72 | return bb.encodeurl(result_decoded) | 75 | return bb.encodeurl(result_decoded) |
73 | 76 | ||
74 | methods = [] | 77 | methods = [] |
78 | urldata = {} | ||
75 | 79 | ||
76 | def init(urls = [], d = None): | 80 | def init(urls = [], d = None): |
77 | if d == None: | 81 | if d == None: |
78 | bb.debug(2,"BUG init called with None as data object!!!") | 82 | bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!") |
79 | return | 83 | return |
80 | 84 | ||
81 | for m in methods: | 85 | for m in methods: |
82 | m.urls = [] | 86 | m.urls = [] |
83 | 87 | ||
84 | for u in urls: | 88 | for u in urls: |
89 | ud = initdata(u, d) | ||
90 | if ud.method: | ||
91 | ud.method.urls.append(u) | ||
92 | |||
93 | def initdata(url, d): | ||
94 | if url not in urldata: | ||
95 | ud = FetchData() | ||
96 | (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d)) | ||
97 | ud.date = Fetch.getSRCDate(d) | ||
85 | for m in methods: | 98 | for m in methods: |
86 | m.data = d | 99 | if m.supports(url, ud, d): |
87 | if m.supports(u, d): | 100 | ud.localpath = m.localpath(url, ud, d) |
88 | m.urls.append(u) | 101 | ud.md5 = ud.localpath + '.md5' |
102 | # if user sets localpath for file, use it instead. | ||
103 | if "localpath" in ud.parm: | ||
104 | ud.localpath = ud.parm["localpath"] | ||
105 | ud.method = m | ||
106 | break | ||
107 | urldata[url] = ud | ||
108 | return urldata[url] | ||
89 | 109 | ||
90 | def go(d): | 110 | def go(d): |
91 | """Fetch all urls""" | 111 | """Fetch all urls""" |
92 | for m in methods: | 112 | for m in methods: |
93 | if m.urls: | 113 | for u in m.urls: |
94 | m.go(d) | 114 | ud = urldata[u] |
115 | if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[u].md5): | ||
116 | # File already present along with md5 stamp file | ||
117 | # Touch md5 file to show activity | ||
118 | os.utime(ud.md5, None) | ||
119 | continue | ||
120 | # RP - is olddir needed? | ||
121 | # olddir = os.path.abspath(os.getcwd()) | ||
122 | m.go(u, ud , d) | ||
123 | # os.chdir(olddir) | ||
124 | if ud.localfile and not m.forcefetch(u, ud, d): | ||
125 | Fetch.write_md5sum(u, ud, d) | ||
95 | 126 | ||
96 | def localpaths(d): | 127 | def localpaths(d): |
97 | """Return a list of the local filenames, assuming successful fetch""" | 128 | """Return a list of the local filenames, assuming successful fetch""" |
98 | local = [] | 129 | local = [] |
99 | for m in methods: | 130 | for m in methods: |
100 | for u in m.urls: | 131 | for u in m.urls: |
101 | local.append(m.localpath(u, d)) | 132 | local.append(urldata[u].localpath) |
102 | return local | 133 | return local |
103 | 134 | ||
104 | def localpath(url, d): | 135 | def localpath(url, d): |
105 | for m in methods: | 136 | ud = initdata(url, d) |
106 | if m.supports(url, d): | 137 | if ud.method: |
107 | return m.localpath(url, d) | 138 | return ud.localpath |
108 | return url | 139 | return url |
109 | 140 | ||
141 | class FetchData(object): | ||
142 | """Class for fetcher variable store""" | ||
143 | def __init__(self): | ||
144 | self.localfile = "" | ||
145 | |||
146 | |||
110 | class Fetch(object): | 147 | class Fetch(object): |
111 | """Base class for 'fetch'ing data""" | 148 | """Base class for 'fetch'ing data""" |
112 | 149 | ||
113 | def __init__(self, urls = []): | 150 | def __init__(self, urls = []): |
114 | self.urls = [] | 151 | self.urls = [] |
115 | for url in urls: | ||
116 | if self.supports(bb.decodeurl(url), d) is 1: | ||
117 | self.urls.append(url) | ||
118 | 152 | ||
119 | def supports(url, d): | 153 | def supports(self, url, urldata, d): |
120 | """Check to see if this fetch class supports a given url. | 154 | """ |
121 | Expects supplied url in list form, as outputted by bb.decodeurl(). | 155 | Check to see if this fetch class supports a given url. |
122 | """ | 156 | """ |
123 | return 0 | 157 | return 0 |
124 | supports = staticmethod(supports) | ||
125 | 158 | ||
126 | def localpath(url, d): | 159 | def localpath(self, url, urldata, d): |
127 | """Return the local filename of a given url assuming a successful fetch. | 160 | """ |
161 | Return the local filename of a given url assuming a successful fetch. | ||
162 | Can also setup variables in urldata for use in go (saving code duplication | ||
163 | and duplicate code execution) | ||
128 | """ | 164 | """ |
129 | return url | 165 | return url |
130 | localpath = staticmethod(localpath) | ||
131 | 166 | ||
132 | def setUrls(self, urls): | 167 | def setUrls(self, urls): |
133 | self.__urls = urls | 168 | self.__urls = urls |
@@ -137,16 +172,17 @@ class Fetch(object): | |||
137 | 172 | ||
138 | urls = property(getUrls, setUrls, None, "Urls property") | 173 | urls = property(getUrls, setUrls, None, "Urls property") |
139 | 174 | ||
140 | def setData(self, data): | 175 | def forcefetch(self, url, urldata, d): |
141 | self.__data = data | 176 | """ |
142 | 177 | Force a fetch, even if localpath exists? | |
143 | def getData(self): | 178 | """ |
144 | return self.__data | 179 | return False |
145 | |||
146 | data = property(getData, setData, None, "Data property") | ||
147 | 180 | ||
148 | def go(self, urls = []): | 181 | def go(self, url, urldata, d): |
149 | """Fetch urls""" | 182 | """ |
183 | Fetch url | ||
184 | Assumes localpath was called first | ||
185 | """ | ||
150 | raise NoMethodError("Missing implementation for url") | 186 | raise NoMethodError("Missing implementation for url") |
151 | 187 | ||
152 | def getSRCDate(d): | 188 | def getSRCDate(d): |
@@ -155,7 +191,12 @@ class Fetch(object): | |||
155 | 191 | ||
156 | d the bb.data module | 192 | d the bb.data module |
157 | """ | 193 | """ |
158 | return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1 ) | 194 | pn = data.getVar("PN", d, 1) |
195 | |||
196 | if pn: | ||
197 | return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1) | ||
198 | |||
199 | return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
159 | getSRCDate = staticmethod(getSRCDate) | 200 | getSRCDate = staticmethod(getSRCDate) |
160 | 201 | ||
161 | def try_mirror(d, tarfn): | 202 | def try_mirror(d, tarfn): |
@@ -168,6 +209,11 @@ class Fetch(object): | |||
168 | d Is a bb.data instance | 209 | d Is a bb.data instance |
169 | tarfn is the name of the tarball | 210 | tarfn is the name of the tarball |
170 | """ | 211 | """ |
212 | tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn) | ||
213 | if os.access(tarpath, os.R_OK): | ||
214 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn) | ||
215 | return True | ||
216 | |||
171 | pn = data.getVar('PN', d, True) | 217 | pn = data.getVar('PN', d, True) |
172 | src_tarball_stash = None | 218 | src_tarball_stash = None |
173 | if pn: | 219 | if pn: |
@@ -176,36 +222,45 @@ class Fetch(object): | |||
176 | for stash in src_tarball_stash: | 222 | for stash in src_tarball_stash: |
177 | fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True) | 223 | fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True) |
178 | uri = stash + tarfn | 224 | uri = stash + tarfn |
179 | bb.note("fetch " + uri) | 225 | bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) |
180 | fetchcmd = fetchcmd.replace("${URI}", uri) | 226 | fetchcmd = fetchcmd.replace("${URI}", uri) |
181 | ret = os.system(fetchcmd) | 227 | ret = os.system(fetchcmd) |
182 | if ret == 0: | 228 | if ret == 0: |
183 | bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn) | 229 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn) |
184 | return True | 230 | return True |
185 | return False | 231 | return False |
186 | try_mirror = staticmethod(try_mirror) | 232 | try_mirror = staticmethod(try_mirror) |
187 | 233 | ||
188 | def check_for_tarball(d, tarfn, dldir, date): | 234 | def verify_md5sum(ud, got_sum): |
189 | """ | 235 | """ |
190 | Check for a local copy then check the tarball stash. | 236 | Verify the md5sum we wanted with the one we got |
191 | Both checks are skipped if date == 'now'. | ||
192 | |||
193 | d Is a bb.data instance | ||
194 | tarfn is the name of the tarball | ||
195 | date is the SRCDATE | ||
196 | """ | 237 | """ |
197 | if "now" != date: | 238 | wanted_sum = None |
198 | dl = os.path.join(dldir, tarfn) | 239 | if 'md5sum' in ud.parm: |
199 | if os.access(dl, os.R_OK): | 240 | wanted_sum = ud.parm['md5sum'] |
200 | bb.debug(1, "%s already exists, skipping checkout." % tarfn) | 241 | if not wanted_sum: |
201 | return True | 242 | return True |
202 | 243 | ||
203 | # try to use the tarball stash | 244 | return wanted_sum == got_sum |
204 | if Fetch.try_mirror(d, tarfn): | 245 | verify_md5sum = staticmethod(verify_md5sum) |
205 | return True | 246 | |
206 | return False | 247 | def write_md5sum(url, ud, d): |
207 | check_for_tarball = staticmethod(check_for_tarball) | 248 | if bb.which(data.getVar('PATH', d), 'md5sum'): |
208 | 249 | try: | |
250 | md5pipe = os.popen('md5sum ' + ud.localpath) | ||
251 | md5data = (md5pipe.readline().split() or [ "" ])[0] | ||
252 | md5pipe.close() | ||
253 | except OSError: | ||
254 | md5data = "" | ||
255 | |||
256 | # verify the md5sum | ||
257 | if not Fetch.verify_md5sum(ud, md5data): | ||
258 | raise MD5SumError(url) | ||
259 | |||
260 | md5out = file(ud.md5, 'w') | ||
261 | md5out.write(md5data) | ||
262 | md5out.close() | ||
263 | write_md5sum = staticmethod(write_md5sum) | ||
209 | 264 | ||
210 | import cvs | 265 | import cvs |
211 | import git | 266 | import git |
@@ -214,6 +269,7 @@ import svn | |||
214 | import wget | 269 | import wget |
215 | import svk | 270 | import svk |
216 | import ssh | 271 | import ssh |
272 | import perforce | ||
217 | 273 | ||
218 | methods.append(cvs.Cvs()) | 274 | methods.append(cvs.Cvs()) |
219 | methods.append(git.Git()) | 275 | methods.append(git.Git()) |
@@ -222,3 +278,4 @@ methods.append(svn.Svn()) | |||
222 | methods.append(wget.Wget()) | 278 | methods.append(wget.Wget()) |
223 | methods.append(svk.Svk()) | 279 | methods.append(svk.Svk()) |
224 | methods.append(ssh.SSH()) | 280 | methods.append(ssh.SSH()) |
281 | methods.append(perforce.Perforce()) | ||
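The reworked core above replaces per-method URL lists and repeated decoding with a single urldata cache of FetchData objects: initdata() decodes a URL once, picks the owning method, and records localpath plus the .md5 stamp name. A minimal sketch of how a caller might drive the new API, assuming a populated bb.data datastore; the URL and DL_DIR values are invented for illustration:

    # Hypothetical driver for the new fetcher API (not part of this commit).
    import bb, bb.fetch
    from bb import data

    d = data.init()                               # assumed-empty datastore
    data.setVar('DL_DIR', '/tmp/downloads', d)    # illustrative download dir
    urls = ['http://example.com/foo-1.0.tar.gz']  # illustrative URL

    bb.fetch.init(urls, d)         # decodes each URL once into a FetchData
    bb.fetch.go(d)                 # fetches; URLs with a fresh .md5 stamp are skipped
    print bb.fetch.localpaths(d)   # -> ['/tmp/downloads/foo-1.0.tar.gz']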
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py index 0b2477560a..3bdac177eb 100644 --- a/bitbake/lib/bb/fetch/cvs.py +++ b/bitbake/lib/bb/fetch/cvs.py | |||
@@ -33,164 +33,119 @@ from bb.fetch import FetchError | |||
33 | from bb.fetch import MissingParameterError | 33 | from bb.fetch import MissingParameterError |
34 | 34 | ||
35 | class Cvs(Fetch): | 35 | class Cvs(Fetch): |
36 | """Class to fetch a module or modules from cvs repositories""" | 36 | """ |
37 | def supports(url, d): | 37 | Class to fetch a module or modules from cvs repositories |
38 | """Check to see if a given url can be fetched with cvs. | 38 | """ |
39 | Expects supplied url in list form, as outputted by bb.decodeurl(). | 39 | def supports(self, url, ud, d): |
40 | """ | 40 | """ |
41 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 41 | Check to see if a given url can be fetched with cvs. |
42 | return type in ['cvs', 'pserver'] | 42 | """ |
43 | supports = staticmethod(supports) | 43 | return ud.type in ['cvs', 'pserver'] |
44 | |||
45 | def localpath(url, d): | ||
46 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
47 | if "localpath" in parm: | ||
48 | # if user overrides local path, use it. | ||
49 | return parm["localpath"] | ||
50 | 44 | ||
51 | if not "module" in parm: | 45 | def localpath(self, url, ud, d): |
46 | if not "module" in ud.parm: | ||
52 | raise MissingParameterError("cvs method needs a 'module' parameter") | 47 | raise MissingParameterError("cvs method needs a 'module' parameter") |
53 | else: | 48 | ud.module = ud.parm["module"] |
54 | module = parm["module"] | 49 | |
55 | if 'tag' in parm: | 50 | ud.tag = "" |
56 | tag = parm['tag'] | 51 | if 'tag' in ud.parm: |
57 | else: | 52 | ud.tag = ud.parm['tag'] |
58 | tag = "" | 53 | |
59 | if 'date' in parm: | 54 | # Override the default date in certain cases |
60 | date = parm['date'] | 55 | if 'date' in ud.parm: |
61 | else: | 56 | ud.date = ud.parm['date'] |
62 | if not tag: | 57 | elif ud.tag: |
63 | date = Fetch.getSRCDate(d) | 58 | ud.date = "" |
64 | else: | 59 | |
65 | date = "" | 60 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date), d) |
61 | |||
62 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
66 | 63 | ||
67 | return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d)) | 64 | def forcefetch(self, url, ud, d): |
68 | localpath = staticmethod(localpath) | 65 | if (ud.date == "now"): |
66 | return True | ||
67 | return False | ||
69 | 68 | ||
70 | def go(self, d, urls = []): | 69 | def go(self, loc, ud, d): |
71 | """Fetch urls""" | 70 | |
72 | if not urls: | 71 | # try to use the tarball stash |
73 | urls = self.urls | 72 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): |
73 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath) | ||
74 | return | ||
75 | |||
76 | method = "pserver" | ||
77 | if "method" in ud.parm: | ||
78 | method = ud.parm["method"] | ||
79 | |||
80 | localdir = ud.module | ||
81 | if "localdir" in ud.parm: | ||
82 | localdir = ud.parm["localdir"] | ||
83 | |||
84 | cvs_rsh = None | ||
85 | if method == "ext": | ||
86 | if "rsh" in ud.parm: | ||
87 | cvs_rsh = ud.parm["rsh"] | ||
88 | |||
89 | if method == "dir": | ||
90 | cvsroot = ud.path | ||
91 | else: | ||
92 | cvsroot = ":" + method + ":" + ud.user | ||
93 | if ud.pswd: | ||
94 | cvsroot += ":" + ud.pswd | ||
95 | cvsroot += "@" + ud.host + ":" + ud.path | ||
96 | |||
97 | options = [] | ||
98 | if ud.date: | ||
99 | options.append("-D %s" % ud.date) | ||
100 | if ud.tag: | ||
101 | options.append("-r %s" % ud.tag) | ||
74 | 102 | ||
75 | localdata = data.createCopy(d) | 103 | localdata = data.createCopy(d) |
76 | data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) | 104 | data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) |
77 | data.update_data(localdata) | 105 | data.update_data(localdata) |
78 | 106 | ||
79 | for loc in urls: | 107 | data.setVar('CVSROOT', cvsroot, localdata) |
80 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) | 108 | data.setVar('CVSCOOPTS', " ".join(options), localdata) |
81 | if not "module" in parm: | 109 | data.setVar('CVSMODULE', ud.module, localdata) |
82 | raise MissingParameterError("cvs method needs a 'module' parameter") | 110 | cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) |
83 | else: | 111 | cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) |
84 | module = parm["module"] | 112 | |
85 | 113 | if cvs_rsh: | |
86 | dlfile = self.localpath(loc, localdata) | 114 | cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) |
87 | dldir = data.getVar('DL_DIR', localdata, 1) | 115 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) |
88 | # if local path contains the cvs | 116 | |
89 | # module, consider the dir above it to be the | 117 | # create module directory |
90 | # download directory | 118 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") |
91 | # pos = dlfile.find(module) | 119 | pkg = data.expand('${PN}', d) |
92 | # if pos: | 120 | pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) |
93 | # dldir = dlfile[:pos] | 121 | moddir = os.path.join(pkgdir,localdir) |
94 | # else: | 122 | if os.access(os.path.join(moddir,'CVS'), os.R_OK): |
95 | # dldir = os.path.dirname(dlfile) | 123 | bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) |
96 | 124 | # update sources there | |
97 | # setup cvs options | ||
98 | options = [] | ||
99 | if 'tag' in parm: | ||
100 | tag = parm['tag'] | ||
101 | else: | ||
102 | tag = "" | ||
103 | |||
104 | if 'date' in parm: | ||
105 | date = parm['date'] | ||
106 | else: | ||
107 | if not tag: | ||
108 | date = Fetch.getSRCDate(d) | ||
109 | else: | ||
110 | date = "" | ||
111 | |||
112 | if "method" in parm: | ||
113 | method = parm["method"] | ||
114 | else: | ||
115 | method = "pserver" | ||
116 | |||
117 | if "localdir" in parm: | ||
118 | localdir = parm["localdir"] | ||
119 | else: | ||
120 | localdir = module | ||
121 | |||
122 | cvs_rsh = None | ||
123 | if method == "ext": | ||
124 | if "rsh" in parm: | ||
125 | cvs_rsh = parm["rsh"] | ||
126 | |||
127 | tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata) | ||
128 | data.setVar('TARFILES', dlfile, localdata) | ||
129 | data.setVar('TARFN', tarfn, localdata) | ||
130 | |||
131 | if Fetch.check_for_tarball(d, tarfn, dldir, date): | ||
132 | continue | ||
133 | |||
134 | if date: | ||
135 | options.append("-D %s" % date) | ||
136 | if tag: | ||
137 | options.append("-r %s" % tag) | ||
138 | |||
139 | olddir = os.path.abspath(os.getcwd()) | ||
140 | os.chdir(data.expand(dldir, localdata)) | ||
141 | |||
142 | # setup cvsroot | ||
143 | if method == "dir": | ||
144 | cvsroot = path | ||
145 | else: | ||
146 | cvsroot = ":" + method + ":" + user | ||
147 | if pswd: | ||
148 | cvsroot += ":" + pswd | ||
149 | cvsroot += "@" + host + ":" + path | ||
150 | |||
151 | data.setVar('CVSROOT', cvsroot, localdata) | ||
152 | data.setVar('CVSCOOPTS', " ".join(options), localdata) | ||
153 | data.setVar('CVSMODULE', module, localdata) | ||
154 | cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
155 | cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) | ||
156 | |||
157 | if cvs_rsh: | ||
158 | cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) | ||
159 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) | ||
160 | |||
161 | # create module directory | ||
162 | bb.debug(2, "Fetch: checking for module directory") | ||
163 | pkg=data.expand('${PN}', d) | ||
164 | pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg) | ||
165 | moddir=os.path.join(pkgdir,localdir) | ||
166 | if os.access(os.path.join(moddir,'CVS'), os.R_OK): | ||
167 | bb.note("Update " + loc) | ||
168 | # update sources there | ||
169 | os.chdir(moddir) | ||
170 | myret = os.system(cvsupdatecmd) | ||
171 | else: | ||
172 | bb.note("Fetch " + loc) | ||
173 | # check out sources there | ||
174 | bb.mkdirhier(pkgdir) | ||
175 | os.chdir(pkgdir) | ||
176 | bb.debug(1, "Running %s" % cvscmd) | ||
177 | myret = os.system(cvscmd) | ||
178 | |||
179 | if myret != 0 or not os.access(moddir, os.R_OK): | ||
180 | try: | ||
181 | os.rmdir(moddir) | ||
182 | except OSError: | ||
183 | pass | ||
184 | raise FetchError(module) | ||
185 | |||
186 | os.chdir(moddir) | 125 | os.chdir(moddir) |
187 | os.chdir('..') | 126 | myret = os.system(cvsupdatecmd) |
188 | # tar them up to a defined filename | 127 | else: |
189 | myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir))) | 128 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) |
190 | if myret != 0: | 129 | # check out sources there |
191 | try: | 130 | bb.mkdirhier(pkgdir) |
192 | os.unlink(tarfn) | 131 | os.chdir(pkgdir) |
193 | except OSError: | 132 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) |
194 | pass | 133 | myret = os.system(cvscmd) |
195 | os.chdir(olddir) | 134 | |
196 | del localdata | 135 | if myret != 0 or not os.access(moddir, os.R_OK): |
136 | try: | ||
137 | os.rmdir(moddir) | ||
138 | except OSError: | ||
139 | pass | ||
140 | raise FetchError(ud.module) | ||
141 | |||
142 | os.chdir(moddir) | ||
143 | os.chdir('..') | ||
144 | # tar them up to a defined filename | ||
145 | myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir))) | ||
146 | if myret != 0: | ||
147 | try: | ||
148 | os.unlink(ud.localpath) | ||
149 | except OSError: | ||
150 | pass | ||
151 | raise FetchError(ud.module) | ||
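With ud carrying the decoded fields, the cvsroot assembly in the new go() reduces to a few string operations; a standalone sketch with invented connection details:

    # Sketch of the CVSROOT built for a pserver checkout (values assumed).
    method, user, pswd = "pserver", "anoncvs", ""
    host, path = "cvs.example.org", "/cvsroot"
    if method == "dir":
        cvsroot = path
    else:
        cvsroot = ":" + method + ":" + user
        if pswd:
            cvsroot += ":" + pswd
        cvsroot += "@" + host + ":" + path
    print cvsroot   # -> :pserver:anoncvs@cvs.example.org:/cvsroot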
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py index 49235c141e..75a7629223 100644 --- a/bitbake/lib/bb/fetch/git.py +++ b/bitbake/lib/bb/fetch/git.py | |||
@@ -37,7 +37,7 @@ def prunedir(topdir): | |||
37 | 37 | ||
38 | def rungitcmd(cmd,d): | 38 | def rungitcmd(cmd,d): |
39 | 39 | ||
40 | bb.debug(1, "Running %s" % cmd) | 40 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd) |
41 | 41 | ||
42 | # Need to export PATH as git is likely to be in metadata paths | 42 | # Need to export PATH as git is likely to be in metadata paths |
43 | # rather than host provided | 43 | # rather than host provided |
@@ -48,108 +48,80 @@ def rungitcmd(cmd,d): | |||
48 | if myret != 0: | 48 | if myret != 0: |
49 | raise FetchError("Git: %s failed" % pathcmd) | 49 | raise FetchError("Git: %s failed" % pathcmd) |
50 | 50 | ||
51 | def gettag(parm): | ||
52 | if 'tag' in parm: | ||
53 | tag = parm['tag'] | ||
54 | else: | ||
55 | tag = "" | ||
56 | if not tag: | ||
57 | tag = "master" | ||
58 | |||
59 | return tag | ||
60 | |||
61 | def getprotocol(parm): | ||
62 | if 'protocol' in parm: | ||
63 | proto = parm['protocol'] | ||
64 | else: | ||
65 | proto = "" | ||
66 | if not proto: | ||
67 | proto = "rsync" | ||
68 | |||
69 | return proto | ||
70 | |||
71 | def localfile(url, d): | ||
72 | """Return the filename to cache the checkout in""" | ||
73 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
74 | |||
75 | #if user sets localpath for file, use it instead. | ||
76 | if "localpath" in parm: | ||
77 | return parm["localpath"] | ||
78 | |||
79 | tag = gettag(parm) | ||
80 | |||
81 | return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag), d) | ||
82 | |||
83 | class Git(Fetch): | 51 | class Git(Fetch): |
84 | """Class to fetch a module or modules from git repositories""" | 52 | """Class to fetch a module or modules from git repositories""" |
85 | def supports(url, d): | 53 | def supports(self, url, ud, d): |
86 | """Check to see if a given url can be fetched with cvs. | 54 | """ |
87 | Expects supplied url in list form, as outputted by bb.decodeurl(). | 55 | Check to see if a given url can be fetched with git. |
88 | """ | 56 | """ |
89 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 57 | return ud.type in ['git'] |
90 | return type in ['git'] | ||
91 | supports = staticmethod(supports) | ||
92 | 58 | ||
93 | def localpath(url, d): | 59 | def localpath(self, url, ud, d): |
94 | 60 | ||
95 | return os.path.join(data.getVar("DL_DIR", d, 1), localfile(url, d)) | 61 | ud.proto = "rsync" |
62 | if 'protocol' in ud.parm: | ||
63 | ud.proto = ud.parm['protocol'] | ||
96 | 64 | ||
97 | localpath = staticmethod(localpath) | 65 | ud.tag = "master" |
66 | if 'tag' in ud.parm: | ||
67 | ud.tag = ud.parm['tag'] | ||
98 | 68 | ||
99 | def go(self, d, urls = []): | 69 | ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d) |
100 | """Fetch urls""" | ||
101 | if not urls: | ||
102 | urls = self.urls | ||
103 | 70 | ||
104 | for loc in urls: | 71 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) |
105 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d)) | ||
106 | 72 | ||
107 | tag = gettag(parm) | 73 | def forcefetch(self, url, ud, d): |
108 | proto = getprotocol(parm) | 74 | # tag=="master" must always update |
75 | if (ud.tag == "master"): | ||
76 | return True | ||
77 | return False | ||
109 | 78 | ||
110 | gitsrcname = '%s%s' % (host, path.replace('/', '.')) | 79 | def go(self, loc, ud, d): |
80 | """Fetch url""" | ||
111 | 81 | ||
112 | repofilename = 'git_%s.tar.gz' % (gitsrcname) | 82 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): |
113 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename) | 83 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath) |
114 | repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) | 84 | return |
115 | 85 | ||
116 | coname = '%s' % (tag) | 86 | gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) |
117 | codir = os.path.join(repodir, coname) | ||
118 | 87 | ||
119 | cofile = self.localpath(loc, d) | 88 | repofilename = 'git_%s.tar.gz' % (gitsrcname) |
89 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename) | ||
90 | repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) | ||
120 | 91 | ||
121 | # tag=="master" must always update | 92 | coname = '%s' % (ud.tag) |
122 | if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)): | 93 | codir = os.path.join(repodir, coname) |
123 | bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile) | ||
124 | continue | ||
125 | 94 | ||
126 | if not os.path.exists(repodir): | 95 | if not os.path.exists(repodir): |
127 | if Fetch.try_mirror(d, repofilename): | 96 | if Fetch.try_mirror(d, repofilename): |
128 | bb.mkdirhier(repodir) | 97 | bb.mkdirhier(repodir) |
129 | os.chdir(repodir) | 98 | os.chdir(repodir) |
130 | rungitcmd("tar -xzf %s" % (repofile),d) | 99 | rungitcmd("tar -xzf %s" % (repofile),d) |
131 | else: | 100 | else: |
132 | rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d) | 101 | rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d) |
133 | 102 | ||
134 | os.chdir(repodir) | 103 | os.chdir(repodir) |
135 | rungitcmd("git pull %s://%s%s" % (proto, host, path),d) | 104 | rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d) |
136 | rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d) | 105 | rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d) |
137 | rungitcmd("git prune-packed", d) | 106 | rungitcmd("git prune-packed", d) |
138 | # old method of downloading tags | 107 | rungitcmd("git pack-redundant --all | xargs -r rm", d) |
139 | #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d) | 108 | # Remove all but the .git directory |
109 | rungitcmd("rm * -Rf", d) | ||
110 | # old method of downloading tags | ||
111 | #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d) | ||
140 | 112 | ||
141 | os.chdir(repodir) | 113 | os.chdir(repodir) |
142 | bb.note("Creating tarball of git repository") | 114 | bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") |
143 | rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) | 115 | rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) |
144 | 116 | ||
145 | if os.path.exists(codir): | 117 | if os.path.exists(codir): |
146 | prunedir(codir) | 118 | prunedir(codir) |
147 | 119 | ||
148 | bb.mkdirhier(codir) | 120 | bb.mkdirhier(codir) |
149 | os.chdir(repodir) | 121 | os.chdir(repodir) |
150 | rungitcmd("git read-tree %s" % (tag),d) | 122 | rungitcmd("git read-tree %s" % (ud.tag),d) |
151 | rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) | 123 | rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) |
152 | 124 | ||
153 | os.chdir(codir) | 125 | os.chdir(codir) |
154 | bb.note("Creating tarball of git checkout") | 126 | bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") |
155 | rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d) | 127 | rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d) |
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py index 51938f823e..5224976704 100644 --- a/bitbake/lib/bb/fetch/local.py +++ b/bitbake/lib/bb/fetch/local.py | |||
@@ -31,15 +31,13 @@ from bb import data | |||
31 | from bb.fetch import Fetch | 31 | from bb.fetch import Fetch |
32 | 32 | ||
33 | class Local(Fetch): | 33 | class Local(Fetch): |
34 | def supports(url, d): | 34 | def supports(self, url, urldata, d): |
35 | """Check to see if a given url can be fetched in the local filesystem. | ||
36 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
37 | """ | 35 | """ |
38 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 36 | Check to see if a given url can be fetched in the local filesystem. |
39 | return type in ['file','patch'] | 37 | """ |
40 | supports = staticmethod(supports) | 38 | return urldata.type in ['file','patch'] |
41 | 39 | ||
42 | def localpath(url, d): | 40 | def localpath(self, url, urldata, d): |
43 | """Return the local filename of a given url assuming a successful fetch. | 41 | """Return the local filename of a given url assuming a successful fetch. |
44 | """ | 42 | """ |
45 | path = url.split("://")[1] | 43 | path = url.split("://")[1] |
@@ -52,10 +50,10 @@ class Local(Fetch): | |||
52 | filesdir = data.getVar('FILESDIR', d, 1) | 50 | filesdir = data.getVar('FILESDIR', d, 1) |
53 | if filesdir: | 51 | if filesdir: |
54 | newpath = os.path.join(filesdir, path) | 52 | newpath = os.path.join(filesdir, path) |
53 | # We don't set localfile as for this fetcher the file is already local! | ||
55 | return newpath | 54 | return newpath |
56 | localpath = staticmethod(localpath) | ||
57 | 55 | ||
58 | def go(self, urls = []): | 56 | def go(self, url, urldata, d): |
59 | """Fetch urls (no-op for Local method)""" | 57 | """Fetch urls (no-op for Local method)""" |
60 | # no need to fetch local files, we'll deal with them in place. | 58 | # no need to fetch local files, we'll deal with them in place. |
61 | return 1 | 59 | return 1 |
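Since local files need no download, the interesting part of this fetcher is path resolution; a sketch of the FILESDIR lookup for a relative path, with invented values:

    import os
    # Assumed values for illustration; FILESDIR normally comes from the datastore.
    url = "file://hello/hello-1.0.patch"
    filesdir = "/path/to/files"
    path = url.split("://")[1]
    print os.path.join(filesdir, path)   # -> /path/to/files/hello/hello-1.0.patch
    # localfile is deliberately left unset: the file is already local,
    # so go() has nothing to fetch and no md5 stamp is written.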
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py new file mode 100644 index 0000000000..88acf69951 --- /dev/null +++ b/bitbake/lib/bb/fetch/perforce.py | |||
@@ -0,0 +1,213 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | BitBake 'Fetch' implementations | ||
6 | |||
7 | Classes for obtaining upstream sources for the | ||
8 | BitBake build tools. | ||
9 | |||
10 | Copyright (C) 2003, 2004 Chris Larson | ||
11 | |||
12 | This program is free software; you can redistribute it and/or modify it under | ||
13 | the terms of the GNU General Public License as published by the Free Software | ||
14 | Foundation; either version 2 of the License, or (at your option) any later | ||
15 | version. | ||
16 | |||
17 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
18 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
19 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
20 | |||
21 | You should have received a copy of the GNU General Public License along with | ||
22 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
23 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
24 | |||
25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
26 | """ | ||
27 | |||
28 | import os, re | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | from bb.fetch import FetchError | ||
33 | from bb.fetch import MissingParameterError | ||
34 | |||
35 | class Perforce(Fetch): | ||
36 | def supports(self, url, ud, d): | ||
37 | return ud.type in ['p4'] | ||
38 | |||
39 | def doparse(url,d): | ||
40 | parm=[] | ||
41 | path = url.split("://")[1] | ||
42 | delim = path.find("@") | ||
43 | if delim != -1: | ||
44 | (user,pswd,host,port) = path.split('@')[0].split(":") | ||
45 | path = path.split('@')[1] | ||
46 | else: | ||
47 | (host,port) = data.getVar('P4PORT', d).split(':') | ||
48 | user = "" | ||
49 | pswd = "" | ||
50 | |||
51 | if path.find(";") != -1: | ||
52 | keys=[] | ||
53 | values=[] | ||
54 | plist = path.split(';') | ||
55 | for item in plist: | ||
56 | if item.count('='): | ||
57 | (key,value) = item.split('=') | ||
58 | keys.append(key) | ||
59 | values.append(value) | ||
60 | |||
61 | parm = dict(zip(keys,values)) | ||
62 | path = "//" + path.split(';')[0] | ||
63 | host += ":%s" % (port) | ||
64 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) | ||
65 | |||
66 | return host,path,user,pswd,parm | ||
67 | doparse = staticmethod(doparse) | ||
68 | |||
69 | def getcset(d, depot,host,user,pswd,parm): | ||
70 | if "cset" in parm: | ||
71 | return parm["cset"]; | ||
72 | if user: | ||
73 | data.setVar('P4USER', user, d) | ||
74 | if pswd: | ||
75 | data.setVar('P4PASSWD', pswd, d) | ||
76 | if host: | ||
77 | data.setVar('P4PORT', host, d) | ||
78 | |||
79 | p4date = data.getVar("P4DATE", d, 1) | ||
80 | if "revision" in parm: | ||
81 | depot += "#%s" % (parm["revision"]) | ||
82 | elif "label" in parm: | ||
83 | depot += "@%s" % (parm["label"]) | ||
84 | elif p4date: | ||
85 | depot += "@%s" % (p4date) | ||
86 | |||
87 | p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) | ||
88 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot)) | ||
89 | p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot)) | ||
90 | cset = p4file.readline().strip() | ||
91 | bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset)) | ||
92 | if not cset: | ||
93 | return -1 | ||
94 | |||
95 | return cset.split(' ')[1] | ||
96 | getcset = staticmethod(getcset) | ||
97 | |||
98 | def localpath(self, url, ud, d): | ||
99 | |||
100 | (host,path,user,pswd,parm) = Perforce.doparse(url,d) | ||
101 | |||
102 | # If a label is specified, we use that as our filename | ||
103 | |||
104 | if "label" in parm: | ||
105 | ud.localfile = "%s.tar.gz" % (parm["label"]) | ||
106 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
107 | |||
108 | base = path | ||
109 | which = path.find('/...') | ||
110 | if which != -1: | ||
111 | base = path[:which] | ||
112 | |||
113 | if base[0] == "/": | ||
114 | base = base[1:] | ||
115 | |||
116 | cset = Perforce.getcset(d, path, host, user, pswd, parm) | ||
117 | |||
118 | ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d) | ||
119 | |||
120 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
121 | |||
122 | def go(self, loc, ud, d): | ||
123 | """ | ||
124 | Fetch url | ||
125 | """ | ||
126 | |||
127 | # try to use the tarball stash | ||
128 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): | ||
129 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath) | ||
130 | return | ||
131 | |||
132 | (host,depot,user,pswd,parm) = Perforce.doparse(loc, d) | ||
133 | |||
134 | if depot.find('/...') != -1: | ||
135 | path = depot[:depot.find('/...')] | ||
136 | else: | ||
137 | path = depot | ||
138 | |||
139 | if "module" in parm: | ||
140 | module = parm["module"] | ||
141 | else: | ||
142 | module = os.path.basename(path) | ||
143 | |||
144 | localdata = data.createCopy(d) | ||
145 | data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
146 | data.update_data(localdata) | ||
147 | |||
148 | # Get the p4 command | ||
149 | if user: | ||
150 | data.setVar('P4USER', user, localdata) | ||
151 | |||
152 | if pswd: | ||
153 | data.setVar('P4PASSWD', pswd, localdata) | ||
154 | |||
155 | if host: | ||
156 | data.setVar('P4PORT', host, localdata) | ||
157 | |||
158 | p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
159 | |||
160 | # create temp directory | ||
161 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") | ||
162 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
163 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) | ||
164 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
165 | tmpfile = tmppipe.readline().strip() | ||
166 | if not tmpfile: | ||
167 | bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
168 | raise FetchError(module) | ||
169 | |||
170 | if "label" in parm: | ||
171 | depot = "%s@%s" % (depot,parm["label"]) | ||
172 | else: | ||
173 | cset = Perforce.getcset(d, depot, host, user, pswd, parm) | ||
174 | depot = "%s@%s" % (depot,cset) | ||
175 | |||
176 | os.chdir(tmpfile) | ||
177 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
178 | bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot)) | ||
179 | p4file = os.popen("%s files %s" % (p4cmd, depot)) | ||
180 | |||
181 | if not p4file: | ||
182 | bb.error("Fetch: unable to get the P4 files from %s" % (depot)) | ||
183 | raise FetchError(module) | ||
184 | |||
185 | count = 0 | ||
186 | |||
187 | for file in p4file: | ||
188 | list = file.split() | ||
189 | |||
190 | if list[2] == "delete": | ||
191 | continue | ||
192 | |||
193 | dest = list[0][len(path)+1:] | ||
194 | where = dest.find("#") | ||
195 | |||
196 | os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0])) | ||
197 | count = count + 1 | ||
198 | |||
199 | if count == 0: | ||
200 | bb.error("Fetch: No files gathered from the P4 fetch") | ||
201 | raise FetchError(module) | ||
202 | |||
203 | myret = os.system("tar -czf %s %s" % (ud.localpath, module)) | ||
204 | if myret != 0: | ||
205 | try: | ||
206 | os.unlink(ud.localpath) | ||
207 | except OSError: | ||
208 | pass | ||
209 | raise FetchError(module) | ||
210 | # cleanup | ||
211 | os.system('rm -rf %s' % tmpfile) | ||
212 | |||
213 | |||
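getcset() narrows the depot path to a single changeset specifier before asking p4 for the most recent change; the suffixing precedence in isolation, with invented values:

    # revision > label > P4DATE, in that order (values assumed).
    depot, parm, p4date = "//depot/project/...", {"label": "REL_1"}, "20061101"
    if "revision" in parm:
        depot += "#%s" % (parm["revision"])
    elif "label" in parm:
        depot += "@%s" % (parm["label"])
    elif p4date:
        depot += "@%s" % (p4date)
    print depot   # -> //depot/project/...@REL_1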
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py index 57874d5ba9..e5f69e33e7 100644 --- a/bitbake/lib/bb/fetch/ssh.py +++ b/bitbake/lib/bb/fetch/ssh.py | |||
@@ -64,59 +64,55 @@ __pattern__ = re.compile(r''' | |||
64 | class SSH(Fetch): | 64 | class SSH(Fetch): |
65 | '''Class to fetch a module or modules via Secure Shell''' | 65 | '''Class to fetch a module or modules via Secure Shell''' |
66 | 66 | ||
67 | def supports(self, url, d): | 67 | def supports(self, url, urldata, d): |
68 | return __pattern__.match(url) != None | 68 | return __pattern__.match(url) != None |
69 | 69 | ||
70 | def localpath(self, url, d): | 70 | def localpath(self, url, urldata, d): |
71 | m = __pattern__.match(url) | 71 | m = __pattern__.match(url) |
72 | path = m.group('path') | 72 | path = m.group('path') |
73 | host = m.group('host') | 73 | host = m.group('host') |
74 | lpath = os.path.join(data.getVar('DL_DIR', d, 1), host, os.path.basename(path)) | 74 | lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) |
75 | return lpath | 75 | return lpath |
76 | 76 | ||
77 | def go(self, d, urls = []): | 77 | def go(self, url, urldata, d): |
78 | if not urls: | 78 | dldir = data.getVar('DL_DIR', d, 1) |
79 | urls = self.urls | 79 | |
80 | 80 | m = __pattern__.match(url) | |
81 | for url in urls: | 81 | path = m.group('path') |
82 | dldir = data.getVar('DL_DIR', d, 1) | 82 | host = m.group('host') |
83 | 83 | port = m.group('port') | |
84 | m = __pattern__.match(url) | 84 | user = m.group('user') |
85 | path = m.group('path') | 85 | password = m.group('pass') |
86 | host = m.group('host') | 86 | |
87 | port = m.group('port') | 87 | ldir = os.path.join(dldir, host) |
88 | user = m.group('user') | 88 | lpath = os.path.join(ldir, os.path.basename(path)) |
89 | password = m.group('pass') | 89 | |
90 | 90 | if not os.path.exists(ldir): | |
91 | ldir = os.path.join(dldir, host) | 91 | os.makedirs(ldir) |
92 | lpath = os.path.join(ldir, os.path.basename(path)) | 92 | |
93 | 93 | if port: | |
94 | if not os.path.exists(ldir): | 94 | port = '-P %s' % port |
95 | os.makedirs(ldir) | 95 | else: |
96 | 96 | port = '' | |
97 | if port: | 97 | |
98 | port = '-P %s' % port | 98 | if user: |
99 | else: | 99 | fr = user |
100 | port = '' | 100 | if password: |
101 | 101 | fr += ':%s' % password | |
102 | if user: | 102 | fr += '@%s' % host |
103 | fr = user | 103 | else: |
104 | if password: | 104 | fr = host |
105 | fr += ':%s' % password | 105 | fr += ':%s' % path |
106 | fr += '@%s' % host | 106 | |
107 | else: | 107 | |
108 | fr = host | 108 | import commands |
109 | fr += ':%s' % path | 109 | cmd = 'scp -B -r %s %s %s/' % ( |
110 | 110 | port, | |
111 | 111 | commands.mkarg(fr), | |
112 | import commands | 112 | commands.mkarg(ldir) |
113 | cmd = 'scp -B -r %s %s %s/' % ( | 113 | ) |
114 | port, | 114 | |
115 | commands.mkarg(fr), | 115 | (exitstatus, output) = commands.getstatusoutput(cmd) |
116 | commands.mkarg(ldir) | 116 | if exitstatus != 0: |
117 | ) | 117 | print output |
118 | 118 | raise FetchError('Unable to fetch %s' % url) | |
119 | (exitstatus, output) = commands.getstatusoutput(cmd) | ||
120 | if exitstatus != 0: | ||
121 | print output | ||
122 | raise FetchError('Unable to fetch %s' % url) | ||
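With the per-URL loop gone, go() builds exactly one scp invocation; the command assembly in isolation, using the same Python 2 commands.mkarg quoting helper and invented credentials:

    import commands   # Python 2 stdlib, as used by the module itself
    # Assumed values for illustration.
    port, user, password = '-P 2222', 'builder', ''
    host, path = 'example.org', '/srv/src/foo-1.0.tar.gz'
    ldir = '/tmp/downloads/example.org'
    if user:
        fr = user
        if password:
            fr += ':%s' % password
        fr += '@%s' % host
    else:
        fr = host
    fr += ':%s' % path
    # mkarg() prepends a space and shell-quotes each argument.
    cmd = 'scp -B -r %s %s %s/' % (port, commands.mkarg(fr), commands.mkarg(ldir))
    print cmd   # scp -B -r -P 2222 'builder@example.org:/srv/src/foo-1.0.tar.gz' '/tmp/downloads/example.org'/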
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py index 19103213cd..29270ab3d8 100644 --- a/bitbake/lib/bb/fetch/svk.py +++ b/bitbake/lib/bb/fetch/svk.py | |||
@@ -42,112 +42,76 @@ from bb.fetch import MissingParameterError | |||
42 | 42 | ||
43 | class Svk(Fetch): | 43 | class Svk(Fetch): |
44 | """Class to fetch a module or modules from svk repositories""" | 44 | """Class to fetch a module or modules from svk repositories""" |
45 | def supports(url, d): | 45 | def supports(self, url, ud, d): |
46 | """Check to see if a given url can be fetched with svk. | ||
47 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
48 | """ | 46 | """ |
49 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 47 | Check to see if a given url can be fetched with svk. |
50 | return type in ['svk'] | 48 | """ |
51 | supports = staticmethod(supports) | 49 | return ud.type in ['svk'] |
52 | |||
53 | def localpath(url, d): | ||
54 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
55 | if "localpath" in parm: | ||
56 | # if user overrides local path, use it. | ||
57 | return parm["localpath"] | ||
58 | 50 | ||
59 | if not "module" in parm: | 51 | def localpath(self, url, ud, d): |
52 | if not "module" in ud.parm: | ||
60 | raise MissingParameterError("svk method needs a 'module' parameter") | 53 | raise MissingParameterError("svk method needs a 'module' parameter") |
61 | else: | 54 | else: |
62 | module = parm["module"] | 55 | ud.module = ud.parm["module"] |
63 | if 'rev' in parm: | 56 | |
64 | revision = parm['rev'] | 57 | ud.revision = "" |
65 | else: | 58 | if 'rev' in ud.parm: |
66 | revision = "" | 59 | ud.revision = ud.parm['rev'] |
60 | |||
61 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | ||
67 | 62 | ||
68 | date = Fetch.getSRCDate(d) | 63 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) |
69 | 64 | ||
70 | return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d)) | 65 | def forcefetch(self, url, ud, d): |
71 | localpath = staticmethod(localpath) | 66 | if (ud.date == "now"): |
67 | return True | ||
68 | return False | ||
72 | 69 | ||
73 | def go(self, d, urls = []): | 70 | def go(self, loc, ud, d): |
74 | """Fetch urls""" | 71 | """Fetch urls""" |
75 | if not urls: | ||
76 | urls = self.urls | ||
77 | 72 | ||
73 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): | ||
74 | return | ||
75 | |||
76 | svkroot = ud.host + ud.path | ||
77 | |||
78 | svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module) | ||
79 | |||
80 | if ud.revision: | ||
81 | svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module) | ||
82 | |||
83 | # create temp directory | ||
78 | localdata = data.createCopy(d) | 84 | localdata = data.createCopy(d) |
79 | data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
80 | data.update_data(localdata) | 85 | data.update_data(localdata) |
81 | 86 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") | |
82 | for loc in urls: | 87 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) |
83 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) | 88 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) |
84 | if not "module" in parm: | 89 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") |
85 | raise MissingParameterError("svk method needs a 'module' parameter") | 90 | tmpfile = tmppipe.readline().strip() |
86 | else: | 91 | if not tmpfile: |
87 | module = parm["module"] | 92 | bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") |
88 | 93 | raise FetchError(ud.module) | |
89 | dlfile = self.localpath(loc, localdata) | 94 | |
90 | dldir = data.getVar('DL_DIR', localdata, 1) | 95 | # check out sources there |
91 | 96 | os.chdir(tmpfile) | |
92 | # setup svk options | 97 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) |
93 | options = [] | 98 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) |
94 | if 'rev' in parm: | 99 | myret = os.system(svkcmd) |
95 | revision = parm['rev'] | 100 | if myret != 0: |
96 | else: | 101 | try: |
97 | revision = "" | 102 | os.rmdir(tmpfile) |
98 | 103 | except OSError: | |
99 | date = Fetch.getSRCDate(d) | 104 | pass |
100 | tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) | 105 | raise FetchError(ud.module) |
101 | data.setVar('TARFILES', dlfile, localdata) | 106 | |
102 | data.setVar('TARFN', tarfn, localdata) | 107 | os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) |
103 | 108 | # tar them up to a defined filename | |
104 | if Fetch.check_for_tarball(d, tarfn, dldir, date): | 109 | myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) |
105 | continue | 110 | if myret != 0: |
106 | 111 | try: | |
107 | olddir = os.path.abspath(os.getcwd()) | 112 | os.unlink(ud.localpath) |
108 | os.chdir(data.expand(dldir, localdata)) | 113 | except OSError: |
109 | 114 | pass | |
110 | svkroot = host + path | 115 | raise FetchError(ud.module) |
111 | 116 | # cleanup | |
112 | data.setVar('SVKROOT', svkroot, localdata) | 117 | os.system('rm -rf %s' % tmpfile) |
113 | data.setVar('SVKCOOPTS', " ".join(options), localdata) | ||
114 | data.setVar('SVKMODULE', module, localdata) | ||
115 | svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module) | ||
116 | |||
117 | if revision: | ||
118 | svkcmd = "svk co -r %s/%s" % (revision, svkroot, module) | ||
119 | |||
120 | # create temp directory | ||
121 | bb.debug(2, "Fetch: creating temporary directory") | ||
122 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
123 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) | ||
124 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
125 | tmpfile = tmppipe.readline().strip() | ||
126 | if not tmpfile: | ||
127 | bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
128 | raise FetchError(module) | ||
129 | |||
130 | # check out sources there | ||
131 | os.chdir(tmpfile) | ||
132 | bb.note("Fetch " + loc) | ||
133 | bb.debug(1, "Running %s" % svkcmd) | ||
134 | myret = os.system(svkcmd) | ||
135 | if myret != 0: | ||
136 | try: | ||
137 | os.rmdir(tmpfile) | ||
138 | except OSError: | ||
139 | pass | ||
140 | raise FetchError(module) | ||
141 | |||
142 | os.chdir(os.path.join(tmpfile, os.path.dirname(module))) | ||
143 | # tar them up to a defined filename | ||
144 | myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) | ||
145 | if myret != 0: | ||
146 | try: | ||
147 | os.unlink(tarfn) | ||
148 | except OSError: | ||
149 | pass | ||
150 | # cleanup | ||
151 | os.system('rm -rf %s' % tmpfile) | ||
152 | os.chdir(olddir) | ||
153 | del localdata | ||
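With the ud.date and format-string fixes above, the two checkout commands the svk fetcher can issue look like this in isolation (repository values invented):

    # Assumed values for illustration.
    svkroot, module = "svk.example.org/repos", "trunk/foo"
    date, revision = "20061101", "1234"
    print "svk co -r {%s} %s/%s" % (date, svkroot, module)
    # -> svk co -r {20061101} svk.example.org/repos/trunk/foo
    print "svk co -r %s %s/%s" % (revision, svkroot, module)
    # -> svk co -r 1234 svk.example.org/repos/trunk/foo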
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py index d1a959371b..b95de2a79b 100644 --- a/bitbake/lib/bb/fetch/svn.py +++ b/bitbake/lib/bb/fetch/svn.py | |||
@@ -26,6 +26,7 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig | |||
26 | """ | 26 | """ |
27 | 27 | ||
28 | import os, re | 28 | import os, re |
29 | import sys | ||
29 | import bb | 30 | import bb |
30 | from bb import data | 31 | from bb import data |
31 | from bb.fetch import Fetch | 32 | from bb.fetch import Fetch |
@@ -34,136 +35,98 @@ from bb.fetch import MissingParameterError | |||
34 | 35 | ||
35 | class Svn(Fetch): | 36 | class Svn(Fetch): |
36 | """Class to fetch a module or modules from svn repositories""" | 37 | """Class to fetch a module or modules from svn repositories""" |
37 | def supports(url, d): | 38 | def supports(self, url, ud, d): |
38 | """Check to see if a given url can be fetched with svn. | ||
39 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
40 | """ | 39 | """ |
41 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 40 | Check to see if a given url can be fetched with svn. |
42 | return type in ['svn'] | 41 | """ |
43 | supports = staticmethod(supports) | 42 | return ud.type in ['svn'] |
44 | |||
45 | def localpath(url, d): | ||
46 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
47 | if "localpath" in parm: | ||
48 | # if user overrides local path, use it. | ||
49 | return parm["localpath"] | ||
50 | 43 | ||
51 | if not "module" in parm: | 44 | def localpath(self, url, ud, d): |
45 | if not "module" in ud.parm: | ||
52 | raise MissingParameterError("svn method needs a 'module' parameter") | 46 | raise MissingParameterError("svn method needs a 'module' parameter") |
53 | else: | 47 | else: |
54 | module = parm["module"] | 48 | ud.module = ud.parm["module"] |
55 | if 'rev' in parm: | 49 | |
56 | revision = parm['rev'] | 50 | ud.revision = "" |
57 | else: | 51 | if 'rev' in ud.parm: |
58 | revision = "" | 52 | ud.revision = ud.parm['rev'] |
53 | |||
54 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | ||
55 | |||
56 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
59 | 57 | ||
60 | date = Fetch.getSRCDate(d) | 58 | def forcefetch(self, url, ud, d): |
59 | if (ud.date == "now"): | ||
60 | return True | ||
61 | return False | ||
61 | 62 | ||
62 | return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d)) | 63 | def go(self, loc, ud, d): |
63 | localpath = staticmethod(localpath) | 64 | """Fetch url""" |
64 | 65 | ||
65 | def go(self, d, urls = []): | 66 | # try to use the tarball stash |
66 | """Fetch urls""" | 67 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): |
67 | if not urls: | 68 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath) |
68 | urls = self.urls | 69 | return |
70 | |||
71 | proto = "svn" | ||
72 | if "proto" in ud.parm: | ||
73 | proto = ud.parm["proto"] | ||
74 | |||
75 | svn_rsh = None | ||
76 | if proto == "svn+ssh" and "rsh" in ud.parm: | ||
77 | svn_rsh = ud.parm["rsh"] | ||
78 | |||
79 | svnroot = ud.host + ud.path | ||
80 | |||
81 | # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now" | ||
82 | options = [] | ||
83 | if ud.revision: | ||
84 | options.append("-r %s" % ud.revision) | ||
85 | elif ud.date != "now": | ||
86 | options.append("-r {%s}" % ud.date) | ||
69 | 87 | ||
70 | localdata = data.createCopy(d) | 88 | localdata = data.createCopy(d) |
71 | data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) | 89 | data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) |
72 | data.update_data(localdata) | 90 | data.update_data(localdata) |
73 | 91 | ||
74 | for loc in urls: | 92 | data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata) |
75 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) | 93 | data.setVar('SVNCOOPTS', " ".join(options), localdata) |
76 | if not "module" in parm: | 94 | data.setVar('SVNMODULE', ud.module, localdata) |
77 | raise MissingParameterError("svn method needs a 'module' parameter") | 95 | svncmd = data.getVar('FETCHCOMMAND', localdata, 1) |
78 | else: | 96 | svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1) |
79 | module = parm["module"] | 97 | |
80 | 98 | if svn_rsh: | |
81 | dlfile = self.localpath(loc, localdata) | 99 | svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) |
82 | dldir = data.getVar('DL_DIR', localdata, 1) | 100 | svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd) |
83 | # if local path contains the svn | 101 | |
84 | # module, consider the dir above it to be the | 102 | pkg = data.expand('${PN}', d) |
85 | # download directory | 103 | pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg) |
86 | # pos = dlfile.find(module) | 104 | moddir = os.path.join(pkgdir, ud.module) |
87 | # if pos: | 105 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'") |
88 | # dldir = dlfile[:pos] | 106 | |
89 | # else: | 107 | if os.access(os.path.join(moddir, '.svn'), os.R_OK): |
90 | # dldir = os.path.dirname(dlfile) | 108 | bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) |
91 | 109 | # update sources there | |
92 | # setup svn options | 110 | os.chdir(moddir) |
93 | options = [] | 111 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd) |
94 | if 'rev' in parm: | 112 | myret = os.system(svnupcmd) |
95 | revision = parm['rev'] | 113 | else: |
96 | else: | 114 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) |
97 | revision = "" | 115 | # check out sources there |
98 | 116 | bb.mkdirhier(pkgdir) | |
99 | date = Fetch.getSRCDate(d) | 117 | os.chdir(pkgdir) |
100 | 118 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd) | |
101 | if "proto" in parm: | ||
102 | proto = parm["proto"] | ||
103 | else: | ||
104 | proto = "svn" | ||
105 | |||
106 | svn_rsh = None | ||
107 | if proto == "svn+ssh" and "rsh" in parm: | ||
108 | svn_rsh = parm["rsh"] | ||
109 | |||
110 | tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) | ||
111 | data.setVar('TARFILES', dlfile, localdata) | ||
112 | data.setVar('TARFN', tarfn, localdata) | ||
113 | |||
114 | if Fetch.check_for_tarball(d, tarfn, dldir, date): | ||
115 | continue | ||
116 | |||
117 | olddir = os.path.abspath(os.getcwd()) | ||
118 | os.chdir(data.expand(dldir, localdata)) | ||
119 | |||
120 | svnroot = host + path | ||
121 | |||
122 | data.setVar('SVNROOT', svnroot, localdata) | ||
123 | data.setVar('SVNCOOPTS', " ".join(options), localdata) | ||
124 | data.setVar('SVNMODULE', module, localdata) | ||
125 | svncmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
126 | svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module) | ||
127 | |||
128 | if revision: | ||
129 | svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module) | ||
130 | elif date == "now": | ||
131 | svncmd = "svn co %s://%s/%s" % (proto, svnroot, module) | ||
132 | |||
133 | if svn_rsh: | ||
134 | svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) | ||
135 | |||
136 | # create temp directory | ||
137 | bb.debug(2, "Fetch: creating temporary directory") | ||
138 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
139 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata) | ||
140 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
141 | tmpfile = tmppipe.readline().strip() | ||
142 | if not tmpfile: | ||
143 | bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
144 | raise FetchError(module) | ||
145 | |||
146 | # check out sources there | ||
147 | os.chdir(tmpfile) | ||
148 | bb.note("Fetch " + loc) | ||
149 | bb.debug(1, "Running %s" % svncmd) | ||
150 | myret = os.system(svncmd) | 119 | myret = os.system(svncmd) |
151 | if myret != 0: | 120 | |
152 | try: | 121 | if myret != 0: |
153 | os.rmdir(tmpfile) | 122 | raise FetchError(ud.module) |
154 | except OSError: | 123 | |
155 | pass | 124 | os.chdir(pkgdir) |
156 | raise FetchError(module) | 125 | # tar them up to a defined filename |
157 | 126 | myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) | |
158 | os.chdir(os.path.join(tmpfile, os.path.dirname(module))) | 127 | if myret != 0: |
159 | # tar them up to a defined filename | 128 | try: |
160 | myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) | 129 | os.unlink(ud.localpath) |
161 | if myret != 0: | 130 | except OSError: |
162 | try: | 131 | pass |
163 | os.unlink(tarfn) | 132 | raise FetchError(ud.module) |
164 | except OSError: | ||
165 | pass | ||
166 | # cleanup | ||
167 | os.system('rm -rf %s' % tmpfile) | ||
168 | os.chdir(olddir) | ||
169 | del localdata | ||
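The svn fetcher shows the new option precedence most clearly: an explicit rev parameter wins, otherwise SRCDATE is used, and SRCDATE "now" produces a plain HEAD checkout. The selection in isolation (values assumed):

    # Assumed values for illustration.
    revision, date = "", "20061101"
    options = []
    if revision:
        options.append("-r %s" % revision)
    elif date != "now":
        options.append("-r {%s}" % date)
    print " ".join(options)   # -> "-r {20061101}"; empty when date == "now"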
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py index e47a8859be..9c9c1675a1 100644 --- a/bitbake/lib/bb/fetch/wget.py +++ b/bitbake/lib/bb/fetch/wget.py | |||
@@ -30,138 +30,70 @@ import bb | |||
30 | from bb import data | 30 | from bb import data |
31 | from bb.fetch import Fetch | 31 | from bb.fetch import Fetch |
32 | from bb.fetch import FetchError | 32 | from bb.fetch import FetchError |
33 | from bb.fetch import MD5SumError | ||
34 | from bb.fetch import uri_replace | 33 | from bb.fetch import uri_replace |
35 | 34 | ||
36 | class Wget(Fetch): | 35 | class Wget(Fetch): |
37 | """Class to fetch urls via 'wget'""" | 36 | """Class to fetch urls via 'wget'""" |
38 | def supports(url, d): | 37 | def supports(self, url, ud, d): |
39 | """Check to see if a given url can be fetched using wget. | ||
40 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
41 | """ | 38 | """ |
42 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 39 | Check to see if a given url can be fetched with wget. |
43 | return type in ['http','https','ftp'] | 40 | """ |
44 | supports = staticmethod(supports) | 41 | return ud.type in ['http','https','ftp'] |
45 | 42 | ||
46 | def localpath(url, d): | 43 | def localpath(self, url, ud, d): |
47 | # strip off parameters | 44 | |
48 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | 45 | url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}]) |
49 | if "localpath" in parm: | 46 | ud.basename = os.path.basename(ud.path) |
50 | # if user overrides local path, use it. | 47 | ud.localfile = data.expand(os.path.basename(url), d) |
51 | return parm["localpath"] | 48 | |
52 | url = bb.encodeurl([type, host, path, user, pswd, {}]) | 49 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) |
53 | 50 | ||
54 | return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url)) | 51 | def go(self, uri, ud, d): |
55 | localpath = staticmethod(localpath) | ||
56 | |||
57 | def go(self, d, urls = []): | ||
58 | """Fetch urls""" | 52 | """Fetch urls""" |
59 | 53 | ||
60 | def md5_sum(parm, d): | 54 | def fetch_uri(uri, ud, d): |
61 | """ | 55 | if os.path.exists(ud.localpath): |
62 | Return the MD5SUM associated with the to be downloaded | 56 | # file exists, but we didn't complete it.. trying again.. |
63 | file. | ||
64 | It can return None if no md5sum is associated | ||
65 | """ | ||
66 | try: | ||
67 | return parm['md5sum'] | ||
68 | except: | ||
69 | return None | ||
70 | |||
71 | def verify_md5sum(wanted_sum, got_sum): | ||
72 | """ | ||
73 | Verify the md5sum we wanted with the one we got | ||
74 | """ | ||
75 | if not wanted_sum: | ||
76 | return True | ||
77 | |||
78 | return wanted_sum == got_sum | ||
79 | |||
80 | def fetch_uri(uri, basename, dl, md5, parm, d): | ||
81 | # the MD5 sum we want to verify | ||
82 | wanted_md5sum = md5_sum(parm, d) | ||
83 | if os.path.exists(dl): | ||
84 | # file exists, but we didn't complete it.. trying again.. | ||
85 | fetchcmd = data.getVar("RESUMECOMMAND", d, 1) | 57 | fetchcmd = data.getVar("RESUMECOMMAND", d, 1) |
86 | else: | 58 | else: |
87 | fetchcmd = data.getVar("FETCHCOMMAND", d, 1) | 59 | fetchcmd = data.getVar("FETCHCOMMAND", d, 1) |
88 | 60 | ||
89 | bb.note("fetch " + uri) | 61 | bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) |
90 | fetchcmd = fetchcmd.replace("${URI}", uri) | 62 | fetchcmd = fetchcmd.replace("${URI}", uri) |
91 | fetchcmd = fetchcmd.replace("${FILE}", basename) | 63 | fetchcmd = fetchcmd.replace("${FILE}", ud.basename) |
92 | bb.debug(2, "executing " + fetchcmd) | 64 | bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) |
93 | ret = os.system(fetchcmd) | 65 | ret = os.system(fetchcmd) |
94 | if ret != 0: | 66 | if ret != 0: |
95 | return False | 67 | return False |
96 | 68 | ||
97 | # check if sourceforge did send us to the mirror page | 69 | # check if sourceforge did send us to the mirror page |
98 | dl_dir = data.getVar("DL_DIR", d, True) | 70 | if not os.path.exists(ud.localpath): |
99 | if not os.path.exists(dl): | 71 | os.system("rm %s*" % ud.localpath) # FIXME shell quote it |
100 | os.system("rm %s*" % dl) # FIXME shell quote it | 72 | bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net sent us to the mirror on %s" % ud.basename) |
101 | bb.debug(2,"sourceforge.net sent us to the mirror on %s" % basename) | ||
102 | return False | 73 | return False |
103 | 74 | ||
104 | # supposedly complete.. write out md5sum | ||
105 | if bb.which(data.getVar('PATH', d), 'md5sum'): | ||
106 | try: | ||
107 | md5pipe = os.popen('md5sum ' + dl) | ||
108 | md5data = (md5pipe.readline().split() or [ "" ])[0] | ||
109 | md5pipe.close() | ||
110 | except OSError: | ||
111 | md5data = "" | ||
112 | |||
113 | # verify the md5sum | ||
114 | if not verify_md5sum(wanted_md5sum, md5data): | ||
115 | raise MD5SumError(uri) | ||
116 | |||
117 | md5out = file(md5, 'w') | ||
118 | md5out.write(md5data) | ||
119 | md5out.close() | ||
120 | return True | 75 | return True |
121 | 76 | ||
122 | if not urls: | ||
123 | urls = self.urls | ||
124 | |||
125 | localdata = data.createCopy(d) | 77 | localdata = data.createCopy(d) |
126 | data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) | 78 | data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) |
127 | data.update_data(localdata) | 79 | data.update_data(localdata) |
128 | 80 | ||
129 | for uri in urls: | 81 | premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] |
130 | completed = 0 | 82 | for (find, replace) in premirrors: |
131 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata)) | 83 | newuri = uri_replace(uri, find, replace, d) |
132 | basename = os.path.basename(path) | 84 | if newuri != uri: |
133 | dl = self.localpath(uri, d) | 85 | if fetch_uri(newuri, ud, localdata): |
134 | dl = data.expand(dl, localdata) | 86 | return |
135 | md5 = dl + '.md5' | 87 | |
136 | 88 | if fetch_uri(uri, ud, localdata): | |
137 | if os.path.exists(md5): | 89 | return |
138 | # complete, nothing to see here.. | 90 | |
139 | continue | 91 | # try mirrors |
140 | 92 | mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] | |
141 | premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] | 93 | for (find, replace) in mirrors: |
142 | for (find, replace) in premirrors: | 94 | newuri = uri_replace(uri, find, replace, d) |
143 | newuri = uri_replace(uri, find, replace, d) | 95 | if newuri != uri: |
144 | if newuri != uri: | 96 | if fetch_uri(newuri, ud, localdata): |
145 | if fetch_uri(newuri, basename, dl, md5, parm, localdata): | 97 | return |
146 | completed = 1 | 98 | |
147 | break | 99 | raise FetchError(uri) |
148 | |||
149 | if completed: | ||
150 | continue | ||
151 | |||
152 | if fetch_uri(uri, basename, dl, md5, parm, localdata): | ||
153 | continue | ||
154 | |||
155 | # try mirrors | ||
156 | mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] | ||
157 | for (find, replace) in mirrors: | ||
158 | newuri = uri_replace(uri, find, replace, d) | ||
159 | if newuri != uri: | ||
160 | if fetch_uri(newuri, basename, dl, md5, parm, localdata): | ||
161 | completed = 1 | ||
162 | break | ||
163 | |||
164 | if not completed: | ||
165 | raise FetchError(uri) | ||
166 | |||
167 | del localdata | ||
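The rewritten wget go() above walks three tiers, returning on the first fetch_uri() success: every PREMIRRORS rewrite, then the original URI, then every MIRRORS rewrite, and only raises FetchError when all three fail. A compact sketch of that ordering, assuming a caller-supplied try_fetch callback and a simplified prefix rewrite standing in for the real uri_replace() (which matches on decoded scheme/host/path):

    def fetch_with_mirrors(uri, premirrors, mirrors, try_fetch):
        # premirrors/mirrors are newline-separated "find replace" pairs,
        # the same format the PREMIRRORS/MIRRORS variables use above.
        def pairs(text):
            return [line.split() for line in (text or "").split('\n') if line]

        def rewrite(u, find, replace):
            # crude stand-in for bb.fetch.uri_replace
            if u.startswith(find):
                return replace + u[len(find):]
            return u

        for find, replace in pairs(premirrors):
            newuri = rewrite(uri, find, replace)
            if newuri != uri and try_fetch(newuri):
                return newuri
        if try_fetch(uri):
            return uri
        for find, replace in pairs(mirrors):
            newuri = rewrite(uri, find, replace)
            if newuri != uri and try_fetch(newuri):
                return newuri
        raise RuntimeError("fetch failed: %s" % uri)

Note the diff also drops the old .md5 marker files: completion is now judged by ud.localpath alone, which is why the fetcher removes partial downloads so aggressively.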
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py index d7434ed33e..e14986bc19 100644 --- a/bitbake/lib/bb/methodpool.py +++ b/bitbake/lib/bb/methodpool.py | |||
@@ -61,9 +61,6 @@ def insert_method(modulename, code, fn): | |||
61 | comp = better_compile(code, "<bb>", fn ) | 61 | comp = better_compile(code, "<bb>", fn ) |
62 | better_exec(comp, __builtins__, code, fn) | 62 | better_exec(comp, __builtins__, code, fn) |
63 | 63 | ||
64 | # hack hack hack XXX | ||
65 | return | ||
66 | |||
67 | # now some instrumentation | 64 | # now some instrumentation |
68 | code = comp.co_names | 65 | code = comp.co_names |
69 | for name in code: | 66 | for name in code: |
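With the early return gone, methodpool's instrumentation below the compile step runs again: it walks the compiled code object's co_names to record which global names each parsed file defines, so duplicate definitions can be flagged. A rough sketch of the idea, with an invented _seen registry standing in for the module's own bookkeeping:

    _seen = {}

    def record_names(comp, fn):
        # comp is the code object from better_compile(); co_names lists
        # the global names it references, including the defs it creates.
        for name in comp.co_names:
            if name in _seen and _seen[name] != fn:
                raise Exception("%s defined in both %s and %s"
                                % (name, _seen[name], fn))
            _seen[name] = fn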
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py new file mode 100644 index 0000000000..473851cc72 --- /dev/null +++ b/bitbake/lib/bb/msg.py | |||
@@ -0,0 +1,108 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | BitBake 'msg' implementation | ||
6 | |||
7 | Message handling infrastructure for bitbake | ||
8 | |||
9 | # Copyright (C) 2006 Richard Purdie | ||
10 | |||
11 | This program is free software; you can redistribute it and/or modify it under | ||
12 | the terms of the GNU General Public License as published by the Free Software | ||
13 | Foundation; either version 2 of the License, or (at your option) any later | ||
14 | version. | ||
15 | |||
16 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
17 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
18 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
19 | |||
20 | You should have received a copy of the GNU General Public License along with | ||
21 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
22 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
23 | |||
24 | """ | ||
25 | |||
26 | import sys, os, re, bb | ||
27 | from bb import utils | ||
28 | |||
29 | debug_level = {} | ||
30 | |||
31 | verbose = False | ||
32 | |||
33 | domain = bb.utils.Enum( | ||
34 | 'Build', | ||
35 | 'Cache', | ||
36 | 'Collection', | ||
37 | 'Data', | ||
38 | 'Depends', | ||
39 | 'Fetcher', | ||
40 | 'Parsing', | ||
41 | 'Provider', | ||
42 | 'RunQueue', | ||
43 | 'TaskData', | ||
44 | 'Util') | ||
45 | |||
46 | # | ||
47 | # Message control functions | ||
48 | # | ||
49 | |||
50 | def set_debug_level(level): | ||
51 | bb.msg.debug_level = {} | ||
52 | for domain in bb.msg.domain: | ||
53 | bb.msg.debug_level[domain] = level | ||
54 | bb.msg.debug_level['default'] = level | ||
55 | |||
56 | def set_verbose(level): | ||
57 | bb.msg.verbose = level | ||
58 | |||
59 | def set_debug_domains(domains): | ||
60 | for domain in domains: | ||
61 | found = False | ||
62 | for ddomain in bb.msg.domain: | ||
63 | if domain == str(ddomain): | ||
64 | bb.msg.debug_level[ddomain] = bb.msg.debug_level[ddomain] + 1 | ||
65 | found = True | ||
66 | if not found: | ||
67 | std_warn("Logging domain %s is not valid, ignoring" % domain) | ||
68 | |||
69 | # | ||
70 | # Message handling functions | ||
71 | # | ||
72 | |||
73 | def debug(level, domain, msg, fn = None): | ||
74 | if debug_level[domain] >= level: | ||
75 | print 'DEBUG: ' + msg | ||
76 | |||
77 | def note(level, domain, msg, fn = None): | ||
78 | if level == 1 or verbose or debug_level[domain] >= 1: | ||
79 | std_note(msg) | ||
80 | |||
81 | def warn(domain, msg, fn = None): | ||
82 | std_warn(msg) | ||
83 | |||
84 | def error(domain, msg, fn = None): | ||
85 | std_error(msg) | ||
86 | |||
87 | def fatal(domain, msg, fn = None): | ||
88 | std_fatal(msg) | ||
89 | |||
90 | # | ||
91 | # Compatibility functions for the original message interface | ||
92 | # | ||
93 | def std_debug(lvl, msg): | ||
94 | if debug_level['default'] >= lvl: | ||
95 | print 'DEBUG: ' + msg | ||
96 | |||
97 | def std_note(msg): | ||
98 | print 'NOTE: ' + msg | ||
99 | |||
100 | def std_warn(msg): | ||
101 | print 'WARNING: ' + msg | ||
102 | |||
103 | def std_error(msg): | ||
104 | print 'ERROR: ' + msg | ||
105 | |||
106 | def std_fatal(msg): | ||
107 | print 'ERROR: ' + msg | ||
108 | sys.exit(1) | ||
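The new bb.msg module, used throughout the fetcher hunks above, keys verbosity per subsystem via the domain enum rather than one global level. A short usage sketch, grounded in the functions just defined:

    import bb.msg

    bb.msg.set_debug_level(0)               # every domain starts quiet
    bb.msg.set_debug_domains(["Fetcher"])   # bump only the Fetcher domain to 1

    bb.msg.note(1, bb.msg.domain.Fetcher, "always printed: level-1 notes")
    bb.msg.debug(1, bb.msg.domain.Fetcher, "printed: Fetcher level is now 1")
    bb.msg.debug(2, bb.msg.domain.Data, "suppressed: Data level is still 0")

The std_* functions at the bottom preserve the old flat interface so existing callers keep working while they migrate to the domain-aware one.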
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index 58e17d154a..70fdba03b4 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
@@ -37,11 +37,16 @@ class SkipPackage(Exception): | |||
37 | __mtime_cache = {} | 37 | __mtime_cache = {} |
38 | def cached_mtime(f): | 38 | def cached_mtime(f): |
39 | if not __mtime_cache.has_key(f): | 39 | if not __mtime_cache.has_key(f): |
40 | update_mtime(f) | 40 | __mtime_cache[f] = os.stat(f)[8] |
41 | return __mtime_cache[f] | 41 | return __mtime_cache[f] |
42 | 42 | ||
43 | def update_mtime(f): | 43 | def cached_mtime_noerror(f): |
44 | __mtime_cache[f] = os.stat(f)[8] | 44 | if not __mtime_cache.has_key(f): |
45 | try: | ||
46 | __mtime_cache[f] = os.stat(f)[8] | ||
47 | except OSError: | ||
48 | return 0 | ||
49 | return __mtime_cache[f] | ||
45 | 50 | ||
46 | def mark_dependency(d, f): | 51 | def mark_dependency(d, f): |
47 | if f.startswith('./'): | 52 | if f.startswith('./'): |
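cached_mtime() now stats inline, and the new cached_mtime_noerror() variant swallows a missing file, returning 0 instead of raising, which lets parsers probe optional files cheaply. An illustrative contrast (file names invented):

    from bb import parse

    m1 = parse.cached_mtime("conf/bitbake.conf")        # raises OSError if absent
    m2 = parse.cached_mtime_noerror("conf/absent.conf") # returns 0 if absent

Both share one cache, and a cache hit skips the stat entirely, so the first mtime observed for a file sticks for the life of the process.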
diff --git a/bitbake/lib/bb/parse/parse_c/BBHandler.py b/bitbake/lib/bb/parse/parse_c/BBHandler.py index d9f48db17b..b430e1f4e5 100644 --- a/bitbake/lib/bb/parse/parse_c/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_c/BBHandler.py | |||
@@ -5,33 +5,33 @@ | |||
5 | Reads a .bb file and obtains its metadata (using a C++ parser) | 5 | Reads a .bb file and obtains its metadata (using a C++ parser) |
6 | 6 | ||
7 | Copyright (C) 2006 Tim Robert Ansell | 7 | Copyright (C) 2006 Tim Robert Ansell |
8 | Copyright (C) 2006 Holger Hans Peter Freyther | 8 | Copyright (C) 2006 Holger Hans Peter Freyther |
9 | 9 | ||
10 | This program is free software; you can redistribute it and/or modify it under | 10 | This program is free software; you can redistribute it and/or modify it under |
11 | the terms of the GNU General Public License as published by the Free Software | 11 | the terms of the GNU General Public License as published by the Free Software |
12 | Foundation; either version 2 of the License, or (at your option) any later | 12 | Foundation; either version 2 of the License, or (at your option) any later |
13 | version. | 13 | version. |
14 | 14 | ||
15 | Permission is hereby granted, free of charge, to any person obtaining a copy | 15 | Permission is hereby granted, free of charge, to any person obtaining a copy |
16 | of this software and associated documentation files (the "Software"), to deal | 16 | of this software and associated documentation files (the "Software"), to deal |
17 | in the Software without restriction, including without limitation the rights | 17 | in the Software without restriction, including without limitation the rights |
18 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | 18 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
19 | copies of the Software, and to permit persons to whom the Software is | 19 | copies of the Software, and to permit persons to whom the Software is |
20 | furnished to do so, subject to the following conditions: | 20 | furnished to do so, subject to the following conditions: |
21 | 21 | ||
22 | The above copyright notice and this permission notice shall be included in all | 22 | The above copyright notice and this permission notice shall be included in all |
23 | copies or substantial portions of the Software. | 23 | copies or substantial portions of the Software. |
24 | 24 | ||
25 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | 25 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
26 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | 26 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
27 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | 27 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
28 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | 28 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
29 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | 29 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
30 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR | 30 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR |
31 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. | 31 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
32 | """ | 32 | """ |
33 | 33 | ||
34 | import os | 34 | import os, sys |
35 | 35 | ||
36 | # The Module we will use here | 36 | # The Module we will use here |
37 | import bb | 37 | import bb |
@@ -61,51 +61,126 @@ def supports(fn, data): | |||
61 | return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc" or fn[-5:] == ".conf" | 61 | return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc" or fn[-5:] == ".conf" |
62 | 62 | ||
63 | def init(fn, data): | 63 | def init(fn, data): |
64 | if not data.getVar('TOPDIR'): | 64 | if not bb.data.getVar('TOPDIR', data): |
65 | bb.error('TOPDIR is not set') | 65 | bb.data.setVar('TOPDIR', os.getcwd(), data) |
66 | if not data.getVar('BBPATH'): | 66 | if not bb.data.getVar('BBPATH', data): |
67 | bb.error('BBPATH is not set') | 67 | bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake'), data) |
68 | |||
69 | def handle_inherit(d): | ||
70 | """ | ||
71 | Handle inheriting of classes. This will load all default classes. | ||
72 | It could be faster, it could detect infinite loops but this is todo | ||
73 | Also this delayed loading of bb.parse could impose a penalty | ||
74 | """ | ||
75 | from bb.parse import handle | ||
76 | |||
77 | files = (data.getVar('INHERIT', d, True) or "").split() | ||
78 | if not "base" in i: | ||
79 | files[0:0] = ["base"] | ||
80 | |||
81 | __inherit_cache = data.getVar('__inherit_cache', d) or [] | ||
82 | for f in files: | ||
83 | file = data.expand(f, d) | ||
84 | if file[0] != "/" and file[-8:] != ".bbclass": | ||
85 | file = os.path.join('classes', '%s.bbclass' % file) | ||
86 | |||
87 | if not file in __inherit_cache: | ||
88 | bb.msg.debug(2, bb.msg.domain.Parsing, "inheriting %s" % file) | ||
89 | __inherit_cache.append( file ) | ||
90 | |||
91 | try: | ||
92 | handle(file, d, True) | ||
93 | except IOError: | ||
94 | print "Failed to inherit %s" % file | ||
95 | data.setVar('__inherit_cache', __inherit_cache, d) | ||
68 | 96 | ||
69 | 97 | ||
70 | def handle(fn, d, include): | 98 | def handle(fn, d, include): |
71 | print "" | 99 | from bb import data, parse |
72 | print "fn: %s" % fn | 100 | |
73 | print "data: %s" % d | 101 | (root, ext) = os.path.splitext(os.path.basename(fn)) |
74 | print dir(d) | 102 | base_name = "%s%s" % (root,ext) |
75 | print d.getVar.__doc__ | 103 | |
76 | print "include: %s" % include | 104 | # initialize with some data |
105 | init(fn,d) | ||
77 | 106 | ||
78 | # check if we include or are the beginning | 107 | # check if we include or are the beginning |
108 | oldfile = None | ||
79 | if include: | 109 | if include: |
80 | oldfile = d.getVar('FILE') | 110 | oldfile = d.getVar('FILE', False) |
81 | else: | 111 | is_conf = False |
82 | #d.inheritFromOS() | 112 | elif ext == ".conf": |
83 | oldfile = None | 113 | is_conf = True |
114 | data.inheritFromOS(d) | ||
84 | 115 | ||
85 | # find the file | 116 | # find the file |
86 | if not os.path.isabs(fn): | 117 | if not os.path.isabs(fn): |
87 | bb.error("No Absolute FILE name") | 118 | abs_fn = bb.which(d.getVar('BBPATH', True), fn) |
88 | abs_fn = bb.which(d.getVar('BBPATH'), fn) | ||
89 | else: | 119 | else: |
90 | abs_fn = fn | 120 | abs_fn = fn |
91 | 121 | ||
92 | # check if the file exists | 122 | # check if the file exists |
93 | if not os.path.exists(abs_fn): | 123 | if not os.path.exists(abs_fn): |
94 | raise IOError("file '%(fn)' not found" % locals() ) | 124 | raise IOError("file '%(fn)s' not found" % locals() ) |
95 | 125 | ||
96 | # now we know the file is around mark it as dep | 126 | # now we know the file is around mark it as dep |
97 | if include: | 127 | if include: |
98 | parse.mark_dependency(d, abs_fn) | 128 | parse.mark_dependency(d, abs_fn) |
99 | 129 | ||
130 | # manipulate the bbpath | ||
131 | if ext != ".bbclass" and ext != ".conf": | ||
132 | old_bb_path = data.getVar('BBPATH', d) | ||
133 | data.setVar('BBPATH', os.path.dirname(abs_fn) + (":%s" %old_bb_path) , d) | ||
134 | |||
135 | # handle INHERITS and base inherit | ||
136 | if ext != ".bbclass" and ext != ".conf": | ||
137 | data.setVar('FILE', fn, d) | ||
138 | handle_inherit(d) | ||
139 | |||
100 | # now parse this file - by deferring it to C++ | 140 | # now parse this file - by deferring it to C++ |
101 | parsefile(fn, d) | 141 | parsefile(abs_fn, d, is_conf) |
142 | |||
143 | # Finish it up | ||
144 | if include == 0: | ||
145 | data.expandKeys(d) | ||
146 | data.update_data(d) | ||
147 | #### !!! XXX Finish it up by executing the anonfunc | ||
148 | |||
102 | 149 | ||
103 | # restore the original FILE | 150 | # restore the original FILE |
104 | if oldfile: | 151 | if oldfile: |
105 | d.setVar('FILE', oldfile) | 152 | d.setVar('FILE', oldfile) |
106 | 153 | ||
154 | # restore bbpath | ||
155 | if ext != ".bbclass" and ext != ".conf": | ||
156 | data.setVar('BBPATH', old_bb_path, d ) | ||
157 | |||
158 | |||
107 | return d | 159 | return d |
108 | 160 | ||
161 | |||
162 | # Needed for BitBake files... | ||
163 | __pkgsplit_cache__={} | ||
164 | def vars_from_file(mypkg, d): | ||
165 | if not mypkg: | ||
166 | return (None, None, None) | ||
167 | if mypkg in __pkgsplit_cache__: | ||
168 | return __pkgsplit_cache__[mypkg] | ||
169 | |||
170 | myfile = os.path.splitext(os.path.basename(mypkg)) | ||
171 | parts = myfile[0].split('_') | ||
172 | __pkgsplit_cache__[mypkg] = parts | ||
173 | exp = 3 - len(parts) | ||
174 | tmplist = [] | ||
175 | while exp != 0: | ||
176 | exp -= 1 | ||
177 | tmplist.append(None) | ||
178 | parts.extend(tmplist) | ||
179 | return parts | ||
180 | |||
181 | |||
182 | |||
183 | |||
109 | # Inform bitbake that we are a parser | 184 | # Inform bitbake that we are a parser |
110 | # We need to define all three | 185 | # We need to define all three |
111 | from bb.parse import handlers | 186 | from bb.parse import handlers |
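Of the additions above, vars_from_file() encodes the recipe naming convention: split the file's base name on underscores and pad the result to three fields, read as (package, version, revision). A quick worked example (paths invented; the d argument is unused, so anything may be passed):

    from bb.parse.parse_c.BBHandler import vars_from_file

    print(vars_from_file("packages/glibc_2.4_r0.bb", None))  # ['glibc', '2.4', 'r0']
    print(vars_from_file("packages/glibc_2.4.bb", None))     # ['glibc', '2.4', None]
    print(vars_from_file("packages/glibc.bb", None))         # ['glibc', None, None]

Results are memoized per path in __pkgsplit_cache__, so repeated lookups during a parse cost one split.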
diff --git a/bitbake/lib/bb/parse/parse_c/Makefile b/bitbake/lib/bb/parse/parse_c/Makefile index 9eb7ce9d08..77daccb72d 100644 --- a/bitbake/lib/bb/parse/parse_c/Makefile +++ b/bitbake/lib/bb/parse/parse_c/Makefile | |||
@@ -1,6 +1,6 @@ | |||
1 | 1 | ||
2 | test: bitbakec.so | 2 | build: bitbakec.so |
3 | python test.py | 3 | echo "Done" |
4 | 4 | ||
5 | bitbakescanner.cc: bitbakescanner.l | 5 | bitbakescanner.cc: bitbakescanner.l |
6 | flex -t bitbakescanner.l > bitbakescanner.cc | 6 | flex -t bitbakescanner.l > bitbakescanner.cc |
@@ -28,9 +28,9 @@ bitbakec.so: bitbakec.o bitbakeparser.o bitbakescanner.o | |||
28 | g++ -shared -fPIC bitbakeparser.o bitbakescanner.o bitbakec.o -o bitbakec.so | 28 | g++ -shared -fPIC bitbakeparser.o bitbakescanner.o bitbakec.o -o bitbakec.so |
29 | 29 | ||
30 | clean: | 30 | clean: |
31 | rm *.out | 31 | rm -f *.out |
32 | rm *.cc | 32 | rm -f *.cc |
33 | rm bitbakec.c | 33 | rm -f bitbakec.c |
34 | rm bitbakec-processed.c | 34 | rm -f bitbakec-processed.c |
35 | rm *.o | 35 | rm -f *.o |
36 | rm *.so | 36 | rm -f *.so |
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakec.pyx b/bitbake/lib/bb/parse/parse_c/bitbakec.pyx index 362cc2021e..c666e9b6b1 100644 --- a/bitbake/lib/bb/parse/parse_c/bitbakec.pyx +++ b/bitbake/lib/bb/parse/parse_c/bitbakec.pyx | |||
@@ -6,96 +6,107 @@ cdef extern from "stdio.h": | |||
6 | FILE *fopen(char*, char*) | 6 | FILE *fopen(char*, char*) |
7 | int fclose(FILE *fp) | 7 | int fclose(FILE *fp) |
8 | 8 | ||
9 | cdef extern from "string.h": | ||
10 | int strlen(char*) | ||
9 | 11 | ||
10 | cdef extern from "lexerc.h": | 12 | cdef extern from "lexerc.h": |
11 | ctypedef struct lex_t: | 13 | ctypedef struct lex_t: |
12 | void* parser | 14 | void* parser |
13 | void* scanner | 15 | void* scanner |
16 | char* name | ||
14 | FILE* file | 17 | FILE* file |
18 | int config | ||
15 | void* data | 19 | void* data |
16 | 20 | ||
17 | int lineError | 21 | int lineError |
18 | int errorParse | 22 | int errorParse |
19 | 23 | ||
20 | cdef extern void parse(FILE*, object) | 24 | cdef extern int parse(FILE*, char*, object, int) |
21 | 25 | ||
22 | def parsefile(object file, object data): | 26 | def parsefile(object file, object data, object config): |
23 | print "parsefile: 1", file, data | 27 | #print "parsefile: 1", file, data |
24 | 28 | ||
25 | # Open the file | 29 | # Open the file |
26 | cdef FILE* f | 30 | cdef FILE* f |
27 | 31 | ||
28 | f = fopen(file, "r") | 32 | f = fopen(file, "r") |
29 | print "parsefile: 2 opening file" | 33 | #print "parsefile: 2 opening file" |
30 | if (f == NULL): | 34 | if (f == NULL): |
31 | raise IOError("No such file %s." % file) | 35 | raise IOError("No such file %s." % file) |
32 | 36 | ||
33 | print "parsefile: 3 parse" | 37 | #print "parsefile: 3 parse" |
34 | parse(f, data) | 38 | parse(f, file, data, config) |
35 | 39 | ||
36 | # Close the file | 40 | # Close the file |
37 | print "parsefile: 4 closing" | ||
38 | fclose(f) | 41 | fclose(f) |
39 | 42 | ||
40 | 43 | ||
41 | cdef public void e_assign(lex_t* container, char* key, char* what): | 44 | cdef public void e_assign(lex_t* container, char* key, char* what): |
42 | print "e_assign", key, what | 45 | #print "e_assign", key, what |
46 | if what == NULL: | ||
47 | print "FUTURE Warning empty string: use \"\"" | ||
48 | what = "" | ||
49 | |||
43 | d = <object>container.data | 50 | d = <object>container.data |
44 | d.setVar(key, what) | 51 | d.setVar(key, what) |
45 | 52 | ||
46 | cdef public void e_export(lex_t* c, char* what): | 53 | cdef public void e_export(lex_t* c, char* what): |
47 | print "e_export", what | 54 | #print "e_export", what |
48 | #exp: | 55 | #exp: |
49 | # bb.data.setVarFlag(key, "export", 1, data) | 56 | # bb.data.setVarFlag(key, "export", 1, data) |
50 | d = <object>container.data | 57 | d = <object>c.data |
51 | d.setVarFlag(key, "export", 1) | 58 | d.setVarFlag(what, "export", 1) |
52 | 59 | ||
53 | cdef public void e_immediate(lex_t* c, char* key, char* what): | 60 | cdef public void e_immediate(lex_t* c, char* key, char* what): |
54 | print "e_immediate", key, what | 61 | #print "e_immediate", key, what |
55 | #colon: | 62 | #colon: |
56 | # val = bb.data.expand(groupd["value"], data) | 63 | # val = bb.data.expand(groupd["value"], data) |
57 | d = <object>c.data | 64 | d = <object>c.data |
58 | d.setVar(key, d.expand(what)) | 65 | d.setVar(key, d.expand(what,d)) |
59 | 66 | ||
60 | cdef public void e_cond(lex_t* c, char* key, char* what): | 67 | cdef public void e_cond(lex_t* c, char* key, char* what): |
61 | print "e_cond", key, what | 68 | #print "e_cond", key, what |
62 | #ques: | 69 | #ques: |
63 | # val = bb.data.getVar(key, data) | 70 | # val = bb.data.getVar(key, data) |
64 | # if val == None: | 71 | # if val == None: |
65 | # val = groupd["value"] | 72 | # val = groupd["value"] |
73 | if what == NULL: | ||
74 | print "FUTURE warning: Use \"\" for", key | ||
75 | what = "" | ||
76 | |||
66 | d = <object>c.data | 77 | d = <object>c.data |
67 | d.setVar(key, (d.getVar(key) or what)) | 78 | d.setVar(key, (d.getVar(key,False) or what)) |
68 | 79 | ||
69 | cdef public void e_prepend(lex_t* c, char* key, char* what): | 80 | cdef public void e_prepend(lex_t* c, char* key, char* what): |
70 | print "e_prepend", key, what | 81 | #print "e_prepend", key, what |
71 | #prepend: | 82 | #prepend: |
72 | # val = "%s %s" % (groupd["value"], (bb.data.getVar(key, data) or "")) | 83 | # val = "%s %s" % (groupd["value"], (bb.data.getVar(key, data) or "")) |
73 | d = <object>c.data | 84 | d = <object>c.data |
74 | d.setVar(key, what + " " + (d.getVar(key) or "")) | 85 | d.setVar(key, what + " " + (d.getVar(key,0) or "")) |
75 | 86 | ||
76 | cdef public void e_append(lex_t* c, char* key, char* what): | 87 | cdef public void e_append(lex_t* c, char* key, char* what): |
77 | print "e_append", key, what | 88 | #print "e_append", key, what |
78 | #append: | 89 | #append: |
79 | # val = "%s %s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) | 90 | # val = "%s %s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) |
80 | d = <object>c.data | 91 | d = <object>c.data |
81 | d.setVar(key, (d.getVar(key) or "") + " " + what) | 92 | d.setVar(key, (d.getVar(key,0) or "") + " " + what) |
82 | 93 | ||
83 | cdef public void e_precat(lex_t* c, char* key, char* what): | 94 | cdef public void e_precat(lex_t* c, char* key, char* what): |
84 | print "e_precat", key, what | 95 | #print "e_precat", key, what |
85 | #predot: | 96 | #predot: |
86 | # val = "%s%s" % (groupd["value"], (bb.data.getVar(key, data) or "")) | 97 | # val = "%s%s" % (groupd["value"], (bb.data.getVar(key, data) or "")) |
87 | d = <object>c.data | 98 | d = <object>c.data |
88 | d.setVar(key, what + (d.getVar(key) or "")) | 99 | d.setVar(key, what + (d.getVar(key,0) or "")) |
89 | 100 | ||
90 | cdef public void e_postcat(lex_t* c, char* key, char* what): | 101 | cdef public void e_postcat(lex_t* c, char* key, char* what): |
91 | print "e_postcat", key, what | 102 | #print "e_postcat", key, what |
92 | #postdot: | 103 | #postdot: |
93 | # val = "%s%s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) | 104 | # val = "%s%s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) |
94 | d = <object>c.data | 105 | d = <object>c.data |
95 | d.setVar(key, (d.getVar(key) or "") + what) | 106 | d.setVar(key, (d.getVar(key,0) or "") + what) |
96 | 107 | ||
97 | cdef public void e_addtask(lex_t* c, char* name, char* before, char* after): | 108 | cdef public int e_addtask(lex_t* c, char* name, char* before, char* after) except -1: |
98 | print "e_addtask", name, before, after | 109 | #print "e_addtask", name |
99 | # func = m.group("func") | 110 | # func = m.group("func") |
100 | # before = m.group("before") | 111 | # before = m.group("before") |
101 | # after = m.group("after") | 112 | # after = m.group("after") |
@@ -112,69 +123,131 @@ cdef public void e_addtask(lex_t* c, char* name, char* before, char* after): | |||
112 | # # set up things that depend on this func | 123 | # # set up things that depend on this func |
113 | # data.setVarFlag(var, "postdeps", before.split(), d) | 124 | # data.setVarFlag(var, "postdeps", before.split(), d) |
114 | # return | 125 | # return |
115 | 126 | ||
116 | do = "do_%s" % name | 127 | if c.config == 1: |
128 | from bb.parse import ParseError | ||
129 | raise ParseError("No tasks allowed in config files") | ||
130 | return -1 | ||
131 | |||
117 | d = <object>c.data | 132 | d = <object>c.data |
133 | do = "do_%s" % name | ||
118 | d.setVarFlag(do, "task", 1) | 134 | d.setVarFlag(do, "task", 1) |
119 | 135 | ||
120 | if strlen(before) > 0: | 136 | if before != NULL and strlen(before) > 0: |
137 | #print "Before", before | ||
138 | d.setVarFlag(do, "postdeps", ("%s" % before).split()) | ||
139 | if after != NULL and strlen(after) > 0: | ||
140 | #print "After", after | ||
121 | d.setVarFlag(do, "deps", ("%s" % after).split()) | 141 | d.setVarFlag(do, "deps", ("%s" % after).split()) |
122 | if strlen(after) > 0: | ||
123 | d.setVarFlag(do, "deps", ("%s" % before).split()) | ||
124 | 142 | ||
143 | return 0 | ||
125 | 144 | ||
126 | cdef public void e_addhandler(lex_t* c, char* h): | 145 | cdef public int e_addhandler(lex_t* c, char* h) except -1: |
127 | print "e_addhandler", h | 146 | #print "e_addhandler", h |
128 | # data.setVarFlag(h, "handler", 1, d) | 147 | # data.setVarFlag(h, "handler", 1, d) |
148 | if c.config == 1: | ||
149 | from bb.parse import ParseError | ||
150 | raise ParseError("No handlers allowed in config files") | ||
151 | return -1 | ||
152 | |||
129 | d = <object>c.data | 153 | d = <object>c.data |
130 | d.setVarFlag(h, "handler", 1) | 154 | d.setVarFlag(h, "handler", 1) |
155 | return 0 | ||
156 | |||
157 | cdef public int e_export_func(lex_t* c, char* function) except -1: | ||
158 | #print "e_export_func", function | ||
159 | if c.config == 1: | ||
160 | from bb.parse import ParseError | ||
161 | raise ParseError("No functions allowed in config files") | ||
162 | return -1 | ||
163 | |||
164 | return 0 | ||
165 | |||
166 | cdef public int e_inherit(lex_t* c, char* file) except -1: | ||
167 | #print "e_inherit", file | ||
131 | 168 | ||
132 | cdef public void e_export_func(lex_t* c, char* function): | 169 | if c.config == 1: |
133 | print "e_export_func", function | 170 | from bb.parse import ParseError |
134 | pass | 171 | raise ParseError("No inherits allowed in config files") |
172 | return -1 | ||
135 | 173 | ||
136 | cdef public void e_inherit(lex_t* c, char* file): | 174 | return 0 |
137 | print "e_inherit", file | ||
138 | pass | ||
139 | 175 | ||
140 | cdef public void e_include(lex_t* c, char* file): | 176 | cdef public void e_include(lex_t* c, char* file): |
141 | print "e_include", file | 177 | from bb.parse import handle |
142 | d = <object>c.data | 178 | d = <object>c.data |
143 | d.expand(file) | 179 | |
144 | |||
145 | try: | 180 | try: |
146 | parsefile(file, d) | 181 | handle(d.expand(file,d), d, True) |
147 | except IOError: | 182 | except IOError: |
148 | print "Could not include required file %s" % file | 183 | print "Could not include file", file |
149 | 184 | ||
150 | 185 | ||
151 | cdef public void e_require(lex_t* c, char* file): | 186 | cdef public int e_require(lex_t* c, char* file) except -1: |
152 | print "e_require", file | 187 | #print "e_require", file |
188 | from bb.parse import handle | ||
153 | d = <object>c.data | 189 | d = <object>c.data |
154 | d.expand(file) | 190 | |
155 | |||
156 | try: | 191 | try: |
157 | parsefile(file, d) | 192 | handle(d.expand(file,d), d, True) |
158 | except IOError: | 193 | except IOError: |
159 | raise CParseError("Could not include required file %s" % file) | 194 | print "ParseError", file |
195 | from bb.parse import ParseError | ||
196 | raise ParseError("Could not include required file %s" % file) | ||
197 | return -1 | ||
198 | |||
199 | return 0 | ||
200 | |||
201 | cdef public int e_proc(lex_t* c, char* key, char* what) except -1: | ||
202 | #print "e_proc", key, what | ||
203 | if c.config == 1: | ||
204 | from bb.parse import ParseError | ||
205 | raise ParseError("No inherits allowed in config files") | ||
206 | return -1 | ||
207 | |||
208 | return 0 | ||
209 | |||
210 | cdef public int e_proc_python(lex_t* c, char* key, char* what) except -1: | ||
211 | #print "e_proc_python" | ||
212 | if c.config == 1: | ||
213 | from bb.parse import ParseError | ||
214 | raise ParseError("No pythin allowed in config files") | ||
215 | return -1 | ||
216 | |||
217 | if key != NULL: | ||
218 | pass | ||
219 | #print "Key", key | ||
220 | if what != NULL: | ||
221 | pass | ||
222 | #print "What", what | ||
223 | |||
224 | return 0 | ||
225 | |||
226 | cdef public int e_proc_fakeroot(lex_t* c, char* key, char* what) except -1: | ||
227 | #print "e_fakeroot", key, what | ||
228 | |||
229 | if c.config == 1: | ||
230 | from bb.parse import ParseError | ||
231 | raise ParseError("No fakeroot allowed in config files") | ||
232 | return -1 | ||
233 | |||
234 | return 0 | ||
235 | |||
236 | cdef public int e_def(lex_t* c, char* a, char* b, char* d) except -1: | ||
237 | #print "e_def", a, b, d | ||
160 | 238 | ||
161 | cdef public void e_proc(lex_t* c, char* key, char* what): | 239 | if c.config == 1: |
162 | print "e_proc", key, what | 240 | from bb.parse import ParseError |
163 | pass | 241 | raise ParseError("No defs allowed in config files") |
242 | return -1 | ||
164 | 243 | ||
165 | cdef public void e_proc_python(lex_t* c, char* key, char* what): | 244 | return 0 |
166 | print "e_proc_python", key, what | ||
167 | pass | ||
168 | 245 | ||
169 | cdef public void e_proc_fakeroot(lex_t* c, char* key, char* what): | 246 | cdef public int e_parse_error(lex_t* c) except -1: |
170 | print "e_fakeroot", key, what | 247 | print "e_parse_error", c.name, "line:", lineError, "parse:", errorParse |
171 | pass | ||
172 | 248 | ||
173 | cdef public void e_def(lex_t* c, char* a, char* b, char* d): | ||
174 | print "e_def", key, what | ||
175 | pass | ||
176 | 249 | ||
177 | cdef public void e_parse_error(lex_t* c): | 250 | from bb.parse import ParseError |
178 | print "e_parse_error", "line:", lineError, "parse:", errorParse | 251 | raise ParseError("There was an parse error, sorry unable to give more information at the current time. File: %s Line: %d" % (c.name,lineError) ) |
179 | raise CParseError("There was a parse error, sorry unable to give more information at the current time.") | 252 | return -1 |
180 | 253 | ||
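One pattern repeats through the new handlers above: recipe-only constructs (tasks, handlers, inherits, functions, defs) raise ParseError when the lexer was opened on a config file, and the signatures change from void to int ... except -1 so Pyrex can propagate that Python exception out of the C callback instead of swallowing it. The guard's shape, sketched in plain Python with a stand-in lexer record:

    class ParseError(Exception):
        pass

    class Lexer(object):
        # stand-in for the C lex_t: file name, config flag, datastore
        def __init__(self, name, config, data):
            self.name, self.config, self.data = name, config, data

    def e_addtask(c, name, before=None, after=None):
        if c.config:
            # .conf files carry configuration only, never tasks
            raise ParseError("No tasks allowed in config files")
        do = "do_%s" % name
        c.data.setVarFlag(do, "task", 1)
        if before:
            c.data.setVarFlag(do, "postdeps", before.split())
        if after:
            c.data.setVarFlag(do, "deps", after.split())

In the generated C, a raising handler returns -1, and the except -1 declaration tells Pyrex an exception is pending, so the parse aborts with the Python traceback intact.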
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakeparser.cc b/bitbake/lib/bb/parse/parse_c/bitbakeparser.cc index ee9a901b70..9d9793f8df 100644 --- a/bitbake/lib/bb/parse/parse_c/bitbakeparser.cc +++ b/bitbake/lib/bb/parse/parse_c/bitbakeparser.cc | |||
@@ -128,51 +128,49 @@ typedef union { | |||
128 | */ | 128 | */ |
129 | static const YYACTIONTYPE yy_action[] = { | 129 | static const YYACTIONTYPE yy_action[] = { |
130 | /* 0 */ 82, 3, 7, 8, 38, 22, 39, 24, 26, 32, | 130 | /* 0 */ 82, 3, 7, 8, 38, 22, 39, 24, 26, 32, |
131 | /* 10 */ 34, 28, 30, 128, 1, 40, 53, 70, 55, 5, | 131 | /* 10 */ 34, 28, 30, 2, 21, 40, 53, 70, 55, 44, |
132 | /* 20 */ 60, 65, 67, 2, 21, 36, 69, 77, 9, 7, | 132 | /* 20 */ 60, 65, 67, 128, 1, 36, 69, 77, 42, 46, |
133 | /* 30 */ 11, 6, 13, 15, 17, 19, 12, 52, 50, 4, | 133 | /* 30 */ 11, 66, 13, 15, 17, 19, 64, 62, 9, 7, |
134 | /* 40 */ 74, 42, 46, 59, 57, 10, 64, 62, 38, 14, | 134 | /* 40 */ 74, 38, 45, 81, 59, 57, 38, 38, 73, 76, |
135 | /* 50 */ 73, 16, 38, 38, 76, 81, 18, 20, 23, 25, | 135 | /* 50 */ 5, 68, 52, 50, 14, 31, 47, 71, 48, 10, |
136 | /* 60 */ 27, 29, 31, 33, 35, 37, 56, 51, 90, 54, | 136 | /* 60 */ 72, 33, 23, 49, 6, 41, 51, 78, 75, 16, |
137 | /* 70 */ 58, 71, 41, 43, 63, 45, 44, 47, 72, 48, | 137 | /* 70 */ 4, 54, 35, 25, 18, 80, 79, 56, 27, 37, |
138 | /* 80 */ 75, 78, 80, 61, 90, 49, 66, 90, 90, 68, | 138 | /* 80 */ 58, 12, 61, 29, 43, 63, 20, |
139 | /* 90 */ 90, 90, 90, 90, 90, 79, | ||
140 | }; | 139 | }; |
141 | static const YYCODETYPE yy_lookahead[] = { | 140 | static const YYCODETYPE yy_lookahead[] = { |
142 | /* 0 */ 0, 1, 2, 3, 23, 4, 25, 6, 7, 8, | 141 | /* 0 */ 0, 1, 2, 3, 23, 4, 25, 6, 7, 8, |
143 | /* 10 */ 9, 10, 11, 31, 32, 15, 16, 1, 18, 42, | 142 | /* 10 */ 9, 10, 11, 33, 34, 15, 16, 1, 18, 14, |
144 | /* 20 */ 20, 21, 22, 33, 34, 24, 26, 27, 1, 2, | 143 | /* 20 */ 20, 21, 22, 31, 32, 24, 26, 27, 13, 14, |
145 | /* 30 */ 4, 28, 6, 7, 8, 9, 5, 35, 36, 29, | 144 | /* 30 */ 4, 19, 6, 7, 8, 9, 39, 40, 1, 2, |
146 | /* 40 */ 24, 13, 14, 37, 38, 34, 39, 40, 23, 5, | 145 | /* 40 */ 24, 23, 12, 25, 37, 38, 23, 23, 25, 25, |
147 | /* 50 */ 25, 5, 23, 23, 25, 25, 5, 5, 5, 5, | 146 | /* 50 */ 42, 19, 35, 36, 5, 5, 12, 24, 13, 34, |
148 | /* 60 */ 5, 5, 5, 5, 5, 41, 17, 35, 43, 1, | 147 | /* 60 */ 41, 5, 5, 12, 28, 12, 35, 1, 41, 5, |
149 | /* 70 */ 37, 24, 12, 12, 39, 12, 14, 12, 41, 13, | 148 | /* 70 */ 29, 1, 5, 5, 5, 41, 24, 17, 5, 41, |
150 | /* 80 */ 41, 1, 41, 19, 43, 12, 19, 43, 43, 19, | 149 | /* 80 */ 37, 5, 19, 5, 12, 39, 5, |
151 | /* 90 */ 43, 43, 43, 43, 43, 24, | ||
152 | }; | 150 | }; |
153 | #define YY_SHIFT_USE_DFLT (-20) | 151 | #define YY_SHIFT_USE_DFLT (-20) |
154 | static const signed char yy_shift_ofst[] = { | 152 | static const signed char yy_shift_ofst[] = { |
155 | /* 0 */ -20, 0, -20, 10, -20, 3, -20, -20, 27, -20, | 153 | /* 0 */ -20, 0, -20, 41, -20, 36, -20, -20, 37, -20, |
156 | /* 10 */ 26, 31, -20, 44, -20, 46, -20, 51, -20, 52, | 154 | /* 10 */ 26, 76, -20, 49, -20, 64, -20, 69, -20, 81, |
157 | /* 20 */ -20, 1, 53, -20, 54, -20, 55, -20, 56, -20, | 155 | /* 20 */ -20, 1, 57, -20, 68, -20, 73, -20, 78, -20, |
158 | /* 30 */ 57, -20, 58, -20, 59, -20, -20, -19, -20, -20, | 156 | /* 30 */ 50, -20, 56, -20, 67, -20, -20, -19, -20, -20, |
159 | /* 40 */ 60, 28, 61, 62, 63, -20, 65, 66, 73, -20, | 157 | /* 40 */ 53, 15, 72, 5, 30, -20, 44, 45, 51, -20, |
160 | /* 50 */ 60, -20, -20, 68, -20, 49, -20, 49, -20, -20, | 158 | /* 50 */ 53, -20, -20, 70, -20, 60, -20, 60, -20, -20, |
161 | /* 60 */ 64, -20, 64, -20, -20, 67, -20, 70, -20, 16, | 159 | /* 60 */ 63, -20, 63, -20, -20, 12, -20, 32, -20, 16, |
162 | /* 70 */ 47, -20, 25, -20, -20, 29, -20, 80, 71, -20, | 160 | /* 70 */ 33, -20, 23, -20, -20, 24, -20, 66, 52, -20, |
163 | /* 80 */ 30, -20, | 161 | /* 80 */ 18, -20, |
164 | }; | 162 | }; |
165 | #define YY_REDUCE_USE_DFLT (-24) | 163 | #define YY_REDUCE_USE_DFLT (-21) |
166 | static const signed char yy_reduce_ofst[] = { | 164 | static const signed char yy_reduce_ofst[] = { |
167 | /* 0 */ -18, -10, -24, -24, -23, -24, -24, -24, 11, -24, | 165 | /* 0 */ -8, -20, -21, -21, 8, -21, -21, -21, 25, -21, |
168 | /* 10 */ -24, -24, -24, -24, -24, -24, -24, -24, -24, -24, | 166 | /* 10 */ -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, |
169 | /* 20 */ -24, -24, -24, -24, -24, -24, -24, -24, -24, -24, | 167 | /* 20 */ -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, |
170 | /* 30 */ -24, -24, -24, -24, -24, -24, 24, -24, -24, -24, | 168 | /* 30 */ -21, -21, -21, -21, -21, -21, 38, -21, -21, -21, |
171 | /* 40 */ 2, -24, -24, -24, -24, -24, -24, -24, -24, -24, | 169 | /* 40 */ 17, -21, -21, -21, -21, -21, -21, -21, -21, -21, |
172 | /* 50 */ 32, -24, -24, -24, -24, 6, -24, 33, -24, -24, | 170 | /* 50 */ 31, -21, -21, -21, -21, 7, -21, 43, -21, -21, |
173 | /* 60 */ 7, -24, 35, -24, -24, -24, -24, -24, -24, -24, | 171 | /* 60 */ -3, -21, 46, -21, -21, -21, -21, -21, -21, -21, |
174 | /* 70 */ -24, 37, -24, -24, 39, -24, -24, -24, -24, 41, | 172 | /* 70 */ -21, 19, -21, -21, 27, -21, -21, -21, -21, 34, |
175 | /* 80 */ -24, -24, | 173 | /* 80 */ -21, -21, |
176 | }; | 174 | }; |
177 | static const YYACTIONTYPE yy_default[] = { | 175 | static const YYACTIONTYPE yy_default[] = { |
178 | /* 0 */ 84, 127, 83, 85, 125, 126, 124, 86, 127, 85, | 176 | /* 0 */ 84, 127, 83, 85, 125, 126, 124, 86, 127, 85, |
@@ -420,7 +418,7 @@ static void yy_destructor(YYCODETYPE yymajor, YYMINORTYPE *yypminor){ | |||
420 | case 29: | 418 | case 29: |
421 | #line 50 "bitbakeparser.y" | 419 | #line 50 "bitbakeparser.y" |
422 | { (yypminor->yy0).release_this (); } | 420 | { (yypminor->yy0).release_this (); } |
423 | #line 425 "bitbakeparser.c" | 421 | #line 423 "bitbakeparser.c" |
424 | break; | 422 | break; |
425 | default: break; /* If no destructor action specified: do nothing */ | 423 | default: break; /* If no destructor action specified: do nothing */ |
426 | } | 424 | } |
@@ -694,7 +692,7 @@ static void yy_reduce( | |||
694 | { yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() ); | 692 | { yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() ); |
695 | yymsp[0].minor.yy0.assignString( 0 ); | 693 | yymsp[0].minor.yy0.assignString( 0 ); |
696 | yymsp[0].minor.yy0.release_this(); } | 694 | yymsp[0].minor.yy0.release_this(); } |
697 | #line 699 "bitbakeparser.c" | 695 | #line 697 "bitbakeparser.c" |
698 | break; | 696 | break; |
699 | case 4: | 697 | case 4: |
700 | #line 64 "bitbakeparser.y" | 698 | #line 64 "bitbakeparser.y" |
@@ -702,7 +700,7 @@ static void yy_reduce( | |||
702 | yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() ); | 700 | yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() ); |
703 | yymsp[0].minor.yy0.assignString( 0 ); | 701 | yymsp[0].minor.yy0.assignString( 0 ); |
704 | yymsp[0].minor.yy0.release_this(); } | 702 | yymsp[0].minor.yy0.release_this(); } |
705 | #line 707 "bitbakeparser.c" | 703 | #line 705 "bitbakeparser.c" |
706 | break; | 704 | break; |
707 | case 5: | 705 | case 5: |
708 | #line 70 "bitbakeparser.y" | 706 | #line 70 "bitbakeparser.y" |
@@ -711,7 +709,7 @@ static void yy_reduce( | |||
711 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); | 709 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); |
712 | yy_destructor(4,&yymsp[-1].minor); | 710 | yy_destructor(4,&yymsp[-1].minor); |
713 | } | 711 | } |
714 | #line 716 "bitbakeparser.c" | 712 | #line 714 "bitbakeparser.c" |
715 | break; | 713 | break; |
716 | case 6: | 714 | case 6: |
717 | #line 74 "bitbakeparser.y" | 715 | #line 74 "bitbakeparser.y" |
@@ -720,7 +718,7 @@ static void yy_reduce( | |||
720 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); | 718 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); |
721 | yy_destructor(6,&yymsp[-1].minor); | 719 | yy_destructor(6,&yymsp[-1].minor); |
722 | } | 720 | } |
723 | #line 725 "bitbakeparser.c" | 721 | #line 723 "bitbakeparser.c" |
724 | break; | 722 | break; |
725 | case 7: | 723 | case 7: |
726 | #line 78 "bitbakeparser.y" | 724 | #line 78 "bitbakeparser.y" |
@@ -729,7 +727,7 @@ static void yy_reduce( | |||
729 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); | 727 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); |
730 | yy_destructor(7,&yymsp[-1].minor); | 728 | yy_destructor(7,&yymsp[-1].minor); |
731 | } | 729 | } |
732 | #line 734 "bitbakeparser.c" | 730 | #line 732 "bitbakeparser.c" |
733 | break; | 731 | break; |
734 | case 8: | 732 | case 8: |
735 | #line 82 "bitbakeparser.y" | 733 | #line 82 "bitbakeparser.y" |
@@ -738,7 +736,7 @@ static void yy_reduce( | |||
738 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); | 736 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); |
739 | yy_destructor(8,&yymsp[-1].minor); | 737 | yy_destructor(8,&yymsp[-1].minor); |
740 | } | 738 | } |
741 | #line 743 "bitbakeparser.c" | 739 | #line 741 "bitbakeparser.c" |
742 | break; | 740 | break; |
743 | case 9: | 741 | case 9: |
744 | #line 86 "bitbakeparser.y" | 742 | #line 86 "bitbakeparser.y" |
@@ -746,56 +744,56 @@ static void yy_reduce( | |||
746 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); | 744 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor); |
747 | yy_destructor(9,&yymsp[-1].minor); | 745 | yy_destructor(9,&yymsp[-1].minor); |
748 | } | 746 | } |
749 | #line 751 "bitbakeparser.c" | 747 | #line 749 "bitbakeparser.c" |
750 | break; | 748 | break; |
751 | case 10: | 749 | case 10: |
752 | #line 90 "bitbakeparser.y" | 750 | #line 90 "bitbakeparser.y" |
753 | { e_assign( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 751 | { e_assign( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
754 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(4,&yymsp[-1].minor); | 752 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(4,&yymsp[-1].minor); |
755 | } | 753 | } |
756 | #line 758 "bitbakeparser.c" | 754 | #line 756 "bitbakeparser.c" |
757 | break; | 755 | break; |
758 | case 11: | 756 | case 11: |
759 | #line 93 "bitbakeparser.y" | 757 | #line 93 "bitbakeparser.y" |
760 | { e_precat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 758 | { e_precat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
761 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(6,&yymsp[-1].minor); | 759 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(6,&yymsp[-1].minor); |
762 | } | 760 | } |
763 | #line 765 "bitbakeparser.c" | 761 | #line 763 "bitbakeparser.c" |
764 | break; | 762 | break; |
765 | case 12: | 763 | case 12: |
766 | #line 96 "bitbakeparser.y" | 764 | #line 96 "bitbakeparser.y" |
767 | { e_postcat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 765 | { e_postcat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
768 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(7,&yymsp[-1].minor); | 766 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(7,&yymsp[-1].minor); |
769 | } | 767 | } |
770 | #line 772 "bitbakeparser.c" | 768 | #line 770 "bitbakeparser.c" |
771 | break; | 769 | break; |
772 | case 13: | 770 | case 13: |
773 | #line 99 "bitbakeparser.y" | 771 | #line 99 "bitbakeparser.y" |
774 | { e_prepend( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 772 | { e_prepend( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
775 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(10,&yymsp[-1].minor); | 773 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(10,&yymsp[-1].minor); |
776 | } | 774 | } |
777 | #line 779 "bitbakeparser.c" | 775 | #line 777 "bitbakeparser.c" |
778 | break; | 776 | break; |
779 | case 14: | 777 | case 14: |
780 | #line 102 "bitbakeparser.y" | 778 | #line 102 "bitbakeparser.y" |
781 | { e_append( lex, yymsp[-2].minor.yy0.string() , yymsp[0].minor.yy0.string() ); | 779 | { e_append( lex, yymsp[-2].minor.yy0.string() , yymsp[0].minor.yy0.string() ); |
782 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(11,&yymsp[-1].minor); | 780 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(11,&yymsp[-1].minor); |
783 | } | 781 | } |
784 | #line 786 "bitbakeparser.c" | 782 | #line 784 "bitbakeparser.c" |
785 | break; | 783 | break; |
786 | case 15: | 784 | case 15: |
787 | #line 105 "bitbakeparser.y" | 785 | #line 105 "bitbakeparser.y" |
788 | { e_immediate( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 786 | { e_immediate( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
789 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(8,&yymsp[-1].minor); | 787 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(8,&yymsp[-1].minor); |
790 | } | 788 | } |
791 | #line 793 "bitbakeparser.c" | 789 | #line 791 "bitbakeparser.c" |
792 | break; | 790 | break; |
793 | case 16: | 791 | case 16: |
794 | #line 108 "bitbakeparser.y" | 792 | #line 108 "bitbakeparser.y" |
795 | { e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); | 793 | { e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() ); |
796 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(9,&yymsp[-1].minor); | 794 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(9,&yymsp[-1].minor); |
797 | } | 795 | } |
798 | #line 800 "bitbakeparser.c" | 796 | #line 798 "bitbakeparser.c" |
799 | break; | 797 | break; |
800 | case 17: | 798 | case 17: |
801 | #line 112 "bitbakeparser.y" | 799 | #line 112 "bitbakeparser.y" |
@@ -803,7 +801,7 @@ static void yy_reduce( | |||
803 | yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-3].minor); | 801 | yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-3].minor); |
804 | yy_destructor(14,&yymsp[-1].minor); | 802 | yy_destructor(14,&yymsp[-1].minor); |
805 | } | 803 | } |
806 | #line 808 "bitbakeparser.c" | 804 | #line 806 "bitbakeparser.c" |
807 | break; | 805 | break; |
808 | case 18: | 806 | case 18: |
809 | #line 115 "bitbakeparser.y" | 807 | #line 115 "bitbakeparser.y" |
@@ -811,55 +809,55 @@ static void yy_reduce( | |||
811 | yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-3].minor); | 809 | yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-3].minor); |
812 | yy_destructor(13,&yymsp[-1].minor); | 810 | yy_destructor(13,&yymsp[-1].minor); |
813 | } | 811 | } |
814 | #line 816 "bitbakeparser.c" | 812 | #line 814 "bitbakeparser.c" |
815 | break; | 813 | break; |
816 | case 19: | 814 | case 19: |
817 | #line 118 "bitbakeparser.y" | 815 | #line 118 "bitbakeparser.y" |
818 | { e_addtask( lex, yymsp[0].minor.yy0.string(), NULL, NULL); | 816 | { e_addtask( lex, yymsp[0].minor.yy0.string(), NULL, NULL); |
819 | yymsp[0].minor.yy0.release_this();} | 817 | yymsp[0].minor.yy0.release_this();} |
820 | #line 822 "bitbakeparser.c" | 818 | #line 820 "bitbakeparser.c" |
821 | break; | 819 | break; |
822 | case 20: | 820 | case 20: |
823 | #line 121 "bitbakeparser.y" | 821 | #line 121 "bitbakeparser.y" |
824 | { e_addtask( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string(), NULL); | 822 | { e_addtask( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string(), NULL); |
825 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-1].minor); | 823 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-1].minor); |
826 | } | 824 | } |
827 | #line 829 "bitbakeparser.c" | 825 | #line 827 "bitbakeparser.c" |
828 | break; | 826 | break; |
829 | case 21: | 827 | case 21: |
830 | #line 124 "bitbakeparser.y" | 828 | #line 124 "bitbakeparser.y" |
831 | { e_addtask( lex, yymsp[-2].minor.yy0.string(), NULL, yymsp[0].minor.yy0.string()); | 829 | { e_addtask( lex, yymsp[-2].minor.yy0.string(), NULL, yymsp[0].minor.yy0.string()); |
832 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-1].minor); | 830 | yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-1].minor); |
833 | } | 831 | } |
834 | #line 836 "bitbakeparser.c" | 832 | #line 834 "bitbakeparser.c" |
835 | break; | 833 | break; |
836 | case 25: | 834 | case 25: |
837 | #line 131 "bitbakeparser.y" | 835 | #line 131 "bitbakeparser.y" |
838 | { e_addhandler( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this (); yy_destructor(16,&yymsp[-1].minor); | 836 | { e_addhandler( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this (); yy_destructor(16,&yymsp[-1].minor); |
839 | } | 837 | } |
840 | #line 842 "bitbakeparser.c" | 838 | #line 840 "bitbakeparser.c" |
841 | break; | 839 | break; |
842 | case 26: | 840 | case 26: |
843 | #line 133 "bitbakeparser.y" | 841 | #line 133 "bitbakeparser.y" |
844 | { e_export_func( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this(); } | 842 | { e_export_func( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this(); } |
845 | #line 847 "bitbakeparser.c" | 843 | #line 845 "bitbakeparser.c" |
846 | break; | 844 | break; |
847 | case 30: | 845 | case 30: |
848 | #line 138 "bitbakeparser.y" | 846 | #line 138 "bitbakeparser.y" |
849 | { e_inherit( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this (); } | 847 | { e_inherit( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this (); } |
850 | #line 852 "bitbakeparser.c" | 848 | #line 850 "bitbakeparser.c" |
851 | break; | 849 | break; |
852 | case 34: | 850 | case 34: |
853 | #line 144 "bitbakeparser.y" | 851 | #line 144 "bitbakeparser.y" |
854 | { e_include( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(21,&yymsp[-1].minor); | 852 | { e_include( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(21,&yymsp[-1].minor); |
855 | } | 853 | } |
856 | #line 858 "bitbakeparser.c" | 854 | #line 856 "bitbakeparser.c" |
857 | break; | 855 | break; |
858 | case 35: | 856 | case 35: |
859 | #line 147 "bitbakeparser.y" | 857 | #line 147 "bitbakeparser.y" |
860 | { e_require( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(22,&yymsp[-1].minor); | 858 | { e_require( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(22,&yymsp[-1].minor); |
861 | } | 859 | } |
862 | #line 864 "bitbakeparser.c" | 860 | #line 862 "bitbakeparser.c" |
863 | break; | 861 | break; |
864 | case 36: | 862 | case 36: |
865 | #line 150 "bitbakeparser.y" | 863 | #line 150 "bitbakeparser.y" |
@@ -868,12 +866,12 @@ static void yy_reduce( | |||
868 | yymsp[-1].minor.yy0.release_this (); | 866 | yymsp[-1].minor.yy0.release_this (); |
869 | yymsp[0].minor.yy0.release_this (); | 867 | yymsp[0].minor.yy0.release_this (); |
870 | } | 868 | } |
871 | #line 873 "bitbakeparser.c" | 869 | #line 871 "bitbakeparser.c" |
872 | break; | 870 | break; |
873 | case 37: | 871 | case 37: |
874 | #line 155 "bitbakeparser.y" | 872 | #line 155 "bitbakeparser.y" |
875 | { yygotominor.yy0.assignString(0); } | 873 | { yygotominor.yy0.assignString(0); } |
876 | #line 878 "bitbakeparser.c" | 874 | #line 876 "bitbakeparser.c" |
877 | break; | 875 | break; |
878 | case 38: | 876 | case 38: |
879 | #line 157 "bitbakeparser.y" | 877 | #line 157 "bitbakeparser.y" |
@@ -881,7 +879,7 @@ static void yy_reduce( | |||
881 | yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(24,&yymsp[-2].minor); | 879 | yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(24,&yymsp[-2].minor); |
882 | yy_destructor(25,&yymsp[0].minor); | 880 | yy_destructor(25,&yymsp[0].minor); |
883 | } | 881 | } |
884 | #line 886 "bitbakeparser.c" | 882 | #line 884 "bitbakeparser.c" |
885 | break; | 883 | break; |
886 | case 39: | 884 | case 39: |
887 | #line 160 "bitbakeparser.y" | 885 | #line 160 "bitbakeparser.y" |
@@ -890,7 +888,7 @@ static void yy_reduce( | |||
890 | yy_destructor(24,&yymsp[-2].minor); | 888 | yy_destructor(24,&yymsp[-2].minor); |
891 | yy_destructor(25,&yymsp[0].minor); | 889 | yy_destructor(25,&yymsp[0].minor); |
892 | } | 890 | } |
893 | #line 895 "bitbakeparser.c" | 891 | #line 893 "bitbakeparser.c" |
894 | break; | 892 | break; |
895 | case 40: | 893 | case 40: |
896 | #line 163 "bitbakeparser.y" | 894 | #line 163 "bitbakeparser.y" |
@@ -899,7 +897,7 @@ static void yy_reduce( | |||
899 | yy_destructor(24,&yymsp[-2].minor); | 897 | yy_destructor(24,&yymsp[-2].minor); |
900 | yy_destructor(25,&yymsp[0].minor); | 898 | yy_destructor(25,&yymsp[0].minor); |
901 | } | 899 | } |
902 | #line 904 "bitbakeparser.c" | 900 | #line 902 "bitbakeparser.c" |
903 | break; | 901 | break; |
904 | case 41: | 902 | case 41: |
905 | #line 167 "bitbakeparser.y" | 903 | #line 167 "bitbakeparser.y" |
@@ -908,7 +906,7 @@ static void yy_reduce( | |||
908 | yy_destructor(24,&yymsp[-2].minor); | 906 | yy_destructor(24,&yymsp[-2].minor); |
909 | yy_destructor(25,&yymsp[0].minor); | 907 | yy_destructor(25,&yymsp[0].minor); |
910 | } | 908 | } |
911 | #line 913 "bitbakeparser.c" | 909 | #line 911 "bitbakeparser.c" |
912 | break; | 910 | break; |
913 | case 42: | 911 | case 42: |
914 | #line 171 "bitbakeparser.y" | 912 | #line 171 "bitbakeparser.y" |
@@ -916,18 +914,18 @@ static void yy_reduce( | |||
916 | yygotominor.yy0.assignString( token_t::concatString(yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()) ); | 914 | yygotominor.yy0.assignString( token_t::concatString(yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()) ); |
917 | yymsp[-1].minor.yy0.release_this (); yymsp[0].minor.yy0.release_this (); | 915 | yymsp[-1].minor.yy0.release_this (); yymsp[0].minor.yy0.release_this (); |
918 | } | 916 | } |
919 | #line 921 "bitbakeparser.c" | 917 | #line 919 "bitbakeparser.c" |
920 | break; | 918 | break; |
921 | case 43: | 919 | case 43: |
922 | #line 175 "bitbakeparser.y" | 920 | #line 175 "bitbakeparser.y" |
923 | { yygotominor.yy0.assignString( 0 ); } | 921 | { yygotominor.yy0.assignString( 0 ); } |
924 | #line 926 "bitbakeparser.c" | 922 | #line 924 "bitbakeparser.c" |
925 | break; | 923 | break; |
926 | case 44: | 924 | case 44: |
927 | #line 177 "bitbakeparser.y" | 925 | #line 177 "bitbakeparser.y" |
928 | { e_def( lex, yymsp[-2].minor.yy0.string(), yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()); | 926 | { e_def( lex, yymsp[-2].minor.yy0.string(), yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()); |
929 | yymsp[-2].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); } | 927 | yymsp[-2].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); } |
930 | #line 932 "bitbakeparser.c" | 928 | #line 930 "bitbakeparser.c" |
931 | break; | 929 | break; |
932 | }; | 930 | }; |
933 | yygoto = yyRuleInfo[yyruleno].lhs; | 931 | yygoto = yyRuleInfo[yyruleno].lhs; |
@@ -986,7 +984,7 @@ static void yy_syntax_error( | |||
986 | #define TOKEN (yyminor.yy0) | 984 | #define TOKEN (yyminor.yy0) |
987 | #line 52 "bitbakeparser.y" | 985 | #line 52 "bitbakeparser.y" |
988 | e_parse_error( lex ); | 986 | e_parse_error( lex ); |
989 | #line 992 "bitbakeparser.c" | 987 | #line 990 "bitbakeparser.c" |
990 | bbparseARG_STORE; /* Suppress warning about unused %extra_argument variable */ | 988 | bbparseARG_STORE; /* Suppress warning about unused %extra_argument variable */ |
991 | } | 989 | } |
992 | 990 | ||
@@ -1042,7 +1040,7 @@ void bbparse( | |||
1042 | /* (re)initialize the parser, if necessary */ | 1040 | /* (re)initialize the parser, if necessary */ |
1043 | yypParser = (yyParser*)yyp; | 1041 | yypParser = (yyParser*)yyp; |
1044 | if( yypParser->yyidx<0 ){ | 1042 | if( yypParser->yyidx<0 ){ |
1045 | /* if( yymajor==0 ) return; // not sure why this was here... */ | 1043 | if( yymajor==0 ) return; |
1046 | yypParser->yyidx = 0; | 1044 | yypParser->yyidx = 0; |
1047 | yypParser->yyerrcnt = -1; | 1045 | yypParser->yyerrcnt = -1; |
1048 | yypParser->yystack[0].stateno = 0; | 1046 | yypParser->yystack[0].stateno = 0; |
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakescanner.cc b/bitbake/lib/bb/parse/parse_c/bitbakescanner.cc index 43dad12d39..acc13f7c34 100644 --- a/bitbake/lib/bb/parse/parse_c/bitbakescanner.cc +++ b/bitbake/lib/bb/parse/parse_c/bitbakescanner.cc | |||
@@ -8,7 +8,7 @@ | |||
8 | #define FLEX_SCANNER | 8 | #define FLEX_SCANNER |
9 | #define YY_FLEX_MAJOR_VERSION 2 | 9 | #define YY_FLEX_MAJOR_VERSION 2 |
10 | #define YY_FLEX_MINOR_VERSION 5 | 10 | #define YY_FLEX_MINOR_VERSION 5 |
11 | #define YY_FLEX_SUBMINOR_VERSION 31 | 11 | #define YY_FLEX_SUBMINOR_VERSION 33 |
12 | #if YY_FLEX_SUBMINOR_VERSION > 0 | 12 | #if YY_FLEX_SUBMINOR_VERSION > 0 |
13 | #define FLEX_BETA | 13 | #define FLEX_BETA |
14 | #endif | 14 | #endif |
@@ -30,7 +30,15 @@ | |||
30 | 30 | ||
31 | /* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */ | 31 | /* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */ |
32 | 32 | ||
33 | #if defined __STDC_VERSION__ && __STDC_VERSION__ >= 199901L | 33 | #if __STDC_VERSION__ >= 199901L |
34 | |||
35 | /* C99 says to define __STDC_LIMIT_MACROS before including stdint.h, | ||
36 | * if you want the limit (max/min) macros for int types. | ||
37 | */ | ||
38 | #ifndef __STDC_LIMIT_MACROS | ||
39 | #define __STDC_LIMIT_MACROS 1 | ||
40 | #endif | ||
41 | |||
34 | #include <inttypes.h> | 42 | #include <inttypes.h> |
35 | typedef int8_t flex_int8_t; | 43 | typedef int8_t flex_int8_t; |
36 | typedef uint8_t flex_uint8_t; | 44 | typedef uint8_t flex_uint8_t; |
@@ -153,6 +161,10 @@ int yylex_init (yyscan_t* scanner); | |||
153 | #define YY_BUF_SIZE 16384 | 161 | #define YY_BUF_SIZE 16384 |
154 | #endif | 162 | #endif |
155 | 163 | ||
164 | /* The state buf must be large enough to hold one state per character in the main buffer. | ||
165 | */ | ||
166 | #define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type)) | ||
167 | |||
156 | #ifndef YY_TYPEDEF_YY_BUFFER_STATE | 168 | #ifndef YY_TYPEDEF_YY_BUFFER_STATE |
157 | #define YY_TYPEDEF_YY_BUFFER_STATE | 169 | #define YY_TYPEDEF_YY_BUFFER_STATE |
158 | typedef struct yy_buffer_state *YY_BUFFER_STATE; | 170 | typedef struct yy_buffer_state *YY_BUFFER_STATE; |
@@ -493,7 +505,7 @@ static yyconst flex_int32_t yy_ec[256] = | |||
493 | static yyconst flex_int32_t yy_meta[59] = | 505 | static yyconst flex_int32_t yy_meta[59] = |
494 | { 0, | 506 | { 0, |
495 | 1, 1, 2, 3, 1, 1, 4, 1, 1, 1, | 507 | 1, 1, 2, 3, 1, 1, 4, 1, 1, 1, |
496 | 5, 6, 5, 5, 7, 8, 1, 7, 1, 9, | 508 | 5, 6, 5, 5, 5, 7, 1, 8, 1, 9, |
497 | 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, | 509 | 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, |
498 | 9, 9, 10, 1, 11, 9, 9, 9, 9, 9, | 510 | 9, 9, 10, 1, 11, 9, 9, 9, 9, 9, |
499 | 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, | 511 | 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, |
@@ -565,18 +577,18 @@ static yyconst flex_int16_t yy_base[847] = | |||
565 | 2077, 2072, 2066, 2069, 2056, 2067, 1398, 1343, 1408, 1404, | 577 | 2077, 2072, 2066, 2069, 2056, 2067, 1398, 1343, 1408, 1404, |
566 | 643, 1409, 2071, 2066, 2060, 2063, 2050, 2061, 2065, 2060, | 578 | 643, 1409, 2071, 2066, 2060, 2063, 2050, 2061, 2065, 2060, |
567 | 2054, 2057, 2044, 2055, 1420, 1445, 1413, 1447, 1453, 1454, | 579 | 2054, 2057, 2044, 2055, 1420, 1445, 1413, 1447, 1453, 1454, |
568 | 2059, 2054, 2047, 2050, 2035, 2043, 1455, 1459, 1460, 1461, | 580 | 2059, 2053, 2047, 2049, 2032, 2043, 1455, 1459, 1460, 1461, |
569 | 1462, 1463, 1471, 1436, 1430, 1192, 1433, 1479, 1482, 1492, | 581 | 1462, 1463, 1471, 1436, 1430, 1192, 1433, 1479, 1482, 1492, |
570 | 582 | ||
571 | 1506, 1519, 1520, 1528, 2047, 2040, 2031, 0, 2034, 2019, | 583 | 1506, 1519, 1520, 1528, 2046, 2037, 2031, 0, 2033, 2016, |
572 | 2027, 1486, 1496, 1505, 1506, 1510, 1516, 1524, 2044, 2018, | 584 | 2027, 1486, 1496, 1505, 1506, 1510, 1516, 1524, 2043, 2015, |
573 | 0, 0, 0, 0, 1281, 1517, 2043, 2042, 2039, 2035, | 585 | 0, 0, 0, 0, 1281, 1517, 2043, 2041, 2036, 2034, |
574 | 2023, 1994, 2309, 2309, 2309, 2309, 2005, 1981, 0, 0, | 586 | 2024, 1995, 2309, 2309, 2309, 2309, 2005, 1981, 0, 0, |
575 | 0, 0, 1538, 1528, 1530, 1534, 1537, 1540, 1981, 1957, | 587 | 0, 0, 1538, 1528, 1530, 1534, 1537, 1540, 1981, 1957, |
576 | 0, 0, 0, 0, 1557, 1558, 1559, 1560, 1561, 1563, | 588 | 0, 0, 0, 0, 1557, 1558, 1559, 1560, 1561, 1563, |
577 | 1568, 1547, 1988, 1959, 1954, 1948, 1580, 1581, 1582, 1590, | 589 | 1568, 1547, 1988, 1959, 1955, 1948, 1580, 1581, 1582, 1590, |
578 | 1592, 1594, 1923, 1863, 0, 0, 0, 0, 1598, 1599, | 590 | 1592, 1594, 1924, 1863, 0, 0, 0, 0, 1598, 1599, |
579 | 1600, 1874, 1858, 1350, 1584, 1803, 1792, 1801, 1790, 1603, | 591 | 1600, 1875, 1859, 1350, 1584, 1803, 1792, 1801, 1790, 1603, |
580 | 1601, 1799, 1788, 1604, 1602, 1610, 1609, 1643, 1644, 1797, | 592 | 1601, 1799, 1788, 1604, 1602, 1610, 1609, 1643, 1644, 1797, |
581 | 593 | ||
582 | 1786, 1611, 1630, 1800, 1773, 1010, 1606, 1798, 1771, 1795, | 594 | 1786, 1611, 1630, 1800, 1773, 1010, 1606, 1798, 1771, 1795, |
@@ -593,8 +605,8 @@ static yyconst flex_int16_t yy_base[847] = | |||
593 | 1768, 0, 742, 2309, 0, 1764, 0, 1778, 678, 1801, | 605 | 1768, 0, 742, 2309, 0, 1764, 0, 1778, 678, 1801, |
594 | 0, 2309, 1835, 1847, 1859, 1871, 1883, 550, 1892, 1898, | 606 | 0, 2309, 1835, 1847, 1859, 1871, 1883, 550, 1892, 1898, |
595 | 1907, 1919, 1931, 1939, 1945, 1950, 1956, 1965, 1977, 1989, | 607 | 1907, 1919, 1931, 1939, 1945, 1950, 1956, 1965, 1977, 1989, |
596 | 2001, 2013, 2025, 2033, 2039, 2042, 306, 304, 301, 2049, | 608 | 2001, 2013, 2025, 2033, 2039, 2043, 306, 304, 301, 2050, |
597 | 213, 2057, 136, 2065, 2073, 2081 | 609 | 213, 2058, 136, 2066, 2074, 2082 |
598 | } ; | 610 | } ; |
599 | 611 | ||
600 | static yyconst flex_int16_t yy_def[847] = | 612 | static yyconst flex_int16_t yy_def[847] = |
@@ -903,14 +915,14 @@ static yyconst flex_int16_t yy_nxt[2368] = | |||
903 | 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, | 915 | 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, |
904 | 112, 128, 128, 128, 128, 128, 128, 128, 128, 128, | 916 | 112, 128, 128, 128, 128, 128, 128, 128, 128, 128, |
905 | 128, 128, 128, 155, 155, 155, 155, 155, 155, 155, | 917 | 128, 128, 128, 155, 155, 155, 155, 155, 155, 155, |
906 | 155, 155, 155, 155, 155, 167, 167, 167, 705, 167, | 918 | 155, 155, 155, 155, 155, 167, 167, 167, 167, 705, |
907 | 919 | ||
908 | 167, 167, 177, 177, 704, 177, 177, 183, 701, 183, | 920 | 167, 167, 177, 177, 177, 704, 177, 183, 701, 183, |
909 | 183, 183, 183, 183, 183, 183, 183, 183, 183, 187, | 921 | 183, 183, 183, 183, 183, 183, 183, 183, 183, 187, |
910 | 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, | 922 | 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, |
911 | 187, 201, 201, 201, 201, 201, 201, 201, 201, 201, | 923 | 187, 201, 201, 201, 201, 201, 201, 201, 201, 201, |
912 | 201, 201, 201, 209, 209, 700, 209, 209, 217, 217, | 924 | 201, 201, 201, 209, 209, 209, 700, 209, 217, 217, |
913 | 238, 217, 217, 217, 223, 223, 238, 223, 223, 231, | 925 | 238, 217, 217, 217, 223, 223, 223, 238, 223, 231, |
914 | 231, 238, 231, 231, 231, 237, 237, 237, 237, 237, | 926 | 231, 238, 231, 231, 231, 237, 237, 237, 237, 237, |
915 | 237, 237, 237, 237, 237, 237, 237, 239, 239, 239, | 927 | 237, 237, 237, 237, 237, 237, 237, 239, 239, 239, |
916 | 239, 239, 239, 239, 239, 239, 239, 239, 239, 256, | 928 | 239, 239, 239, 239, 239, 239, 239, 239, 239, 256, |
@@ -919,13 +931,13 @@ static yyconst flex_int16_t yy_nxt[2368] = | |||
919 | 256, 261, 693, 692, 261, 261, 261, 261, 261, 261, | 931 | 256, 261, 693, 692, 261, 261, 261, 261, 261, 261, |
920 | 261, 261, 261, 264, 264, 264, 264, 264, 264, 264, | 932 | 261, 261, 261, 264, 264, 264, 264, 264, 264, 264, |
921 | 264, 264, 264, 264, 264, 267, 689, 688, 267, 267, | 933 | 264, 264, 264, 264, 264, 267, 689, 688, 267, 267, |
922 | 267, 267, 267, 267, 267, 267, 267, 284, 284, 687, | 934 | 267, 267, 267, 267, 267, 267, 267, 284, 284, 284, |
923 | 284, 284, 292, 292, 292, 686, 292, 292, 292, 296, | 935 | 687, 284, 292, 292, 292, 292, 686, 292, 292, 296, |
924 | 296, 184, 296, 418, 418, 184, 418, 418, 184, 184, | 936 | 184, 296, 184, 296, 418, 418, 418, 184, 418, 184, |
925 | 418, 433, 433, 683, 433, 433, 682, 678, 433, 465, | 937 | 683, 418, 433, 433, 433, 682, 433, 678, 677, 433, |
926 | 465, 677, 465, 465, 676, 675, 465, 500, 500, 674, | 938 | 465, 465, 465, 676, 465, 675, 674, 465, 500, 500, |
927 | 500, 500, 673, 654, 500, 514, 514, 653, 514, 514, | 939 | 500, 673, 500, 654, 653, 500, 514, 514, 514, 652, |
928 | 652, 651, 514, 650, 649, 642, 641, 640, 639, 638, | 940 | 514, 651, 650, 514, 649, 642, 641, 640, 639, 638, |
929 | 941 | ||
930 | 637, 636, 635, 634, 633, 632, 631, 624, 623, 622, | 942 | 637, 636, 635, 634, 633, 632, 631, 624, 623, 622, |
931 | 621, 620, 619, 611, 610, 609, 608, 607, 606, 605, | 943 | 621, 620, 619, 611, 610, 609, 608, 607, 606, 605, |
@@ -1167,14 +1179,14 @@ static yyconst flex_int16_t yy_chk[2368] = | |||
1167 | 815, 815, 815, 815, 815, 815, 815, 815, 815, 815, | 1179 | 815, 815, 815, 815, 815, 815, 815, 815, 815, 815, |
1168 | 815, 816, 816, 816, 816, 816, 816, 816, 816, 816, | 1180 | 815, 816, 816, 816, 816, 816, 816, 816, 816, 816, |
1169 | 816, 816, 816, 817, 817, 817, 817, 817, 817, 817, | 1181 | 816, 816, 816, 817, 817, 817, 817, 817, 817, 817, |
1170 | 817, 817, 817, 817, 817, 819, 819, 819, 683, 819, | 1182 | 817, 817, 817, 817, 817, 819, 819, 819, 819, 683, |
1171 | 1183 | ||
1172 | 819, 819, 820, 820, 682, 820, 820, 821, 674, 821, | 1184 | 819, 819, 820, 820, 820, 682, 820, 821, 674, 821, |
1173 | 821, 821, 821, 821, 821, 821, 821, 821, 821, 822, | 1185 | 821, 821, 821, 821, 821, 821, 821, 821, 821, 822, |
1174 | 822, 822, 822, 822, 822, 822, 822, 822, 822, 822, | 1186 | 822, 822, 822, 822, 822, 822, 822, 822, 822, 822, |
1175 | 822, 823, 823, 823, 823, 823, 823, 823, 823, 823, | 1187 | 822, 823, 823, 823, 823, 823, 823, 823, 823, 823, |
1176 | 823, 823, 823, 824, 824, 673, 824, 824, 825, 825, | 1188 | 823, 823, 823, 824, 824, 824, 673, 824, 825, 825, |
1177 | 666, 825, 825, 825, 826, 826, 665, 826, 826, 827, | 1189 | 666, 825, 825, 825, 826, 826, 826, 665, 826, 827, |
1178 | 827, 664, 827, 827, 827, 828, 828, 828, 828, 828, | 1190 | 827, 664, 827, 827, 827, 828, 828, 828, 828, 828, |
1179 | 828, 828, 828, 828, 828, 828, 828, 829, 829, 829, | 1191 | 828, 828, 828, 828, 828, 828, 828, 829, 829, 829, |
1180 | 829, 829, 829, 829, 829, 829, 829, 829, 829, 830, | 1192 | 829, 829, 829, 829, 829, 829, 829, 829, 829, 830, |
@@ -1183,13 +1195,13 @@ static yyconst flex_int16_t yy_chk[2368] = | |||
1183 | 830, 831, 650, 649, 831, 831, 831, 831, 831, 831, | 1195 | 830, 831, 650, 649, 831, 831, 831, 831, 831, 831, |
1184 | 831, 831, 831, 832, 832, 832, 832, 832, 832, 832, | 1196 | 831, 831, 831, 832, 832, 832, 832, 832, 832, 832, |
1185 | 832, 832, 832, 832, 832, 833, 638, 637, 833, 833, | 1197 | 832, 832, 832, 832, 832, 833, 638, 637, 833, 833, |
1186 | 833, 833, 833, 833, 833, 833, 833, 834, 834, 632, | 1198 | 833, 833, 833, 833, 833, 833, 833, 834, 834, 834, |
1187 | 834, 834, 835, 835, 835, 631, 835, 835, 835, 836, | 1199 | 632, 834, 835, 835, 835, 835, 631, 835, 835, 836, |
1188 | 836, 630, 836, 840, 840, 629, 840, 840, 628, 627, | 1200 | 630, 836, 629, 836, 840, 840, 840, 628, 840, 627, |
1189 | 840, 842, 842, 620, 842, 842, 619, 611, 842, 844, | 1201 | 620, 840, 842, 842, 842, 619, 842, 611, 610, 842, |
1190 | 844, 610, 844, 844, 609, 607, 844, 845, 845, 606, | 1202 | 844, 844, 844, 609, 844, 607, 606, 844, 845, 845, |
1191 | 845, 845, 605, 586, 845, 846, 846, 585, 846, 846, | 1203 | 845, 605, 845, 586, 585, 845, 846, 846, 846, 584, |
1192 | 584, 583, 846, 582, 581, 574, 573, 572, 571, 570, | 1204 | 846, 583, 582, 846, 581, 574, 573, 572, 571, 570, |
1193 | 1205 | ||
1194 | 569, 568, 567, 566, 565, 564, 563, 556, 555, 554, | 1206 | 569, 568, 567, 566, 565, 564, 563, 556, 555, 554, |
1195 | 553, 552, 551, 541, 540, 539, 538, 536, 535, 534, | 1207 | 553, 552, 551, 541, 540, 539, 538, 536, 535, 534, |
@@ -1323,7 +1335,7 @@ int errorParse; | |||
1323 | enum { | 1335 | enum { |
1324 | errorNone = 0, | 1336 | errorNone = 0, |
1325 | errorUnexpectedInput, | 1337 | errorUnexpectedInput, |
1326 | errorUnsupportedFeature, | 1338 | errorUnsupportedFeature, |
1327 | }; | 1339 | }; |
1328 | 1340 | ||
1329 | } | 1341 | } |
@@ -1351,7 +1363,7 @@ static const char* fixup_escapes (const char* sz); | |||
1351 | 1363 | ||
1352 | 1364 | ||
1353 | 1365 | ||
1354 | #line 1355 "<stdout>" | 1366 | #line 1367 "<stdout>" |
1355 | 1367 | ||
1356 | #define INITIAL 0 | 1368 | #define INITIAL 0 |
1357 | #define S_DEF 1 | 1369 | #define S_DEF 1 |
@@ -1587,11 +1599,11 @@ YY_DECL | |||
1587 | #line 164 "bitbakescanner.l" | 1599 | #line 164 "bitbakescanner.l" |
1588 | 1600 | ||
1589 | 1601 | ||
1590 | #line 1591 "<stdout>" | 1602 | #line 1603 "<stdout>" |
1591 | 1603 | ||
1592 | if ( yyg->yy_init ) | 1604 | if ( !yyg->yy_init ) |
1593 | { | 1605 | { |
1594 | yyg->yy_init = 0; | 1606 | yyg->yy_init = 1; |
1595 | 1607 | ||
1596 | #ifdef YY_USER_INIT | 1608 | #ifdef YY_USER_INIT |
1597 | YY_USER_INIT; | 1609 | YY_USER_INIT; |
@@ -1972,7 +1984,7 @@ YY_RULE_SETUP | |||
1972 | #line 254 "bitbakescanner.l" | 1984 | #line 254 "bitbakescanner.l" |
1973 | ECHO; | 1985 | ECHO; |
1974 | YY_BREAK | 1986 | YY_BREAK |
1975 | #line 1976 "<stdout>" | 1987 | #line 1988 "<stdout>" |
1976 | 1988 | ||
1977 | case YY_END_OF_BUFFER: | 1989 | case YY_END_OF_BUFFER: |
1978 | { | 1990 | { |
@@ -2274,7 +2286,7 @@ static int yy_get_next_buffer (yyscan_t yyscanner) | |||
2274 | static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state , yyscan_t yyscanner) | 2286 | static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state , yyscan_t yyscanner) |
2275 | { | 2287 | { |
2276 | register int yy_is_jam; | 2288 | register int yy_is_jam; |
2277 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; | 2289 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* This var may be unused depending upon options. */ |
2278 | register char *yy_cp = yyg->yy_c_buf_p; | 2290 | register char *yy_cp = yyg->yy_c_buf_p; |
2279 | 2291 | ||
2280 | register YY_CHAR yy_c = 1; | 2292 | register YY_CHAR yy_c = 1; |
@@ -2730,10 +2742,10 @@ YY_BUFFER_STATE yy_scan_buffer (char * base, yy_size_t size , yyscan_t yyscann | |||
2730 | * @note If you want to scan bytes that may contain NUL values, then use | 2742 | * @note If you want to scan bytes that may contain NUL values, then use |
2731 | * yy_scan_bytes() instead. | 2743 | * yy_scan_bytes() instead. |
2732 | */ | 2744 | */ |
2733 | YY_BUFFER_STATE yy_scan_string (yyconst char * yy_str , yyscan_t yyscanner) | 2745 | YY_BUFFER_STATE yy_scan_string (yyconst char * yystr , yyscan_t yyscanner) |
2734 | { | 2746 | { |
2735 | 2747 | ||
2736 | return yy_scan_bytes(yy_str,strlen(yy_str) ,yyscanner); | 2748 | return yy_scan_bytes(yystr,strlen(yystr) ,yyscanner); |
2737 | } | 2749 | } |
2738 | 2750 | ||
2739 | /** Setup the input buffer state to scan the given bytes. The next call to yylex() will | 2751 | /** Setup the input buffer state to scan the given bytes. The next call to yylex() will |
@@ -2743,7 +2755,7 @@ YY_BUFFER_STATE yy_scan_string (yyconst char * yy_str , yyscan_t yyscanner) | |||
2743 | * @param yyscanner The scanner object. | 2755 | * @param yyscanner The scanner object. |
2744 | * @return the newly allocated buffer state object. | 2756 | * @return the newly allocated buffer state object. |
2745 | */ | 2757 | */ |
2746 | YY_BUFFER_STATE yy_scan_bytes (yyconst char * bytes, int len , yyscan_t yyscanner) | 2758 | YY_BUFFER_STATE yy_scan_bytes (yyconst char * yybytes, int _yybytes_len , yyscan_t yyscanner) |
2747 | { | 2759 | { |
2748 | YY_BUFFER_STATE b; | 2760 | YY_BUFFER_STATE b; |
2749 | char *buf; | 2761 | char *buf; |
@@ -2751,15 +2763,15 @@ YY_BUFFER_STATE yy_scan_bytes (yyconst char * bytes, int len , yyscan_t yyscan | |||
2751 | int i; | 2763 | int i; |
2752 | 2764 | ||
2753 | /* Get memory for full buffer, including space for trailing EOB's. */ | 2765 | /* Get memory for full buffer, including space for trailing EOB's. */ |
2754 | n = len + 2; | 2766 | n = _yybytes_len + 2; |
2755 | buf = (char *) yyalloc(n ,yyscanner ); | 2767 | buf = (char *) yyalloc(n ,yyscanner ); |
2756 | if ( ! buf ) | 2768 | if ( ! buf ) |
2757 | YY_FATAL_ERROR( "out of dynamic memory in yy_scan_bytes()" ); | 2769 | YY_FATAL_ERROR( "out of dynamic memory in yy_scan_bytes()" ); |
2758 | 2770 | ||
2759 | for ( i = 0; i < len; ++i ) | 2771 | for ( i = 0; i < _yybytes_len; ++i ) |
2760 | buf[i] = bytes[i]; | 2772 | buf[i] = yybytes[i]; |
2761 | 2773 | ||
2762 | buf[len] = buf[len+1] = YY_END_OF_BUFFER_CHAR; | 2774 | buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR; |
2763 | 2775 | ||
2764 | b = yy_scan_buffer(buf,n ,yyscanner); | 2776 | b = yy_scan_buffer(buf,n ,yyscanner); |
2765 | if ( ! b ) | 2777 | if ( ! b ) |
@@ -2987,21 +2999,51 @@ void yyset_debug (int bdebug , yyscan_t yyscanner) | |||
2987 | 2999 | ||
2988 | /* Accessor methods for yylval and yylloc */ | 3000 | /* Accessor methods for yylval and yylloc */ |
2989 | 3001 | ||
3002 | /* User-visible API */ | ||
3003 | |||
3004 | /* yylex_init is special because it creates the scanner itself, so it is | ||
3005 | * the ONLY reentrant function that doesn't take the scanner as the last argument. | ||
3006 | * That's why we explicitly handle the declaration, instead of using our macros. | ||
3007 | */ | ||
3008 | |||
3009 | int yylex_init(yyscan_t* ptr_yy_globals) | ||
3010 | |||
3011 | { | ||
3012 | if (ptr_yy_globals == NULL){ | ||
3013 | errno = EINVAL; | ||
3014 | return 1; | ||
3015 | } | ||
3016 | |||
3017 | *ptr_yy_globals = (yyscan_t) yyalloc ( sizeof( struct yyguts_t ), NULL ); | ||
3018 | |||
3019 | if (*ptr_yy_globals == NULL){ | ||
3020 | errno = ENOMEM; | ||
3021 | return 1; | ||
3022 | } | ||
3023 | |||
3024 | /* By setting to 0xAA, we expose bugs in yy_init_globals. Leave at 0x00 for releases. */ | ||
3025 | memset(*ptr_yy_globals,0x00,sizeof(struct yyguts_t)); | ||
3026 | |||
3027 | return yy_init_globals ( *ptr_yy_globals ); | ||
3028 | } | ||
3029 | |||
2990 | static int yy_init_globals (yyscan_t yyscanner) | 3030 | static int yy_init_globals (yyscan_t yyscanner) |
2991 | { | 3031 | { |
2992 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; | 3032 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; |
2993 | /* Initialization is the same as for the non-reentrant scanner. | 3033 | /* Initialization is the same as for the non-reentrant scanner. |
2994 | This function is called once per scanner lifetime. */ | 3034 | * This function is called from yylex_destroy(), so don't allocate here. |
3035 | */ | ||
2995 | 3036 | ||
2996 | yyg->yy_buffer_stack = 0; | 3037 | yyg->yy_buffer_stack = 0; |
2997 | yyg->yy_buffer_stack_top = 0; | 3038 | yyg->yy_buffer_stack_top = 0; |
2998 | yyg->yy_buffer_stack_max = 0; | 3039 | yyg->yy_buffer_stack_max = 0; |
2999 | yyg->yy_c_buf_p = (char *) 0; | 3040 | yyg->yy_c_buf_p = (char *) 0; |
3000 | yyg->yy_init = 1; | 3041 | yyg->yy_init = 0; |
3001 | yyg->yy_start = 0; | 3042 | yyg->yy_start = 0; |
3043 | |||
3002 | yyg->yy_start_stack_ptr = 0; | 3044 | yyg->yy_start_stack_ptr = 0; |
3003 | yyg->yy_start_stack_depth = 0; | 3045 | yyg->yy_start_stack_depth = 0; |
3004 | yyg->yy_start_stack = (int *) 0; | 3046 | yyg->yy_start_stack = NULL; |
3005 | 3047 | ||
3006 | /* Defined in main.c */ | 3048 | /* Defined in main.c */ |
3007 | #ifdef YY_STDINIT | 3049 | #ifdef YY_STDINIT |
@@ -3018,33 +3060,6 @@ static int yy_init_globals (yyscan_t yyscanner) | |||
3018 | return 0; | 3060 | return 0; |
3019 | } | 3061 | } |
3020 | 3062 | ||
3021 | /* User-visible API */ | ||
3022 | |||
3023 | /* yylex_init is special because it creates the scanner itself, so it is | ||
3024 | * the ONLY reentrant function that doesn't take the scanner as the last argument. | ||
3025 | * That's why we explicitly handle the declaration, instead of using our macros. | ||
3026 | */ | ||
3027 | |||
3028 | int yylex_init(yyscan_t* ptr_yy_globals) | ||
3029 | |||
3030 | { | ||
3031 | if (ptr_yy_globals == NULL){ | ||
3032 | errno = EINVAL; | ||
3033 | return 1; | ||
3034 | } | ||
3035 | |||
3036 | *ptr_yy_globals = (yyscan_t) yyalloc ( sizeof( struct yyguts_t ), NULL ); | ||
3037 | |||
3038 | if (*ptr_yy_globals == NULL){ | ||
3039 | errno = ENOMEM; | ||
3040 | return 1; | ||
3041 | } | ||
3042 | |||
3043 | memset(*ptr_yy_globals,0,sizeof(struct yyguts_t)); | ||
3044 | |||
3045 | return yy_init_globals ( *ptr_yy_globals ); | ||
3046 | } | ||
3047 | |||
3048 | /* yylex_destroy is for both reentrant and non-reentrant scanners. */ | 3063 | /* yylex_destroy is for both reentrant and non-reentrant scanners. */ |
3049 | int yylex_destroy (yyscan_t yyscanner) | 3064 | int yylex_destroy (yyscan_t yyscanner) |
3050 | { | 3065 | { |
@@ -3065,8 +3080,13 @@ int yylex_destroy (yyscan_t yyscanner) | |||
3065 | yyfree(yyg->yy_start_stack ,yyscanner ); | 3080 | yyfree(yyg->yy_start_stack ,yyscanner ); |
3066 | yyg->yy_start_stack = NULL; | 3081 | yyg->yy_start_stack = NULL; |
3067 | 3082 | ||
3083 | /* Reset the globals. This is important in a non-reentrant scanner so the next time | ||
3084 | * yylex() is called, initialization will occur. */ | ||
3085 | yy_init_globals( yyscanner); | ||
3086 | |||
3068 | /* Destroy the main struct (reentrant only). */ | 3087 | /* Destroy the main struct (reentrant only). */ |
3069 | yyfree ( yyscanner , yyscanner ); | 3088 | yyfree ( yyscanner , yyscanner ); |
3089 | yyscanner = NULL; | ||
3070 | return 0; | 3090 | return 0; |
3071 | } | 3091 | } |
3072 | 3092 | ||
@@ -3078,7 +3098,6 @@ int yylex_destroy (yyscan_t yyscanner) | |||
3078 | static void yy_flex_strncpy (char* s1, yyconst char * s2, int n , yyscan_t yyscanner) | 3098 | static void yy_flex_strncpy (char* s1, yyconst char * s2, int n , yyscan_t yyscanner) |
3079 | { | 3099 | { |
3080 | register int i; | 3100 | register int i; |
3081 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; | ||
3082 | for ( i = 0; i < n; ++i ) | 3101 | for ( i = 0; i < n; ++i ) |
3083 | s1[i] = s2[i]; | 3102 | s1[i] = s2[i]; |
3084 | } | 3103 | } |
@@ -3088,7 +3107,6 @@ static void yy_flex_strncpy (char* s1, yyconst char * s2, int n , yyscan_t yysca | |||
3088 | static int yy_flex_strlen (yyconst char * s , yyscan_t yyscanner) | 3107 | static int yy_flex_strlen (yyconst char * s , yyscan_t yyscanner) |
3089 | { | 3108 | { |
3090 | register int n; | 3109 | register int n; |
3091 | struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; | ||
3092 | for ( n = 0; s[n]; ++n ) | 3110 | for ( n = 0; s[n]; ++n ) |
3093 | ; | 3111 | ; |
3094 | 3112 | ||
@@ -3120,18 +3138,6 @@ void yyfree (void * ptr , yyscan_t yyscanner) | |||
3120 | 3138 | ||
3121 | #define YYTABLES_NAME "yytables" | 3139 | #define YYTABLES_NAME "yytables" |
3122 | 3140 | ||
3123 | #undef YY_NEW_FILE | ||
3124 | #undef YY_FLUSH_BUFFER | ||
3125 | #undef yy_set_bol | ||
3126 | #undef yy_new_buffer | ||
3127 | #undef yy_set_interactive | ||
3128 | #undef yytext_ptr | ||
3129 | #undef YY_DO_BEFORE_ACTION | ||
3130 | |||
3131 | #ifdef YY_DECL_IS_OURS | ||
3132 | #undef YY_DECL_IS_OURS | ||
3133 | #undef YY_DECL | ||
3134 | #endif | ||
3135 | #line 254 "bitbakescanner.l" | 3141 | #line 254 "bitbakescanner.l" |
3136 | 3142 | ||
3137 | 3143 | ||
@@ -3148,47 +3154,49 @@ void lex_t::accept (int token, const char* sz) | |||
3148 | 3154 | ||
3149 | void lex_t::input (char *buf, int *result, int max_size) | 3155 | void lex_t::input (char *buf, int *result, int max_size) |
3150 | { | 3156 | { |
3151 | printf("lex_t::input %p %d\n", buf, max_size); | 3157 | /* printf("lex_t::input %p %d\n", buf, max_size); */ |
3152 | *result = fread(buf, 1, max_size, file); | 3158 | *result = fread(buf, 1, max_size, file); |
3153 | printf("lex_t::input result %d\n", *result); | 3159 | /* printf("lex_t::input result %d\n", *result); */ |
3154 | } | 3160 | } |
3155 | 3161 | ||
3156 | int lex_t::line ()const | 3162 | int lex_t::line ()const |
3157 | { | 3163 | { |
3158 | printf("lex_t::line\n"); | 3164 | /* printf("lex_t::line\n"); */ |
3159 | return yyget_lineno (scanner); | 3165 | return yyget_lineno (scanner); |
3160 | } | 3166 | } |
3161 | 3167 | ||
3162 | 3168 | ||
3163 | extern "C" { | 3169 | extern "C" { |
3164 | 3170 | ||
3165 | void parse (FILE* file, PyObject* data) | 3171 | void parse (FILE* file, char* name, PyObject* data, int config) |
3166 | { | 3172 | { |
3167 | printf("parse bbparseAlloc\n"); | 3173 | /* printf("parse bbparseAlloc\n"); */ |
3168 | void* parser = bbparseAlloc (malloc); | 3174 | void* parser = bbparseAlloc (malloc); |
3169 | yyscan_t scanner; | 3175 | yyscan_t scanner; |
3170 | lex_t lex; | 3176 | lex_t lex; |
3171 | 3177 | ||
3172 | printf("parse yylex_init\n"); | 3178 | /* printf("parse yylex_init\n"); */ |
3173 | yylex_init (&scanner); | 3179 | yylex_init (&scanner); |
3174 | 3180 | ||
3175 | lex.parser = parser; | 3181 | lex.parser = parser; |
3176 | lex.scanner = scanner; | 3182 | lex.scanner = scanner; |
3177 | lex.file = file; | 3183 | lex.file = file; |
3184 | lex.name = name; | ||
3178 | lex.data = data; | 3185 | lex.data = data; |
3186 | lex.config = config; | ||
3179 | lex.parse = bbparse; | 3187 | lex.parse = bbparse; |
3180 | printf("parse yyset_extra\n"); | 3188 | /*printf("parse yyset_extra\n"); */ |
3181 | yyset_extra (&lex, scanner); | 3189 | yyset_extra (&lex, scanner); |
3182 | 3190 | ||
3183 | printf("parse yylex\n"); | 3191 | /* printf("parse yylex\n"); */ |
3184 | int result = yylex (scanner); | 3192 | int result = yylex (scanner); |
3185 | 3193 | ||
3186 | printf("parse result %d\n", result); | 3194 | /* printf("parse result %d\n", result); */ |
3187 | 3195 | ||
3188 | lex.accept (0); | 3196 | lex.accept (0); |
3189 | printf("parse lex.accept\n"); | 3197 | /* printf("parse lex.accept\n"); */ |
3190 | bbparseTrace (NULL, NULL); | 3198 | bbparseTrace (NULL, NULL); |
3191 | printf("parse bbparseTrace\n"); | 3199 | /* printf("parse bbparseTrace\n"); */ |
3192 | 3200 | ||
3193 | if (result != T_EOF) | 3201 | if (result != T_EOF) |
3194 | printf ("premature end of file\n"); | 3202 | printf ("premature end of file\n"); |
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakescanner.l b/bitbake/lib/bb/parse/parse_c/bitbakescanner.l index f69a7325c3..b6592f28e9 100644 --- a/bitbake/lib/bb/parse/parse_c/bitbakescanner.l +++ b/bitbake/lib/bb/parse/parse_c/bitbakescanner.l | |||
@@ -91,7 +91,7 @@ int errorParse; | |||
91 | enum { | 91 | enum { |
92 | errorNone = 0, | 92 | errorNone = 0, |
93 | errorUnexpectedInput, | 93 | errorUnexpectedInput, |
94 | errorUnsupportedFeature, | 94 | errorUnsupportedFeature, |
95 | }; | 95 | }; |
96 | 96 | ||
97 | } | 97 | } |
@@ -142,7 +142,7 @@ SSTRING \'([^\n\r]|"\\\n")*\' | |||
142 | VALUE ([^'" \t\n])|([^'" \t\n]([^\n]|(\\\n))*[^'" \t\n]) | 142 | VALUE ([^'" \t\n])|([^'" \t\n]([^\n]|(\\\n))*[^'" \t\n]) |
143 | 143 | ||
144 | C_SS [a-zA-Z_] | 144 | C_SS [a-zA-Z_] |
145 | C_SB [a-zA-Z0-9_+-.] | 145 | C_SB [a-zA-Z0-9_+-./] |
146 | REF $\{{C_SS}{C_SB}*\} | 146 | REF $\{{C_SS}{C_SB}*\} |
147 | SYMBOL {C_SS}{C_SB}* | 147 | SYMBOL {C_SS}{C_SB}* |
148 | VARIABLE $?{C_SS}({C_SB}*|{REF})*(\[[a-zA-Z0-9_]*\])? | 148 | VARIABLE $?{C_SS}({C_SB}*|{REF})*(\[[a-zA-Z0-9_]*\])? |
@@ -265,47 +265,49 @@ void lex_t::accept (int token, const char* sz) | |||
265 | 265 | ||
266 | void lex_t::input (char *buf, int *result, int max_size) | 266 | void lex_t::input (char *buf, int *result, int max_size) |
267 | { | 267 | { |
268 | printf("lex_t::input %p %d\n", buf, max_size); | 268 | /* printf("lex_t::input %p %d\n", buf, max_size); */ |
269 | *result = fread(buf, 1, max_size, file); | 269 | *result = fread(buf, 1, max_size, file); |
270 | printf("lex_t::input result %d\n", *result); | 270 | /* printf("lex_t::input result %d\n", *result); */ |
271 | } | 271 | } |
272 | 272 | ||
273 | int lex_t::line ()const | 273 | int lex_t::line ()const |
274 | { | 274 | { |
275 | printf("lex_t::line\n"); | 275 | /* printf("lex_t::line\n"); */ |
276 | return yyget_lineno (scanner); | 276 | return yyget_lineno (scanner); |
277 | } | 277 | } |
278 | 278 | ||
279 | 279 | ||
280 | extern "C" { | 280 | extern "C" { |
281 | 281 | ||
282 | void parse (FILE* file, PyObject* data) | 282 | void parse (FILE* file, char* name, PyObject* data, int config) |
283 | { | 283 | { |
284 | printf("parse bbparseAlloc\n"); | 284 | /* printf("parse bbparseAlloc\n"); */ |
285 | void* parser = bbparseAlloc (malloc); | 285 | void* parser = bbparseAlloc (malloc); |
286 | yyscan_t scanner; | 286 | yyscan_t scanner; |
287 | lex_t lex; | 287 | lex_t lex; |
288 | 288 | ||
289 | printf("parse yylex_init\n"); | 289 | /* printf("parse yylex_init\n"); */ |
290 | yylex_init (&scanner); | 290 | yylex_init (&scanner); |
291 | 291 | ||
292 | lex.parser = parser; | 292 | lex.parser = parser; |
293 | lex.scanner = scanner; | 293 | lex.scanner = scanner; |
294 | lex.file = file; | 294 | lex.file = file; |
295 | lex.name = name; | ||
295 | lex.data = data; | 296 | lex.data = data; |
297 | lex.config = config; | ||
296 | lex.parse = bbparse; | 298 | lex.parse = bbparse; |
297 | printf("parse yyset_extra\n"); | 299 | /*printf("parse yyset_extra\n"); */ |
298 | yyset_extra (&lex, scanner); | 300 | yyset_extra (&lex, scanner); |
299 | 301 | ||
300 | printf("parse yylex\n"); | 302 | /* printf("parse yylex\n"); */ |
301 | int result = yylex (scanner); | 303 | int result = yylex (scanner); |
302 | 304 | ||
303 | printf("parse result %d\n", result); | 305 | /* printf("parse result %d\n", result); */ |
304 | 306 | ||
305 | lex.accept (0); | 307 | lex.accept (0); |
306 | printf("parse lex.accept\n"); | 308 | /* printf("parse lex.accept\n"); */ |
307 | bbparseTrace (NULL, NULL); | 309 | bbparseTrace (NULL, NULL); |
308 | printf("parse bbparseTrace\n"); | 310 | /* printf("parse bbparseTrace\n"); */ |
309 | 311 | ||
310 | if (result != T_EOF) | 312 | if (result != T_EOF) |
311 | printf ("premature end of file\n"); | 313 | printf ("premature end of file\n"); |
diff --git a/bitbake/lib/bb/parse/parse_c/lexer.h b/bitbake/lib/bb/parse/parse_c/lexer.h index 651f3a8618..cb32be7037 100644 --- a/bitbake/lib/bb/parse/parse_c/lexer.h +++ b/bitbake/lib/bb/parse/parse_c/lexer.h | |||
@@ -27,13 +27,15 @@ THE USE OR OTHER DEALINGS IN THE SOFTWARE. | |||
27 | #include "Python.h" | 27 | #include "Python.h" |
28 | 28 | ||
29 | extern "C" { | 29 | extern "C" { |
30 | 30 | ||
31 | struct lex_t { | 31 | struct lex_t { |
32 | void* parser; | 32 | void* parser; |
33 | void* scanner; | 33 | void* scanner; |
34 | FILE* file; | 34 | FILE* file; |
35 | char *name; | ||
35 | PyObject *data; | 36 | PyObject *data; |
36 | 37 | int config; | |
38 | |||
37 | void* (*parse)(void*, int, token_t, lex_t*); | 39 | void* (*parse)(void*, int, token_t, lex_t*); |
38 | 40 | ||
39 | void accept(int token, const char* sz = NULL); | 41 | void accept(int token, const char* sz = NULL); |
diff --git a/bitbake/lib/bb/parse/parse_c/lexerc.h b/bitbake/lib/bb/parse/parse_c/lexerc.h index 0163a7d632..c8a19fb222 100644 --- a/bitbake/lib/bb/parse/parse_c/lexerc.h +++ b/bitbake/lib/bb/parse/parse_c/lexerc.h | |||
@@ -11,7 +11,9 @@ typedef struct { | |||
11 | void *parser; | 11 | void *parser; |
12 | void *scanner; | 12 | void *scanner; |
13 | FILE *file; | 13 | FILE *file; |
14 | char *name; | ||
14 | PyObject *data; | 15 | PyObject *data; |
16 | int config; | ||
15 | } lex_t; | 17 | } lex_t; |
16 | 18 | ||
17 | #endif | 19 | #endif |
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index c82090fec0..34f4d25996 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
@@ -23,7 +23,7 @@ | |||
23 | 23 | ||
24 | import re, bb, os, sys, time | 24 | import re, bb, os, sys, time |
25 | import bb.fetch, bb.build, bb.utils | 25 | import bb.fetch, bb.build, bb.utils |
26 | from bb import debug, data, fetch, fatal, methodpool | 26 | from bb import data, fetch, methodpool |
27 | 27 | ||
28 | from ConfHandler import include, localpath, obtain, init | 28 | from ConfHandler import include, localpath, obtain, init |
29 | from bb.parse import ParseError | 29 | from bb.parse import ParseError |
@@ -44,6 +44,13 @@ __bbpath_found__ = 0 | |||
44 | __classname__ = "" | 44 | __classname__ = "" |
45 | classes = [ None, ] | 45 | classes = [ None, ] |
46 | 46 | ||
47 | # We need to indicate EOF to the feeder. This code is so messy that | ||
48 | # factoring it out to a close_parse_file method is out of the question. | ||
49 | # We will use IN_PYTHON_EOF as an indicator to just close the method | ||
50 | # | ||
51 | # The two parts using it are tightly integrated anyway | ||
52 | IN_PYTHON_EOF = -9999999999999 | ||
53 | |||
47 | __parsed_methods__ = methodpool.get_parsed_dict() | 54 | __parsed_methods__ = methodpool.get_parsed_dict() |
48 | 55 | ||
49 | def supports(fn, d): | 56 | def supports(fn, d): |
@@ -60,9 +67,9 @@ def inherit(files, d): | |||
60 | file = os.path.join('classes', '%s.bbclass' % file) | 67 | file = os.path.join('classes', '%s.bbclass' % file) |
61 | 68 | ||
62 | if not file in __inherit_cache.split(): | 69 | if not file in __inherit_cache.split(): |
63 | debug(2, "BB %s:%d: inheriting %s" % (fn, lineno, file)) | 70 | bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file)) |
64 | __inherit_cache += " %s" % file | 71 | __inherit_cache += " %s" % file |
65 | include(fn, file, d) | 72 | include(fn, file, d, "inherit") |
66 | data.setVar('__inherit_cache', __inherit_cache, d) | 73 | data.setVar('__inherit_cache', __inherit_cache, d) |
67 | 74 | ||
68 | 75 | ||
@@ -75,9 +82,9 @@ def handle(fn, d, include = 0): | |||
75 | __residue__ = [] | 82 | __residue__ = [] |
76 | 83 | ||
77 | if include == 0: | 84 | if include == 0: |
78 | debug(2, "BB " + fn + ": handle(data)") | 85 | bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)") |
79 | else: | 86 | else: |
80 | debug(2, "BB " + fn + ": handle(data, include)") | 87 | bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)") |
81 | 88 | ||
82 | (root, ext) = os.path.splitext(os.path.basename(fn)) | 89 | (root, ext) = os.path.splitext(os.path.basename(fn)) |
83 | base_name = "%s%s" % (root,ext) | 90 | base_name = "%s%s" % (root,ext) |
@@ -132,7 +139,7 @@ def handle(fn, d, include = 0): | |||
132 | feeder(lineno, s, fn, base_name, d) | 139 | feeder(lineno, s, fn, base_name, d) |
133 | if __inpython__: | 140 | if __inpython__: |
134 | # add a blank line to close out any python definition | 141 | # add a blank line to close out any python definition |
135 | feeder(lineno + 1, "", fn, base_name, d) | 142 | feeder(IN_PYTHON_EOF, "", fn, base_name, d) |
136 | if ext == ".bbclass": | 143 | if ext == ".bbclass": |
137 | classes.remove(__classname__) | 144 | classes.remove(__classname__) |
138 | else: | 145 | else: |
@@ -152,7 +159,7 @@ def handle(fn, d, include = 0): | |||
152 | if t: | 159 | if t: |
153 | data.setVar('T', t, d) | 160 | data.setVar('T', t, d) |
154 | except Exception, e: | 161 | except Exception, e: |
155 | bb.debug(1, "executing anonymous function: %s" % e) | 162 | bb.msg.debug(1, bb.msg.domain.Parsing, "executing anonymous function: %s" % e) |
156 | raise | 163 | raise |
157 | data.delVar("__anonqueue", d) | 164 | data.delVar("__anonqueue", d) |
158 | data.delVar("__anonfunc", d) | 165 | data.delVar("__anonfunc", d) |
@@ -220,7 +227,7 @@ def feeder(lineno, s, fn, root, d): | |||
220 | 227 | ||
221 | if __inpython__: | 228 | if __inpython__: |
222 | m = __python_func_regexp__.match(s) | 229 | m = __python_func_regexp__.match(s) |
223 | if m: | 230 | if m and lineno != IN_PYTHON_EOF: |
224 | __body__.append(s) | 231 | __body__.append(s) |
225 | return | 232 | return |
226 | else: | 233 | else: |
@@ -240,6 +247,9 @@ def feeder(lineno, s, fn, root, d): | |||
240 | __body__ = [] | 247 | __body__ = [] |
241 | __inpython__ = False | 248 | __inpython__ = False |
242 | 249 | ||
250 | if lineno == IN_PYTHON_EOF: | ||
251 | return | ||
252 | |||
243 | # fall through | 253 | # fall through |
244 | 254 | ||
245 | if s == '' or s[0] == '#': return # skip comments and empty lines | 255 | if s == '' or s[0] == '#': return # skip comments and empty lines |
@@ -374,7 +384,7 @@ def vars_from_file(mypkg, d): | |||
374 | def set_additional_vars(file, d, include): | 384 | def set_additional_vars(file, d, include): |
375 | """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}""" | 385 | """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}""" |
376 | 386 | ||
377 | debug(2,"BB %s: set_additional_vars" % file) | 387 | bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file) |
378 | 388 | ||
379 | src_uri = data.getVar('SRC_URI', d) | 389 | src_uri = data.getVar('SRC_URI', d) |
380 | if not src_uri: | 390 | if not src_uri: |
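
The IN_PYTHON_EOF change above replaces the old trick of feeding one extra blank line at end-of-file with an explicit sentinel line number, so feeder() can distinguish a genuine blank line inside a python block from the end of the file and close the block without falling through to normal line handling. A minimal, self-contained sketch of the same sentinel pattern (the Feeder class and its method names are hypothetical, not BitBake's API):

    IN_PYTHON_EOF = -9999999999999  # sentinel: no real line ever has this number

    class Feeder:
        def __init__(self):
            self.inpython = False
            self.body = []

        def feed(self, lineno, line):
            if self.inpython:
                # Indented lines continue the python block, but only for
                # real input lines, never for the EOF sentinel.
                if line.startswith((" ", "\t")) and lineno != IN_PYTHON_EOF:
                    self.body.append(line)
                    return
                print("closing python block of %d lines" % len(self.body))
                self.body = []
                self.inpython = False
                if lineno == IN_PYTHON_EOF:
                    return  # end of file: do not fall through
            if line.startswith("python "):
                self.inpython = True
                return
            # ... normal (non-python) line handling would go here ...

    f = Feeder()
    f.feed(1, "python do_foo {")   # hypothetical block opener
    f.feed(2, "    pass")
    f.feed(IN_PYTHON_EOF, "")      # close out the block at end of file
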
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index 90978300af..4bc2bbc2b7 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
@@ -22,7 +22,6 @@ | |||
22 | Place, Suite 330, Boston, MA 02111-1307 USA.""" | 22 | Place, Suite 330, Boston, MA 02111-1307 USA.""" |
23 | 23 | ||
24 | import re, bb.data, os, sys | 24 | import re, bb.data, os, sys |
25 | from bb import debug, fatal | ||
26 | from bb.parse import ParseError | 25 | from bb.parse import ParseError |
27 | 26 | ||
28 | #__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") | 27 | #__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") |
@@ -53,7 +52,7 @@ def localpath(fn, d): | |||
53 | localfn = fn | 52 | localfn = fn |
54 | return localfn | 53 | return localfn |
55 | 54 | ||
56 | def obtain(fn, data = bb.data.init()): | 55 | def obtain(fn, data): |
57 | import sys, bb | 56 | import sys, bb |
58 | fn = bb.data.expand(fn, data) | 57 | fn = bb.data.expand(fn, data) |
59 | localfn = bb.data.expand(localpath(fn, data), data) | 58 | localfn = bb.data.expand(localpath(fn, data), data) |
@@ -61,30 +60,30 @@ def obtain(fn, data = bb.data.init()): | |||
61 | if localfn != fn: | 60 | if localfn != fn: |
62 | dldir = bb.data.getVar('DL_DIR', data, 1) | 61 | dldir = bb.data.getVar('DL_DIR', data, 1) |
63 | if not dldir: | 62 | if not dldir: |
64 | debug(1, "obtain: DL_DIR not defined") | 63 | bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: DL_DIR not defined") |
65 | return localfn | 64 | return localfn |
66 | bb.mkdirhier(dldir) | 65 | bb.mkdirhier(dldir) |
67 | try: | 66 | try: |
68 | bb.fetch.init([fn]) | 67 | bb.fetch.init([fn]) |
69 | except bb.fetch.NoMethodError: | 68 | except bb.fetch.NoMethodError: |
70 | (type, value, traceback) = sys.exc_info() | 69 | (type, value, traceback) = sys.exc_info() |
71 | debug(1, "obtain: no method: %s" % value) | 70 | bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value) |
72 | return localfn | 71 | return localfn |
73 | 72 | ||
74 | try: | 73 | try: |
75 | bb.fetch.go(data) | 74 | bb.fetch.go(data) |
76 | except bb.fetch.MissingParameterError: | 75 | except bb.fetch.MissingParameterError: |
77 | (type, value, traceback) = sys.exc_info() | 76 | (type, value, traceback) = sys.exc_info() |
78 | debug(1, "obtain: missing parameters: %s" % value) | 77 | bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value) |
79 | return localfn | 78 | return localfn |
80 | except bb.fetch.FetchError: | 79 | except bb.fetch.FetchError: |
81 | (type, value, traceback) = sys.exc_info() | 80 | (type, value, traceback) = sys.exc_info() |
82 | debug(1, "obtain: failed: %s" % value) | 81 | bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: failed: %s" % value) |
83 | return localfn | 82 | return localfn |
84 | return localfn | 83 | return localfn |
85 | 84 | ||
86 | 85 | ||
87 | def include(oldfn, fn, data = bb.data.init(), error_out = False): | 86 | def include(oldfn, fn, data, error_out): |
88 | """ | 87 | """ |
89 | 88 | ||
90 | error_out If True a ParseError will be raised if the to be included | 89 | error_out If True a ParseError will be raised if the to be included |
@@ -101,10 +100,10 @@ def include(oldfn, fn, data = bb.data.init(), error_out = False): | |||
101 | ret = handle(fn, data, True) | 100 | ret = handle(fn, data, True) |
102 | except IOError: | 101 | except IOError: |
103 | if error_out: | 102 | if error_out: |
104 | raise ParseError("Could not include required file %(fn)s" % vars() ) | 103 | raise ParseError("Could not %(error_out)s file %(fn)s" % vars() ) |
105 | debug(2, "CONF file '%s' not found" % fn) | 104 | bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn) |
106 | 105 | ||
107 | def handle(fn, data = bb.data.init(), include = 0): | 106 | def handle(fn, data, include = 0): |
108 | if include: | 107 | if include: |
109 | inc_string = "including" | 108 | inc_string = "including" |
110 | else: | 109 | else: |
@@ -129,13 +128,13 @@ def handle(fn, data = bb.data.init(), include = 0): | |||
129 | if os.access(currname, os.R_OK): | 128 | if os.access(currname, os.R_OK): |
130 | f = open(currname, 'r') | 129 | f = open(currname, 'r') |
131 | abs_fn = currname | 130 | abs_fn = currname |
132 | debug(1, "CONF %s %s" % (inc_string, currname)) | 131 | bb.msg.debug(2, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string, currname)) |
133 | break | 132 | break |
134 | if f is None: | 133 | if f is None: |
135 | raise IOError("file '%s' not found" % fn) | 134 | raise IOError("file '%s' not found" % fn) |
136 | else: | 135 | else: |
137 | f = open(fn,'r') | 136 | f = open(fn,'r') |
138 | debug(1, "CONF %s %s" % (inc_string,fn)) | 137 | bb.msg.debug(1, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string,fn)) |
139 | abs_fn = fn | 138 | abs_fn = fn |
140 | 139 | ||
141 | if include: | 140 | if include: |
@@ -161,7 +160,7 @@ def handle(fn, data = bb.data.init(), include = 0): | |||
161 | bb.data.setVar('FILE', oldfile, data) | 160 | bb.data.setVar('FILE', oldfile, data) |
162 | return data | 161 | return data |
163 | 162 | ||
164 | def feeder(lineno, s, fn, data = bb.data.init()): | 163 | def feeder(lineno, s, fn, data): |
165 | m = __config_regexp__.match(s) | 164 | m = __config_regexp__.match(s) |
166 | if m: | 165 | if m: |
167 | groupd = m.groupdict() | 166 | groupd = m.groupdict() |
@@ -185,7 +184,7 @@ def feeder(lineno, s, fn, data = bb.data.init()): | |||
185 | else: | 184 | else: |
186 | val = groupd["value"] | 185 | val = groupd["value"] |
187 | if 'flag' in groupd and groupd['flag'] != None: | 186 | if 'flag' in groupd and groupd['flag'] != None: |
188 | # bb.note("setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val)) | 187 | bb.msg.debug(3, bb.msg.domain.Parsing, "setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val)) |
189 | bb.data.setVarFlag(key, groupd['flag'], val, data) | 188 | bb.data.setVarFlag(key, groupd['flag'], val, data) |
190 | else: | 189 | else: |
191 | bb.data.setVar(key, val, data) | 190 | bb.data.setVar(key, val, data) |
@@ -194,14 +193,14 @@ def feeder(lineno, s, fn, data = bb.data.init()): | |||
194 | m = __include_regexp__.match(s) | 193 | m = __include_regexp__.match(s) |
195 | if m: | 194 | if m: |
196 | s = bb.data.expand(m.group(1), data) | 195 | s = bb.data.expand(m.group(1), data) |
197 | # debug(2, "CONF %s:%d: including %s" % (fn, lineno, s)) | 196 | bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (fn, lineno, s)) |
198 | include(fn, s, data) | 197 | include(fn, s, data, False) |
199 | return | 198 | return |
200 | 199 | ||
201 | m = __require_regexp__.match(s) | 200 | m = __require_regexp__.match(s) |
202 | if m: | 201 | if m: |
203 | s = bb.data.expand(m.group(1), data) | 202 | s = bb.data.expand(m.group(1), data) |
204 | include(fn, s, data, True) | 203 | include(fn, s, data, "include required") |
205 | return | 204 | return |
206 | 205 | ||
207 | raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); | 206 | raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); |
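
The ConfHandler changes above make include() take a mandatory error_out argument and reuse it as the verb of the error message: feeder() passes False for include directives (a missing file is merely logged) and the string "include required" for require directives (a missing file raises ParseError, producing "Could not include required file ..."). A minimal sketch of this tolerant-vs-strict pattern, with a hypothetical load_file() standing in for the real handle():

    class ParseError(Exception):
        pass

    def load_file(fn, data):
        open(fn).close()  # stand-in for the real parse; fails if missing

    def include(oldfn, fn, data, error_out):
        try:
            load_file(fn, data)
        except IOError:
            if error_out:
                # error_out doubles as the verb in the message, e.g.
                # "Could not include required file mandatory.conf"
                raise ParseError("Could not %s file %s" % (error_out, fn))
            print("CONF file '%s' not found" % fn)  # plain include: tolerated

    data = {}
    include("a.conf", "optional.conf", data, False)
    # include("a.conf", "mandatory.conf", data, "include required")  # raises
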
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py new file mode 100644 index 0000000000..3cb7cc1f07 --- /dev/null +++ b/bitbake/lib/bb/providers.py | |||
@@ -0,0 +1,209 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | # | ||
5 | # Copyright (C) 2003, 2004 Chris Larson | ||
6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
7 | # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer | ||
8 | # Copyright (C) 2005 Holger Hans Peter Freyther | ||
9 | # Copyright (C) 2005 ROAD GmbH | ||
10 | # Copyright (C) 2006 Richard Purdie | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify it under | ||
13 | # the terms of the GNU General Public License as published by the Free Software | ||
14 | # Foundation; either version 2 of the License, or (at your option) any later | ||
15 | # version. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, but WITHOUT | ||
18 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
19 | # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along with | ||
22 | # this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
23 | # Place, Suite 330, Boston, MA 02111-1307 USA. | ||
24 | |||
25 | import os, re | ||
26 | from bb import data, utils | ||
27 | import bb | ||
28 | |||
29 | class NoProvider(Exception): | ||
30 | """Exception raised when no provider of a build dependency can be found""" | ||
31 | |||
32 | class NoRProvider(Exception): | ||
33 | """Exception raised when no provider of a runtime dependency can be found""" | ||
34 | |||
35 | def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | ||
36 | """ | ||
37 | If there is a PREFERRED_VERSION, find the highest-priority bbfile | ||
38 | providing that version. If not, find the latest version provided by | ||
39 | a bbfile in the highest-priority set. | ||
40 | """ | ||
41 | if not pkg_pn: | ||
42 | pkg_pn = dataCache.pkg_pn | ||
43 | |||
44 | files = pkg_pn[pn] | ||
45 | priorities = {} | ||
46 | for f in files: | ||
47 | priority = dataCache.bbfile_priority[f] | ||
48 | if priority not in priorities: | ||
49 | priorities[priority] = [] | ||
50 | priorities[priority].append(f) | ||
51 | p_list = priorities.keys() | ||
52 | p_list.sort(lambda a, b: a - b) | ||
53 | tmp_pn = [] | ||
54 | for p in p_list: | ||
55 | tmp_pn = [priorities[p]] + tmp_pn | ||
56 | |||
57 | preferred_file = None | ||
58 | |||
59 | localdata = data.createCopy(cfgData) | ||
60 | bb.data.setVar('OVERRIDES', "%s:%s" % (pn, data.getVar('OVERRIDES', localdata)), localdata) | ||
61 | bb.data.update_data(localdata) | ||
62 | |||
63 | preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True) | ||
64 | if preferred_v: | ||
65 | m = re.match('(.*)_(.*)', preferred_v) | ||
66 | if m: | ||
67 | preferred_v = m.group(1) | ||
68 | preferred_r = m.group(2) | ||
69 | else: | ||
70 | preferred_r = None | ||
71 | |||
72 | for file_set in tmp_pn: | ||
73 | for f in file_set: | ||
74 | pv,pr = dataCache.pkg_pvpr[f] | ||
75 | if preferred_v == pv and (preferred_r == pr or preferred_r == None): | ||
76 | preferred_file = f | ||
77 | preferred_ver = (pv, pr) | ||
78 | break | ||
79 | if preferred_file: | ||
80 | break; | ||
81 | if preferred_r: | ||
82 | pv_str = '%s-%s' % (preferred_v, preferred_r) | ||
83 | else: | ||
84 | pv_str = preferred_v | ||
85 | itemstr = "" | ||
86 | if item: | ||
87 | itemstr = " (for item %s)" % item | ||
88 | if preferred_file is None: | ||
89 | bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr)) | ||
90 | else: | ||
91 | bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr)) | ||
92 | |||
93 | del localdata | ||
94 | |||
95 | # get highest priority file set | ||
96 | files = tmp_pn[0] | ||
97 | latest = None | ||
98 | latest_p = 0 | ||
99 | latest_f = None | ||
100 | for file_name in files: | ||
101 | pv,pr = dataCache.pkg_pvpr[file_name] | ||
102 | dp = dataCache.pkg_dp[file_name] | ||
103 | |||
104 | if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pv, pr)) < 0)) or (dp > latest_p): | ||
105 | latest = (pv, pr) | ||
106 | latest_f = file_name | ||
107 | latest_p = dp | ||
108 | if preferred_file is None: | ||
109 | preferred_file = latest_f | ||
110 | preferred_ver = latest | ||
111 | |||
112 | return (latest,latest_f,preferred_ver, preferred_file) | ||
113 | |||
114 | # | ||
115 | # RP - build_cache_fail needs to move elsewhere | ||
116 | # | ||
117 | def filterProviders(providers, item, cfgData, dataCache, build_cache_fail = {}): | ||
118 | """ | ||
119 | Take a list of providers and filter/reorder according to the | ||
120 | environment variables and previous build results | ||
121 | """ | ||
122 | eligible = [] | ||
123 | preferred_versions = {} | ||
124 | |||
125 | # Collate providers by PN | ||
126 | pkg_pn = {} | ||
127 | for p in providers: | ||
128 | pn = dataCache.pkg_fn[p] | ||
129 | if pn not in pkg_pn: | ||
130 | pkg_pn[pn] = [] | ||
131 | pkg_pn[pn].append(p) | ||
132 | |||
133 | bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys())) | ||
134 | |||
135 | for pn in pkg_pn.keys(): | ||
136 | preferred_versions[pn] = bb.providers.findBestProvider(pn, cfgData, dataCache, pkg_pn, item)[2:4] | ||
137 | eligible.append(preferred_versions[pn][1]) | ||
138 | |||
139 | |||
140 | for p in eligible: | ||
141 | if p in build_cache_fail: | ||
142 | bb.msg.debug(1, bb.msg.domain.Provider, "rejecting already-failed %s" % p) | ||
143 | eligible.remove(p) | ||
144 | |||
145 | if len(eligible) == 0: | ||
146 | bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item) | ||
147 | return 0 | ||
148 | |||
149 | |||
150 | # If pn == item, give it a slight default preference | ||
151 | # This means PREFERRED_PROVIDER_foobar defaults to foobar if available | ||
152 | for p in providers: | ||
153 | pn = dataCache.pkg_fn[p] | ||
154 | if pn != item: | ||
155 | continue | ||
156 | (newvers, fn) = preferred_versions[pn] | ||
157 | if not fn in eligible: | ||
158 | continue | ||
159 | eligible.remove(fn) | ||
160 | eligible = [fn] + eligible | ||
161 | |||
162 | # look to see if one of them is already staged, or marked as preferred. | ||
163 | # if so, bump it to the head of the queue | ||
164 | for p in providers: | ||
165 | pn = dataCache.pkg_fn[p] | ||
166 | pv, pr = dataCache.pkg_pvpr[p] | ||
167 | |||
168 | stamp = '%s.do_populate_staging' % dataCache.stamp[p] | ||
169 | if os.path.exists(stamp): | ||
170 | (newvers, fn) = preferred_versions[pn] | ||
171 | if not fn in eligible: | ||
172 | # package was made ineligible by already-failed check | ||
173 | continue | ||
174 | oldver = "%s-%s" % (pv, pr) | ||
175 | newver = '-'.join(newvers) | ||
176 | if (newver != oldver): | ||
177 | extra_chat = "%s (%s) already staged but upgrading to %s to satisfy %s" % (pn, oldver, newver, item) | ||
178 | else: | ||
179 | extra_chat = "Selecting already-staged %s (%s) to satisfy %s" % (pn, oldver, item) | ||
180 | |||
181 | bb.msg.note(2, bb.msg.domain.Provider, "%s" % extra_chat) | ||
182 | eligible.remove(fn) | ||
183 | eligible = [fn] + eligible | ||
184 | break | ||
185 | |||
186 | return eligible | ||
187 | |||
188 | def getRuntimeProviders(dataCache, rdepend): | ||
189 | """ | ||
190 | Return any providers of runtime dependency | ||
191 | """ | ||
192 | rproviders = [] | ||
193 | |||
194 | if rdepend in dataCache.rproviders: | ||
195 | rproviders += dataCache.rproviders[rdepend] | ||
196 | |||
197 | if rdepend in dataCache.packages: | ||
198 | rproviders += dataCache.packages[rdepend] | ||
199 | |||
200 | if rproviders: | ||
201 | return rproviders | ||
202 | |||
203 | # Only search dynamic packages if we can't find anything in other variables | ||
204 | for pattern in dataCache.packages_dynamic: | ||
205 | regexp = re.compile(pattern) | ||
206 | if regexp.match(rdepend): | ||
207 | rproviders += dataCache.packages_dynamic[pattern] | ||
208 | |||
209 | return rproviders | ||
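
findBestProvider() above makes a two-key decision: candidate .bb files are bucketed by bbfile priority, and within the winning bucket the latest (PV, PR) pair is taken unless PREFERRED_VERSION pins a specific version, in which case the highest-priority file providing that version wins. A condensed sketch of that selection order, using plain tuples in place of the real dataCache and simple tuple comparison in place of utils.vercmp (pick_provider and the sample data are hypothetical):

    def pick_provider(candidates, preferred_version=None):
        # candidates: list of (priority, (pv, pr), filename)
        if preferred_version:
            pinned = [c for c in candidates if c[1][0] == preferred_version]
            if pinned:
                # highest-priority file providing the pinned version
                return max(pinned)[2]
            print("preferred version %s not available" % preferred_version)
        # no (usable) pin: highest priority first, then latest version
        return max(candidates)[2]

    candidates = [
        (5, ("1.0", "r0"), "foo_1.0.bb"),
        (5, ("1.2", "r1"), "foo_1.2.bb"),
        (10, ("0.9", "r0"), "overlay/foo_0.9.bb"),  # higher-priority layer
    ]
    print(pick_provider(candidates))          # overlay/foo_0.9.bb
    print(pick_provider(candidates, "1.2"))   # foo_1.2.bb
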
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py new file mode 100644 index 0000000000..3dde9a9ffb --- /dev/null +++ b/bitbake/lib/bb/runqueue.py | |||
@@ -0,0 +1,491 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | BitBake 'RunQueue' implementation | ||
6 | |||
7 | Handles preparation and execution of a queue of tasks | ||
8 | |||
9 | Copyright (C) 2006 Richard Purdie | ||
10 | |||
11 | This program is free software; you can redistribute it and/or modify it under | ||
12 | the terms of the GNU General Public License version 2 as published by the Free | ||
13 | Software Foundation | ||
14 | |||
15 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
16 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
17 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
18 | |||
19 | You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. | ||
20 | """ | ||
21 | |||
22 | from bb import msg, data, fetch, event, mkdirhier, utils | ||
23 | from sets import Set | ||
24 | import bb, os, sys | ||
25 | |||
26 | class TaskFailure(Exception): | ||
27 | """Exception raised when a task in a runqueue fails""" | ||
28 | |||
29 | def __init__(self, fnid, fn, taskname): | ||
30 | self.args = fnid, fn, taskname | ||
31 | |||
32 | class RunQueue: | ||
33 | """ | ||
34 | BitBake Run Queue implementation | ||
35 | """ | ||
36 | def __init__(self): | ||
37 | self.reset_runqueue() | ||
38 | |||
39 | def reset_runqueue(self): | ||
40 | self.runq_fnid = [] | ||
41 | self.runq_task = [] | ||
42 | self.runq_depends = [] | ||
43 | self.runq_revdeps = [] | ||
44 | self.runq_weight = [] | ||
45 | self.prio_map = [] | ||
46 | |||
47 | def get_user_idstring(self, task, taskData): | ||
48 | fn = taskData.fn_index[self.runq_fnid[task]] | ||
49 | taskname = self.runq_task[task] | ||
50 | return "%s, %s" % (fn, taskname) | ||
51 | |||
52 | def prepare_runqueue(self, cfgData, dataCache, taskData, targets): | ||
53 | """ | ||
54 | Turn a set of taskData into a RunQueue and compute data needed | ||
55 | to optimise the execution order. | ||
56 | targets is a list of paired values: a provider name and the task to run | ||
57 | """ | ||
58 | |||
59 | depends = [] | ||
60 | runq_weight1 = [] | ||
61 | runq_build = [] | ||
62 | runq_done = [] | ||
63 | |||
64 | bb.msg.note(1, bb.msg.domain.RunQueue, "Preparing Runqueue") | ||
65 | |||
66 | for task in range(len(taskData.tasks_name)): | ||
67 | fnid = taskData.tasks_fnid[task] | ||
68 | fn = taskData.fn_index[fnid] | ||
69 | task_deps = dataCache.task_deps[fn] | ||
70 | |||
71 | if fnid not in taskData.failed_fnids: | ||
72 | |||
73 | depends = taskData.tasks_tdepends[task] | ||
74 | |||
75 | # Resolve Depends | ||
76 | if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']: | ||
77 | taskname = task_deps['deptask'][taskData.tasks_name[task]] | ||
78 | for depid in taskData.depids[fnid]: | ||
79 | if depid in taskData.build_targets: | ||
80 | depdata = taskData.build_targets[depid][0] | ||
81 | if depdata: | ||
82 | dep = taskData.fn_index[depdata] | ||
83 | depends.append(taskData.gettask_id(dep, taskname)) | ||
84 | |||
85 | # Resolve Runtime Depends | ||
86 | if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']: | ||
87 | taskname = task_deps['rdeptask'][taskData.tasks_name[task]] | ||
88 | for depid in taskData.rdepids[fnid]: | ||
89 | if depid in taskData.run_targets: | ||
90 | depdata = taskData.run_targets[depid][0] | ||
91 | if depdata: | ||
92 | dep = taskData.fn_index[depdata] | ||
93 | depends.append(taskData.gettask_id(dep, taskname)) | ||
94 | |||
95 | def add_recursive_build(depid): | ||
96 | """ | ||
97 | Add build depends of depid to depends | ||
98 | (if we've not seen it before) | ||
99 | (calls itself recursively) | ||
100 | """ | ||
101 | if depid in dep_seen: | ||
102 | return | ||
103 | dep_seen.append(depid) | ||
104 | if depid in taskData.build_targets: | ||
105 | depdata = taskData.build_targets[depid][0] | ||
106 | if depdata: | ||
107 | dep = taskData.fn_index[depdata] | ||
108 | taskid = taskData.gettask_id(dep, taskname) | ||
109 | depends.append(taskid) | ||
110 | fnid = taskData.tasks_fnid[taskid] | ||
111 | for nextdepid in taskData.depids[fnid]: | ||
112 | if nextdepid not in dep_seen: | ||
113 | add_recursive_build(nextdepid) | ||
114 | for nextdepid in taskData.rdepids[fnid]: | ||
115 | if nextdepid not in rdep_seen: | ||
116 | add_recursive_run(nextdepid) | ||
117 | |||
118 | def add_recursive_run(rdepid): | ||
119 | """ | ||
120 | Add runtime depends of rdepid to depends | ||
121 | (if we've not seen it before) | ||
122 | (calls itself recursively) | ||
123 | """ | ||
124 | if rdepid in rdep_seen: | ||
125 | return | ||
126 | rdep_seen.append(rdepid) | ||
127 | if rdepid in taskData.run_targets: | ||
128 | depdata = taskData.run_targets[rdepid][0] | ||
129 | if depdata: | ||
130 | dep = taskData.fn_index[depdata] | ||
131 | taskid = taskData.gettask_id(dep, taskname) | ||
132 | depends.append(taskid) | ||
133 | fnid = taskData.tasks_fnid[taskid] | ||
134 | for nextdepid in taskData.depids[fnid]: | ||
135 | if nextdepid not in dep_seen: | ||
136 | add_recursive_build(nextdepid) | ||
137 | for nextdepid in taskData.rdepids[fnid]: | ||
138 | if nextdepid not in rdep_seen: | ||
139 | add_recursive_run(nextdepid) | ||
140 | |||
141 | |||
142 | # Resolve Recursive Runtime Depends | ||
143 | # Also includes all Build Depends (and their runtime depends) | ||
144 | if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']: | ||
145 | dep_seen = [] | ||
146 | rdep_seen = [] | ||
147 | taskname = task_deps['recrdeptask'][taskData.tasks_name[task]] | ||
148 | for depid in taskData.depids[fnid]: | ||
149 | add_recursive_build(depid) | ||
150 | for rdepid in taskData.rdepids[fnid]: | ||
151 | add_recursive_run(rdepid) | ||
152 | |||
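add_recursive_build and add_recursive_run form a mutually recursive depth-first walk whose seen-lists stop revisits. Reduced to a single graph and a set, the core is (illustrative sketch, not the BitBake API):

    def transitive_closure(graph, root):
        # graph: node -> list of direct dependencies.
        # Collect everything reachable from root exactly once,
        # mirroring the dep_seen/rdep_seen guards above.
        seen = set()
        def walk(node):
            if node in seen:
                return
            seen.add(node)
            for child in graph.get(node, []):
                walk(child)
        walk(root)
        seen.discard(root)
        return seen

    # transitive_closure({"a": ["b"], "b": ["c"], "c": []}, "a")
    # -> {"b", "c"}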
153 | # Prune self-references | ||
154 | if task in depends: | ||
155 | newdep = [] | ||
156 | bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)) | ||
157 | for dep in depends: | ||
158 | if task != dep: | ||
159 | newdep.append(dep) | ||
160 | depends = newdep | ||
161 | |||
162 | |||
163 | self.runq_fnid.append(taskData.tasks_fnid[task]) | ||
164 | self.runq_task.append(taskData.tasks_name[task]) | ||
165 | self.runq_depends.append(Set(depends)) | ||
166 | self.runq_revdeps.append(Set()) | ||
167 | self.runq_weight.append(0) | ||
168 | |||
169 | runq_weight1.append(0) | ||
170 | runq_build.append(0) | ||
171 | runq_done.append(0) | ||
172 | |||
173 | bb.msg.note(2, bb.msg.domain.RunQueue, "Marking Active Tasks") | ||
174 | |||
175 | def mark_active(listid, depth): | ||
176 | """ | ||
177 | Mark an item as active along with its depends | ||
178 | (calls itself recursively) | ||
179 | """ | ||
180 | |||
181 | if runq_build[listid] == 1: | ||
182 | return | ||
183 | |||
184 | runq_build[listid] = 1 | ||
185 | |||
186 | depends = self.runq_depends[listid] | ||
187 | for depend in depends: | ||
188 | mark_active(depend, depth+1) | ||
189 | |||
190 | for target in targets: | ||
191 | targetid = taskData.getbuild_id(target[0]) | ||
192 | if targetid in taskData.failed_deps: | ||
193 | continue | ||
194 | |||
195 | if targetid not in taskData.build_targets: | ||
196 | continue | ||
197 | |||
198 | fnid = taskData.build_targets[targetid][0] | ||
199 | if fnid in taskData.failed_fnids: | ||
200 | continue | ||
201 | |||
202 | listid = taskData.tasks_lookup[fnid][target[1]] | ||
203 | |||
204 | mark_active(listid, 1) | ||
205 | |||
206 | # Prune inactive tasks | ||
207 | maps = [] | ||
208 | delcount = 0 | ||
209 | for listid in range(len(self.runq_fnid)): | ||
210 | if runq_build[listid-delcount] == 1: | ||
211 | maps.append(listid-delcount) | ||
212 | else: | ||
213 | del self.runq_fnid[listid-delcount] | ||
214 | del self.runq_task[listid-delcount] | ||
215 | del self.runq_depends[listid-delcount] | ||
216 | del self.runq_weight[listid-delcount] | ||
217 | del runq_weight1[listid-delcount] | ||
218 | del runq_build[listid-delcount] | ||
219 | del runq_done[listid-delcount] | ||
220 | del self.runq_revdeps[listid-delcount] | ||
221 | delcount = delcount + 1 | ||
222 | maps.append(-1) | ||
223 | |||
224 | if len(self.runq_fnid) == 0: | ||
225 | if not taskData.abort: | ||
226 | bb.msg.note(1, bb.msg.domain.RunQueue, "All possible tasks have been run but build incomplete (--continue mode). See errors above for incomplete tasks.") | ||
227 | return | ||
228 | bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.") | ||
229 | |||
230 | bb.msg.note(2, bb.msg.domain.RunQueue, "Pruned %s inactive tasks, %s left" % (delcount, len(self.runq_fnid))) | ||
231 | |||
232 | for listid in range(len(self.runq_fnid)): | ||
233 | newdeps = [] | ||
234 | origdeps = self.runq_depends[listid] | ||
235 | for origdep in origdeps: | ||
236 | if maps[origdep] == -1: | ||
237 | bb.msg.fatal(bb.msg.domain.RunQueue, "Invalid mapping - Should never happen!") | ||
238 | newdeps.append(maps[origdep]) | ||
239 | self.runq_depends[listid] = Set(newdeps) | ||
240 | |||
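The pruning pass compacts several parallel lists while building maps[old] = new (or -1 for deleted entries) so the dependency sets can be rewritten afterwards. The same bookkeeping over a single list (hypothetical helper, not part of the patch):

    def prune_and_remap(items, keep):
        # Compact items, recording old->new index; -1 marks deletions,
        # exactly the role of the maps[] table above.
        maps, kept = [], []
        for idx, item in enumerate(items):
            if keep[idx]:
                maps.append(len(kept))
                kept.append(item)
            else:
                maps.append(-1)
        return kept, maps

    # prune_and_remap(["t0", "t1", "t2"], [1, 0, 1])
    # -> (["t0", "t2"], [0, -1, 1])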
241 | bb.msg.note(2, bb.msg.domain.RunQueue, "Assign Weightings") | ||
242 | |||
243 | for listid in range(len(self.runq_fnid)): | ||
244 | for dep in self.runq_depends[listid]: | ||
245 | self.runq_revdeps[dep].add(listid) | ||
246 | |||
247 | endpoints = [] | ||
248 | for listid in range(len(self.runq_fnid)): | ||
249 | revdeps = self.runq_revdeps[listid] | ||
250 | if len(revdeps) == 0: | ||
251 | runq_done[listid] = 1 | ||
252 | self.runq_weight[listid] = 1 | ||
253 | endpoints.append(listid) | ||
254 | for dep in revdeps: | ||
255 | if dep in self.runq_depends[listid]: | ||
256 | #self.dump_data(taskData) | ||
257 | bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep] , taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid])) | ||
258 | runq_weight1[listid] = len(revdeps) | ||
259 | |||
260 | bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints)) | ||
261 | |||
262 | while 1: | ||
263 | next_points = [] | ||
264 | for listid in endpoints: | ||
265 | for revdep in self.runq_depends[listid]: | ||
266 | self.runq_weight[revdep] = self.runq_weight[revdep] + self.runq_weight[listid] | ||
267 | runq_weight1[revdep] = runq_weight1[revdep] - 1 | ||
268 | if runq_weight1[revdep] == 0: | ||
269 | next_points.append(revdep) | ||
270 | runq_done[revdep] = 1 | ||
271 | endpoints = next_points | ||
272 | if len(next_points) == 0: | ||
273 | break | ||
274 | |||
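The weighting pass is a reverse topological sweep: tasks with no reverse dependencies (the endpoints) get weight 1, and each processed task pushes its weight down onto its dependencies, which become processable once all of their dependents are done. A compact restatement of the same algorithm (sketch; plain indices stand in for task ids):

    def compute_weights(deps):
        # deps[i] = set of task indices that task i depends on.
        n = len(deps)
        revdeps = [set() for _ in range(n)]
        for i in range(n):
            for d in deps[i]:
                revdeps[d].add(i)
        weight = [0] * n
        pending = [len(revdeps[i]) for i in range(n)]
        frontier = [i for i in range(n) if pending[i] == 0]
        for i in frontier:
            weight[i] = 1                     # the endpoints above
        while frontier:
            next_points = []
            for i in frontier:
                for d in deps[i]:
                    weight[d] += weight[i]    # push weight toward roots
                    pending[d] -= 1
                    if pending[d] == 0:
                        next_points.append(d)
            frontier = next_points
        return weight

    # compute_weights([set(), {0}, {0}, {1, 2}]) -> [2, 1, 1, 1]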
275 | # Sanity Checks | ||
276 | for task in range(len(self.runq_fnid)): | ||
277 | if runq_done[task] == 0: | ||
278 | seen = [] | ||
279 | deps_seen = [] | ||
280 | def print_chain(taskid, finish): | ||
281 | seen.append(taskid) | ||
282 | for revdep in self.runq_revdeps[taskid]: | ||
283 | if runq_done[revdep] == 0 and revdep not in seen and not finish: | ||
284 | bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) (depends: %s)" % (revdep, self.get_user_idstring(revdep, taskData), self.runq_depends[revdep])) | ||
285 | if revdep in deps_seen: | ||
286 | bb.msg.error(bb.msg.domain.RunQueue, "Chain ends at Task %s (%s)" % (revdep, self.get_user_idstring(revdep, taskData))) | ||
287 | finish = True | ||
288 | return | ||
289 | for dep in self.runq_depends[revdep]: | ||
290 | deps_seen.append(dep) | ||
291 | print_chain(revdep, finish) | ||
292 | print_chain(task, False) | ||
293 | bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) not processed!\nThis is probably a circular dependency (the chain might be printed above)." % (task, self.get_user_idstring(task, taskData))) | ||
294 | if runq_weight1[task] != 0: | ||
295 | bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) count not zero!" % (task, self.get_user_idstring(task, taskData))) | ||
296 | |||
297 | # Make a weight sorted map | ||
298 | from copy import deepcopy | ||
299 | |||
300 | sortweight = deepcopy(self.runq_weight) | ||
301 | sortweight.sort() | ||
302 | copyweight = deepcopy(self.runq_weight) | ||
303 | self.prio_map = [] | ||
304 | |||
305 | for weight in sortweight: | ||
306 | idx = copyweight.index(weight) | ||
307 | self.prio_map.append(idx) | ||
308 | copyweight[idx] = -1 | ||
309 | self.prio_map.reverse() | ||
310 | |||
311 | #self.dump_data(taskData) | ||
312 | |||
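The copyweight/index loop is effectively an argsort of the weights, heaviest task first. The same ordering in one expression (sketch; ties may order slightly differently than the first-match loop above):

    def weight_sorted_map(weights):
        # Task indices by descending weight, like prio_map.
        return sorted(range(len(weights)),
                      key=lambda i: weights[i], reverse=True)

    # weight_sorted_map([2, 1, 1, 1]) -> [0, 1, 2, 3]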
313 | def execute_runqueue(self, cooker, cfgData, dataCache, taskData, runlist): | ||
314 | """ | ||
315 | Run the tasks in a queue prepared by prepare_runqueue | ||
316 | Upon failure, optionally try to recover the build using any alternate providers | ||
317 | (if the abort on failure configuration option isn't set) | ||
318 | """ | ||
319 | |||
320 | failures = 0 | ||
321 | while 1: | ||
322 | try: | ||
323 | self.execute_runqueue_internal(cooker, cfgData, dataCache, taskData) | ||
324 | return failures | ||
325 | except bb.runqueue.TaskFailure, (fnid, fn, taskname): | ||
326 | if taskData.abort: | ||
327 | raise | ||
328 | taskData.fail_fnid(fnid) | ||
329 | self.reset_runqueue() | ||
330 | self.prepare_runqueue(cfgData, dataCache, taskData, runlist) | ||
331 | failures = failures + 1 | ||
332 | |||
333 | def execute_runqueue_internal(self, cooker, cfgData, dataCache, taskData): | ||
334 | """ | ||
335 | Run the tasks in a queue prepared by prepare_runqueue | ||
336 | """ | ||
337 | |||
338 | bb.msg.note(1, bb.msg.domain.RunQueue, "Executing runqueue") | ||
339 | |||
340 | runq_buildable = [] | ||
341 | runq_running = [] | ||
342 | runq_complete = [] | ||
343 | active_builds = 0 | ||
344 | build_pids = {} | ||
345 | |||
346 | if len(self.runq_fnid) == 0: | ||
347 | # nothing to do | ||
348 | return | ||
349 | |||
350 | def get_next_task(data): | ||
351 | """ | ||
352 | Return the id of the highest priority task that is buildable | ||
353 | """ | ||
354 | for task1 in range(len(data.runq_fnid)): | ||
355 | task = data.prio_map[task1] | ||
356 | if runq_running[task] == 1: | ||
357 | continue | ||
358 | if runq_buildable[task] == 1: | ||
359 | return task | ||
360 | return None | ||
361 | |||
362 | def task_complete(data, task): | ||
363 | """ | ||
364 | Mark a task as completed | ||
365 | Look at the reverse dependencies and mark any task with | ||
366 | completed dependencies as buildable | ||
367 | """ | ||
368 | runq_complete[task] = 1 | ||
369 | for revdep in data.runq_revdeps[task]: | ||
370 | if runq_running[revdep] == 1: | ||
371 | continue | ||
372 | if runq_buildable[revdep] == 1: | ||
373 | continue | ||
374 | alldeps = 1 | ||
375 | for dep in data.runq_depends[revdep]: | ||
376 | if runq_complete[dep] != 1: | ||
377 | alldeps = 0 | ||
378 | if alldeps == 1: | ||
379 | runq_buildable[revdep] = 1 | ||
380 | fn = taskData.fn_index[self.runq_fnid[revdep]] | ||
381 | taskname = self.runq_task[revdep] | ||
382 | bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname)) | ||
383 | |||
384 | # Mark initial buildable tasks | ||
385 | for task in range(len(self.runq_fnid)): | ||
386 | runq_running.append(0) | ||
387 | runq_complete.append(0) | ||
388 | if len(self.runq_depends[task]) == 0: | ||
389 | runq_buildable.append(1) | ||
390 | else: | ||
391 | runq_buildable.append(0) | ||
392 | |||
393 | |||
394 | number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData) or 1) | ||
395 | |||
396 | try: | ||
397 | while 1: | ||
398 | task = get_next_task(self) | ||
399 | if task is not None: | ||
400 | fn = taskData.fn_index[self.runq_fnid[task]] | ||
401 | taskname = self.runq_task[task] | ||
402 | |||
403 | if bb.build.stamp_is_current_cache(dataCache, fn, taskname): | ||
404 | targetid = taskData.gettask_id(fn, taskname) | ||
405 | if not (targetid in taskData.external_targets and cooker.configuration.force): | ||
406 | bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task, taskData))) | ||
407 | runq_running[task] = 1 | ||
408 | task_complete(self, task) | ||
409 | continue | ||
410 | |||
411 | bb.msg.debug(1, bb.msg.domain.RunQueue, "Running task %s (%s)" % (task, self.get_user_idstring(task, taskData))) | ||
412 | try: | ||
413 | pid = os.fork() | ||
414 | except OSError, e: | ||
415 | bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) | ||
416 | if pid == 0: | ||
417 | cooker.configuration.cmd = taskname[3:] | ||
418 | try: | ||
419 | cooker.tryBuild(fn, False) | ||
420 | except bb.build.EventException: | ||
421 | bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") | ||
422 | sys.exit(1) | ||
423 | except: | ||
424 | bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") | ||
425 | raise | ||
426 | sys.exit(0) | ||
427 | build_pids[pid] = task | ||
428 | runq_running[task] = 1 | ||
429 | active_builds = active_builds + 1 | ||
430 | if active_builds < number_tasks: | ||
431 | continue | ||
432 | if active_builds > 0: | ||
433 | result = os.waitpid(-1, 0) | ||
434 | active_builds = active_builds - 1 | ||
435 | task = build_pids[result[0]] | ||
436 | if result[1] != 0: | ||
437 | bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed" % (task, self.get_user_idstring(task, taskData))) | ||
438 | raise bb.runqueue.TaskFailure(self.runq_fnid[task], taskData.fn_index[self.runq_fnid[task]], self.runq_task[task]) | ||
439 | task_complete(self, task) | ||
440 | del build_pids[result[0]] | ||
441 | continue | ||
442 | break | ||
443 | except SystemExit: | ||
444 | raise | ||
445 | except: | ||
446 | bb.msg.error(bb.msg.domain.RunQueue, "Exception received") | ||
447 | while active_builds > 0: | ||
448 | bb.msg.note(1, bb.msg.domain.RunQueue, "Waiting for %s active tasks to finish" % active_builds) | ||
449 | tasknum = 1 | ||
450 | for k, v in build_pids.iteritems(): | ||
451 | bb.msg.note(1, bb.msg.domain.RunQueue, "%s: %s (%s)" % (tasknum, self.get_user_idstring(v, taskData), k)) | ||
452 | tasknum = tasknum + 1 | ||
453 | result = os.waitpid(-1, 0) | ||
454 | del build_pids[result[0]] | ||
455 | active_builds = active_builds - 1 | ||
456 | raise | ||
457 | |||
458 | # Sanity Checks | ||
459 | for task in range(len(self.runq_fnid)): | ||
460 | if runq_buildable[task] == 0: | ||
461 | bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task) | ||
462 | if runq_running[task] == 0: | ||
463 | bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task) | ||
464 | if runq_complete[task] == 0: | ||
465 | bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task) | ||
466 | |||
467 | return 0 | ||
468 | |||
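The process handling in execute_runqueue_internal is a classic Unix fork/waitpid pool: fork one child per task, throttle at BB_NUMBER_THREADS, and reap whichever child exits first. The bare pattern, stripped of the BitBake plumbing (Unix-only sketch):

    import os

    def run_parallel(commands, max_jobs=2):
        active, pids = 0, {}
        for cmd in commands:
            pid = os.fork()
            if pid == 0:                       # child: run and exit
                rc = os.system(cmd)
                os._exit(1 if rc else 0)
            pids[pid] = cmd                    # parent: track the child
            active += 1
            if active >= max_jobs:             # throttle: reap one first
                pid, status = os.waitpid(-1, 0)
                del pids[pid]
                active -= 1
        while active:                          # drain remaining children
            pid, status = os.waitpid(-1, 0)
            del pids[pid]
            active -= 1

    # run_parallel(["sleep 1", "sleep 1", "true"], max_jobs=2)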
469 | def dump_data(self, taskQueue): | ||
470 | """ | ||
471 | Dump some debug information on the internal data structures | ||
472 | """ | ||
473 | bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:") | ||
474 | for task in range(len(self.runq_fnid)): | ||
475 | bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, | ||
476 | taskQueue.fn_index[self.runq_fnid[task]], | ||
477 | self.runq_task[task], | ||
478 | self.runq_weight[task], | ||
479 | self.runq_depends[task], | ||
480 | self.runq_revdeps[task])) | ||
481 | |||
482 | bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:") | ||
483 | for task1 in range(len(self.runq_fnid)): | ||
484 | if task1 in self.prio_map: | ||
485 | task = self.prio_map[task1] | ||
486 | bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, | ||
487 | taskQueue.fn_index[self.runq_fnid[task]], | ||
488 | self.runq_task[task], | ||
489 | self.runq_weight[task], | ||
490 | self.runq_depends[task], | ||
491 | self.runq_revdeps[task])) | ||
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py index 93ad00d1ed..760c371d90 100644 --- a/bitbake/lib/bb/shell.py +++ b/bitbake/lib/bb/shell.py | |||
@@ -56,9 +56,8 @@ try: | |||
56 | set | 56 | set |
57 | except NameError: | 57 | except NameError: |
58 | from sets import Set as set | 58 | from sets import Set as set |
59 | import sys, os, imp, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch | 59 | import sys, os, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch |
60 | imp.load_source( "bitbake", os.path.dirname( sys.argv[0] )+"/bitbake" ) | 60 | from bb import data, parse, build, fatal, cache, taskdata, runqueue, providers as Providers |
61 | from bb import data, parse, build, fatal | ||
62 | 61 | ||
63 | __version__ = "0.5.3.1" | 62 | __version__ = "0.5.3.1" |
64 | __credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de> | 63 | __credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de> |
@@ -108,7 +107,7 @@ class BitBakeShellCommands: | |||
108 | preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 ) | 107 | preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 ) |
109 | if not preferred: preferred = item | 108 | if not preferred: preferred = item |
110 | try: | 109 | try: |
111 | lv, lf, pv, pf = cooker.findBestProvider( preferred ) | 110 | lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status, cooker.build_cache_fail) |
112 | except KeyError: | 111 | except KeyError: |
113 | if item in cooker.status.providers: | 112 | if item in cooker.status.providers: |
114 | pf = cooker.status.providers[item][0] | 113 | pf = cooker.status.providers[item][0] |
@@ -156,14 +155,39 @@ class BitBakeShellCommands: | |||
156 | cooker.build_cache = [] | 155 | cooker.build_cache = [] |
157 | cooker.build_cache_fail = [] | 156 | cooker.build_cache_fail = [] |
158 | 157 | ||
159 | for name in names: | 158 | td = taskdata.TaskData(cooker.configuration.abort) |
160 | try: | 159 | |
161 | cooker.buildProvider( name, data.getVar("BUILD_ALL_DEPS", cooker.configuration.data, True) ) | 160 | try: |
162 | except build.EventException, e: | 161 | tasks = [] |
163 | print "ERROR: Couldn't build '%s'" % name | 162 | for name in names: |
164 | global last_exception | 163 | td.add_provider(cooker.configuration.data, cooker.status, name) |
165 | last_exception = e | 164 | providers = td.get_provider(name) |
166 | break | 165 | |
166 | if len(providers) == 0: | ||
167 | raise Providers.NoProvider | ||
168 | |||
169 | tasks.append([name, "do_%s" % cooker.configuration.cmd]) | ||
170 | |||
171 | td.add_unresolved(cooker.configuration.data, cooker.status) | ||
172 | |||
173 | rq = runqueue.RunQueue() | ||
174 | rq.prepare_runqueue(cooker.configuration.data, cooker.status, td, tasks) | ||
175 | rq.execute_runqueue(cooker, cooker.configuration.data, cooker.status, td, tasks) | ||
176 | |||
177 | except Providers.NoProvider: | ||
178 | print "ERROR: No Provider" | ||
179 | global last_exception | ||
180 | last_exception = Providers.NoProvider | ||
181 | |||
182 | except runqueue.TaskFailure, (fnid, fn, taskname): | ||
183 | print "ERROR: '%s, %s' failed" % (fn, taskname) | ||
184 | global last_exception | ||
185 | last_exception = runqueue.TaskFailure | ||
186 | |||
187 | except build.EventException, e: | ||
188 | print "ERROR: Couldn't build '%s'" % names | ||
189 | global last_exception | ||
190 | last_exception = e | ||
167 | 191 | ||
168 | cooker.configuration.cmd = oldcmd | 192 | cooker.configuration.cmd = oldcmd |
169 | 193 | ||
@@ -233,7 +257,7 @@ class BitBakeShellCommands: | |||
233 | item = data.getVar('PN', bbfile_data, 1) | 257 | item = data.getVar('PN', bbfile_data, 1) |
234 | data.setVar( "_task_cache", [], bbfile_data ) # force | 258 | data.setVar( "_task_cache", [], bbfile_data ) # force |
235 | try: | 259 | try: |
236 | cooker.tryBuildPackage( os.path.abspath( bf ), item, bbfile_data ) | 260 | cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True ) |
237 | except build.EventException, e: | 261 | except build.EventException, e: |
238 | print "ERROR: Couldn't build '%s'" % name | 262 | print "ERROR: Couldn't build '%s'" % name |
239 | global last_exception | 263 | global last_exception |
@@ -255,8 +279,7 @@ class BitBakeShellCommands: | |||
255 | 279 | ||
256 | def fileRebuild( self, params ): | 280 | def fileRebuild( self, params ): |
257 | """Rebuild (clean & build) a .bb file""" | 281 | """Rebuild (clean & build) a .bb file""" |
258 | self.fileClean( params ) | 282 | self.fileBuild( params, "rebuild" ) |
259 | self.fileBuild( params ) | ||
260 | fileRebuild.usage = "<bbfile>" | 283 | fileRebuild.usage = "<bbfile>" |
261 | 284 | ||
262 | def fileReparse( self, params ): | 285 | def fileReparse( self, params ): |
@@ -265,14 +288,19 @@ class BitBakeShellCommands: | |||
265 | print "SHELL: Parsing '%s'" % bbfile | 288 | print "SHELL: Parsing '%s'" % bbfile |
266 | parse.update_mtime( bbfile ) | 289 | parse.update_mtime( bbfile ) |
267 | cooker.bb_cache.cacheValidUpdate(bbfile) | 290 | cooker.bb_cache.cacheValidUpdate(bbfile) |
268 | fromCache = cooker.bb_cache.loadData(bbfile, cooker) | 291 | fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data) |
269 | cooker.bb_cache.sync() | 292 | cooker.bb_cache.sync() |
270 | if False: #from Cache | 293 | if False: #fromCache: |
271 | print "SHELL: File has not been updated, not reparsing" | 294 | print "SHELL: File has not been updated, not reparsing" |
272 | else: | 295 | else: |
273 | print "SHELL: Parsed" | 296 | print "SHELL: Parsed" |
274 | fileReparse.usage = "<bbfile>" | 297 | fileReparse.usage = "<bbfile>" |
275 | 298 | ||
299 | def abort( self, params ): | ||
300 | """Toggle abort task execution flag (see bitbake -k)""" | ||
301 | cooker.configuration.abort = not cooker.configuration.abort | ||
302 | print "SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ) | ||
303 | |||
276 | def force( self, params ): | 304 | def force( self, params ): |
277 | """Toggle force task execution flag (see bitbake -f)""" | 305 | """Toggle force task execution flag (see bitbake -f)""" |
278 | cooker.configuration.force = not cooker.configuration.force | 306 | cooker.configuration.force = not cooker.configuration.force |
@@ -365,18 +393,14 @@ SRC_URI = "" | |||
365 | new.usage = "<directory> <filename>" | 393 | new.usage = "<directory> <filename>" |
366 | 394 | ||
367 | def pasteBin( self, params ): | 395 | def pasteBin( self, params ): |
368 | """Send a command + output buffer to http://pastebin.com""" | 396 | """Send a command + output buffer to the pastebin at http://rafb.net/paste""" |
369 | index = params[0] | 397 | index = params[0] |
370 | contents = self._shell.myout.buffer( int( index ) ) | 398 | contents = self._shell.myout.buffer( int( index ) ) |
371 | status, error, location = sendToPastebin( contents ) | 399 | sendToPastebin( "output of " + params[0], contents ) |
372 | if status == 302: | ||
373 | print "SHELL: Pasted to %s" % location | ||
374 | else: | ||
375 | print "ERROR: %s %s" % ( status, error ) | ||
376 | pasteBin.usage = "<index>" | 400 | pasteBin.usage = "<index>" |
377 | 401 | ||
378 | def pasteLog( self, params ): | 402 | def pasteLog( self, params ): |
379 | """Send the last event exception error log (if there is one) to http://oe.pastebin.com""" | 403 | """Send the last event exception error log (if there is one) to http://rafb.net/paste""" |
380 | if last_exception is None: | 404 | if last_exception is None: |
381 | print "SHELL: No Errors yet (Phew)..." | 405 | print "SHELL: No Errors yet (Phew)..." |
382 | else: | 406 | else: |
@@ -387,12 +411,8 @@ SRC_URI = "" | |||
387 | filename = filename.strip() | 411 | filename = filename.strip() |
388 | print "SHELL: Pasting log file to pastebin..." | 412 | print "SHELL: Pasting log file to pastebin..." |
389 | 413 | ||
390 | status, error, location = sendToPastebin( open( filename ).read() ) | 414 | file = open( filename ).read() |
391 | 415 | sendToPastebin( "contents of " + filename, file ) | |
392 | if status == 302: | ||
393 | print "SHELL: Pasted to %s" % location | ||
394 | else: | ||
395 | print "ERROR: %s %s" % ( status, error ) | ||
396 | 416 | ||
397 | def patch( self, params ): | 417 | def patch( self, params ): |
398 | """Execute 'patch' command on a providee""" | 418 | """Execute 'patch' command on a providee""" |
@@ -401,12 +421,13 @@ SRC_URI = "" | |||
401 | 421 | ||
402 | def parse( self, params ): | 422 | def parse( self, params ): |
403 | """(Re-)parse .bb files and calculate the dependency graph""" | 423 | """(Re-)parse .bb files and calculate the dependency graph""" |
404 | cooker.status = cooker.ParsingStatus() | 424 | cooker.status = cache.CacheData() |
405 | ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or "" | 425 | ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or "" |
406 | cooker.status.ignored_dependencies = set( ignore.split() ) | 426 | cooker.status.ignored_dependencies = set( ignore.split() ) |
407 | cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) ) | 427 | cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) ) |
408 | 428 | ||
409 | cooker.collect_bbfiles( cooker.myProgressCallback ) | 429 | (filelist, masked) = cooker.collect_bbfiles() |
430 | cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback) | ||
410 | cooker.buildDepgraph() | 431 | cooker.buildDepgraph() |
411 | global parsed | 432 | global parsed |
412 | parsed = True | 433 | parsed = True |
@@ -434,7 +455,7 @@ SRC_URI = "" | |||
434 | name, var = params | 455 | name, var = params |
435 | bbfile = self._findProvider( name ) | 456 | bbfile = self._findProvider( name ) |
436 | if bbfile is not None: | 457 | if bbfile is not None: |
437 | the_data = cooker.bb_cache.loadDataFull(bbfile, cooker) | 458 | the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data) |
438 | value = the_data.getVar( var, 1 ) | 459 | value = the_data.getVar( var, 1 ) |
439 | print value | 460 | print value |
440 | else: | 461 | else: |
@@ -538,7 +559,8 @@ SRC_URI = "" | |||
538 | if not preferred: preferred = item | 559 | if not preferred: preferred = item |
539 | 560 | ||
540 | try: | 561 | try: |
541 | lv, lf, pv, pf = cooker.findBestProvider( preferred ) | 562 | lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status, |
563 | cooker.build_cache_fail) | ||
542 | except KeyError: | 564 | except KeyError: |
543 | lv, lf, pv, pf = (None,)*4 | 565 | lv, lf, pv, pf = (None,)*4 |
544 | 566 | ||
@@ -565,24 +587,29 @@ def completeFilePath( bbfile ): | |||
565 | return key | 587 | return key |
566 | return bbfile | 588 | return bbfile |
567 | 589 | ||
568 | def sendToPastebin( content ): | 590 | def sendToPastebin( desc, content ): |
569 | """Send content to http://oe.pastebin.com""" | 591 | """Send content to http://oe.pastebin.com""" |
570 | mydata = {} | 592 | mydata = {} |
571 | mydata["parent_pid"] = "" | 593 | mydata["lang"] = "Plain Text" |
572 | mydata["format"] = "bash" | 594 | mydata["desc"] = desc |
573 | mydata["code2"] = content | 595 | mydata["cvt_tabs"] = "No" |
574 | mydata["paste"] = "Send" | 596 | mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" ) |
575 | mydata["poster"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" ) | 597 | mydata["text"] = content |
576 | params = urllib.urlencode( mydata ) | 598 | params = urllib.urlencode( mydata ) |
577 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"} | 599 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"} |
578 | 600 | ||
579 | conn = httplib.HTTPConnection( "oe.pastebin.com:80" ) | 601 | host = "rafb.net" |
580 | conn.request("POST", "/", params, headers ) | 602 | conn = httplib.HTTPConnection( "%s:80" % host ) |
603 | conn.request("POST", "/paste/paste.php", params, headers ) | ||
581 | 604 | ||
582 | response = conn.getresponse() | 605 | response = conn.getresponse() |
583 | conn.close() | 606 | conn.close() |
584 | 607 | ||
585 | return response.status, response.reason, response.getheader( "location" ) or "unknown" | 608 | if response.status == 302: |
609 | location = response.getheader( "location" ) or "unknown" | ||
610 | print "SHELL: Pasted to http://%s%s" % ( host, location ) | ||
611 | else: | ||
612 | print "ERROR: %s %s" % ( response.status, response.reason ) | ||
586 | 613 | ||
587 | def completer( text, state ): | 614 | def completer( text, state ): |
588 | """Return a possible readline completion""" | 615 | """Return a possible readline completion""" |
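For reference, the reworked sendToPastebin reduces to a form-encoded POST using the Python 2 stdlib names seen above (httplib/urllib); a skeleton of that flow (sketch):

    import httplib, urllib

    def post_form(host, path, fields):
        # x-www-form-urlencoded POST, as sendToPastebin now performs.
        params = urllib.urlencode(fields)
        headers = {"Content-type": "application/x-www-form-urlencoded",
                   "Accept": "text/plain"}
        conn = httplib.HTTPConnection("%s:80" % host)
        conn.request("POST", path, params, headers)
        response = conn.getresponse()
        conn.close()
        return response.status, response.getheader("location")

    # status, location = post_form("rafb.net", "/paste/paste.php",
    #                              {"lang": "Plain Text", "text": "hello"})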
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py new file mode 100644 index 0000000000..181bb5e35b --- /dev/null +++ b/bitbake/lib/bb/taskdata.py | |||
@@ -0,0 +1,558 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | BitBake 'TaskData' implementation | ||
6 | |||
7 | Task data collection and handling | ||
8 | |||
9 | Copyright (C) 2006 Richard Purdie | ||
10 | |||
11 | This program is free software; you can redistribute it and/or modify it under | ||
12 | the terms of the GNU General Public License version 2 as published by the Free | ||
13 | Software Foundation | ||
14 | |||
15 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
16 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
17 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
18 | |||
19 | You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation. | ||
20 | """ | ||
21 | |||
22 | from bb import data, fetch, event, mkdirhier, utils | ||
23 | import bb, os | ||
24 | |||
25 | class TaskData: | ||
26 | """ | ||
27 | BitBake Task Data implementation | ||
28 | """ | ||
29 | def __init__(self, abort = True): | ||
30 | self.build_names_index = [] | ||
31 | self.run_names_index = [] | ||
32 | self.fn_index = [] | ||
33 | |||
34 | self.build_targets = {} | ||
35 | self.run_targets = {} | ||
36 | |||
37 | self.external_targets = [] | ||
38 | |||
39 | self.tasks_fnid = [] | ||
40 | self.tasks_name = [] | ||
41 | self.tasks_tdepends = [] | ||
42 | # Cache to speed up task ID lookups | ||
43 | self.tasks_lookup = {} | ||
44 | |||
45 | self.depids = {} | ||
46 | self.rdepids = {} | ||
47 | |||
48 | self.consider_msgs_cache = [] | ||
49 | |||
50 | self.failed_deps = [] | ||
51 | self.failed_rdeps = [] | ||
52 | self.failed_fnids = [] | ||
53 | |||
54 | self.abort = abort | ||
55 | |||
56 | def getbuild_id(self, name): | ||
57 | """ | ||
58 | Return an ID number for the build target name. | ||
59 | If it doesn't exist, create one. | ||
60 | """ | ||
61 | if not name in self.build_names_index: | ||
62 | self.build_names_index.append(name) | ||
63 | return len(self.build_names_index) - 1 | ||
64 | |||
65 | return self.build_names_index.index(name) | ||
66 | |||
67 | def getrun_id(self, name): | ||
68 | """ | ||
69 | Return an ID number for the run target name. | ||
70 | If it doesn't exist, create one. | ||
71 | """ | ||
72 | if not name in self.run_names_index: | ||
73 | self.run_names_index.append(name) | ||
74 | return len(self.run_names_index) - 1 | ||
75 | |||
76 | return self.run_names_index.index(name) | ||
77 | |||
78 | def getfn_id(self, name): | ||
79 | """ | ||
80 | Return an ID number for the filename. | ||
81 | If it doesn't exist, create one. | ||
82 | """ | ||
83 | if not name in self.fn_index: | ||
84 | self.fn_index.append(name) | ||
85 | return len(self.fn_index) - 1 | ||
86 | |||
87 | return self.fn_index.index(name) | ||
88 | |||
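getbuild_id, getrun_id and getfn_id all intern a name into a growing list and use its position as the id; note that list.index() makes each lookup O(n). A dict-backed variant of the same idea (hypothetical alternative, not what the patch does):

    class Interner:
        def __init__(self):
            self.names = []    # id -> name, like build_names_index
            self.ids = {}      # name -> id, avoids list.index() scans
        def get_id(self, name):
            if name not in self.ids:
                self.ids[name] = len(self.names)
                self.names.append(name)
            return self.ids[name]

    # i = Interner(); i.get_id("glibc") == 0; i.get_id("glibc") == 0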
89 | def gettask_id(self, fn, task): | ||
90 | """ | ||
91 | Return an ID number for the task matching fn and task. | ||
92 | If it doesn't exist, create one. | ||
93 | """ | ||
94 | fnid = self.getfn_id(fn) | ||
95 | |||
96 | if fnid in self.tasks_lookup: | ||
97 | if task in self.tasks_lookup[fnid]: | ||
98 | return self.tasks_lookup[fnid][task] | ||
99 | |||
100 | self.tasks_name.append(task) | ||
101 | self.tasks_fnid.append(fnid) | ||
102 | self.tasks_tdepends.append([]) | ||
103 | |||
104 | listid = len(self.tasks_name) - 1 | ||
105 | |||
106 | if fnid not in self.tasks_lookup: | ||
107 | self.tasks_lookup[fnid] = {} | ||
108 | self.tasks_lookup[fnid][task] = listid | ||
109 | |||
110 | return listid | ||
111 | |||
112 | def add_tasks(self, fn, dataCache): | ||
113 | """ | ||
114 | Add tasks for a given fn to the database | ||
115 | """ | ||
116 | |||
117 | task_graph = dataCache.task_queues[fn] | ||
118 | task_deps = dataCache.task_deps[fn] | ||
119 | |||
120 | fnid = self.getfn_id(fn) | ||
121 | |||
122 | if fnid in self.failed_fnids: | ||
123 | bb.msg.fatal(bb.msg.domain.TaskData, "Trying to re-add a failed file? Something is broken...") | ||
124 | |||
125 | # Check if we've already seen this fn | ||
126 | if fnid in self.tasks_fnid: | ||
127 | return | ||
128 | |||
129 | # Work out task dependencies | ||
130 | for task in task_graph.allnodes(): | ||
131 | parentids = [] | ||
132 | for dep in task_graph.getparents(task): | ||
133 | parentid = self.gettask_id(fn, dep) | ||
134 | parentids.append(parentid) | ||
135 | taskid = self.gettask_id(fn, task) | ||
136 | self.tasks_tdepends[taskid].extend(parentids) | ||
137 | |||
138 | # Work out build dependencies | ||
139 | if not fnid in self.depids: | ||
140 | dependids = {} | ||
141 | for depend in dataCache.deps[fn]: | ||
142 | bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn)) | ||
143 | dependids[self.getbuild_id(depend)] = None | ||
144 | self.depids[fnid] = dependids.keys() | ||
145 | |||
146 | # Work out runtime dependencies | ||
147 | if not fnid in self.rdepids: | ||
148 | rdependids = {} | ||
149 | rdepends = dataCache.rundeps[fn] | ||
150 | rrecs = dataCache.runrecs[fn] | ||
151 | for package in rdepends: | ||
152 | for rdepend in rdepends[package]: | ||
153 | bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn)) | ||
154 | rdependids[self.getrun_id(rdepend)] = None | ||
155 | for package in rrecs: | ||
156 | for rdepend in rrecs[package]: | ||
157 | bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn)) | ||
158 | rdependids[self.getrun_id(rdepend)] = None | ||
159 | self.rdepids[fnid] = rdependids.keys() | ||
160 | |||
161 | for dep in self.depids[fnid]: | ||
162 | if dep in self.failed_deps: | ||
163 | self.fail_fnid(fnid) | ||
164 | return | ||
165 | for dep in self.rdepids[fnid]: | ||
166 | if dep in self.failed_rdeps: | ||
167 | self.fail_fnid(fnid) | ||
168 | return | ||
169 | |||
170 | def have_build_target(self, target): | ||
171 | """ | ||
172 | Have we a build target matching this name? | ||
173 | """ | ||
174 | targetid = self.getbuild_id(target) | ||
175 | |||
176 | if targetid in self.build_targets: | ||
177 | return True | ||
178 | return False | ||
179 | |||
180 | def have_runtime_target(self, target): | ||
181 | """ | ||
182 | Have we a runtime target matching this name? | ||
183 | """ | ||
184 | targetid = self.getrun_id(target) | ||
185 | |||
186 | if targetid in self.run_targets: | ||
187 | return True | ||
188 | return False | ||
189 | |||
190 | def add_build_target(self, fn, item): | ||
191 | """ | ||
192 | Add a build target. | ||
193 | If already present, append the provider fn to the list | ||
194 | """ | ||
195 | targetid = self.getbuild_id(item) | ||
196 | fnid = self.getfn_id(fn) | ||
197 | |||
198 | if targetid in self.build_targets: | ||
199 | if fnid in self.build_targets[targetid]: | ||
200 | return | ||
201 | self.build_targets[targetid].append(fnid) | ||
202 | return | ||
203 | self.build_targets[targetid] = [fnid] | ||
204 | |||
205 | def add_runtime_target(self, fn, item): | ||
206 | """ | ||
207 | Add a runtime target. | ||
208 | If already present, append the provider fn to the list | ||
209 | """ | ||
210 | targetid = self.getrun_id(item) | ||
211 | fnid = self.getfn_id(fn) | ||
212 | |||
213 | if targetid in self.run_targets: | ||
214 | if fnid in self.run_targets[targetid]: | ||
215 | return | ||
216 | self.run_targets[targetid].append(fnid) | ||
217 | return | ||
218 | self.run_targets[targetid] = [fnid] | ||
219 | |||
220 | def mark_external_target(self, item): | ||
221 | """ | ||
222 | Mark a build target as being externally requested | ||
223 | """ | ||
224 | targetid = self.getbuild_id(item) | ||
225 | |||
226 | if targetid not in self.external_targets: | ||
227 | self.external_targets.append(targetid) | ||
228 | |||
229 | def get_unresolved_build_targets(self, dataCache): | ||
230 | """ | ||
231 | Return a list of build targets whose providers | ||
232 | are unknown. | ||
233 | """ | ||
234 | unresolved = [] | ||
235 | for target in self.build_names_index: | ||
236 | if target in dataCache.ignored_dependencies: | ||
237 | continue | ||
238 | if self.build_names_index.index(target) in self.failed_deps: | ||
239 | continue | ||
240 | if not self.have_build_target(target): | ||
241 | unresolved.append(target) | ||
242 | return unresolved | ||
243 | |||
244 | def get_unresolved_run_targets(self, dataCache): | ||
245 | """ | ||
246 | Return a list of runtime targets whose providers | ||
247 | are unknown. | ||
248 | """ | ||
249 | unresolved = [] | ||
250 | for target in self.run_names_index: | ||
251 | if target in dataCache.ignored_dependencies: | ||
252 | continue | ||
253 | if self.run_names_index.index(target) in self.failed_rdeps: | ||
254 | continue | ||
255 | if not self.have_runtime_target(target): | ||
256 | unresolved.append(target) | ||
257 | return unresolved | ||
258 | |||
259 | def get_provider(self, item): | ||
260 | """ | ||
261 | Return a list of providers of item | ||
262 | """ | ||
263 | targetid = self.getbuild_id(item) | ||
264 | |||
265 | return self.build_targets[targetid] | ||
266 | |||
267 | def get_dependees(self, itemid): | ||
268 | """ | ||
269 | Return a list of targets which depend on item | ||
270 | """ | ||
271 | dependees = [] | ||
272 | for fnid in self.depids: | ||
273 | if itemid in self.depids[fnid]: | ||
274 | dependees.append(fnid) | ||
275 | return dependees | ||
276 | |||
277 | def get_dependees_str(self, item): | ||
278 | """ | ||
279 | Return a list of targets which depend on item as a user readable string | ||
280 | """ | ||
281 | itemid = self.getbuild_id(item) | ||
282 | dependees = [] | ||
283 | for fnid in self.depids: | ||
284 | if itemid in self.depids[fnid]: | ||
285 | dependees.append(self.fn_index[fnid]) | ||
286 | return dependees | ||
287 | |||
288 | def get_rdependees(self, itemid): | ||
289 | """ | ||
290 | Return a list of targets which depend on runtime item | ||
291 | """ | ||
292 | dependees = [] | ||
293 | for fnid in self.rdepids: | ||
294 | if itemid in self.rdepids[fnid]: | ||
295 | dependees.append(fnid) | ||
296 | return dependees | ||
297 | |||
298 | def get_rdependees_str(self, item): | ||
299 | """ | ||
300 | Return a list of targets which depend on runtime item as a user readable string | ||
301 | """ | ||
302 | itemid = self.getrun_id(item) | ||
303 | dependees = [] | ||
304 | for fnid in self.rdepids: | ||
305 | if itemid in self.rdepids[fnid]: | ||
306 | dependees.append(self.fn_index[fnid]) | ||
307 | return dependees | ||
308 | |||
309 | def add_provider(self, cfgData, dataCache, item): | ||
310 | try: | ||
311 | self.add_provider_internal(cfgData, dataCache, item) | ||
312 | except bb.providers.NoProvider: | ||
313 | if self.abort: | ||
314 | bb.msg.error(bb.msg.domain.Provider, "No providers of build target %s (for %s)" % (item, self.get_dependees_str(item))) | ||
315 | raise | ||
316 | targetid = self.getbuild_id(item) | ||
317 | self.remove_buildtarget(targetid) | ||
318 | |||
319 | self.mark_external_target(item) | ||
320 | |||
321 | def add_provider_internal(self, cfgData, dataCache, item): | ||
322 | """ | ||
323 | Add the providers of item to the task data | ||
324 | Mark entries which were specifically added externally, as opposed to dependencies | ||
325 | added internally during dependency resolution | ||
326 | """ | ||
327 | |||
328 | if item in dataCache.ignored_dependencies: | ||
329 | return | ||
330 | |||
331 | if not item in dataCache.providers: | ||
332 | bb.msg.debug(1, bb.msg.domain.Provider, "No providers of build target %s (for %s)" % (item, self.get_dependees_str(item))) | ||
333 | bb.event.fire(bb.event.NoProvider(item, cfgData)) | ||
334 | raise bb.providers.NoProvider(item) | ||
335 | |||
336 | if self.have_build_target(item): | ||
337 | return | ||
338 | |||
339 | all_p = dataCache.providers[item] | ||
340 | |||
341 | eligible = bb.providers.filterProviders(all_p, item, cfgData, dataCache) | ||
342 | |||
343 | for p in eligible: | ||
344 | fnid = self.getfn_id(p) | ||
345 | if fnid in self.failed_fnids: | ||
346 | eligible.remove(p) | ||
347 | |||
348 | if not eligible: | ||
349 | bb.msg.debug(1, bb.msg.domain.Provider, "No providers of build target %s after filtering (for %s)" % (item, self.get_dependees_str(item))) | ||
350 | bb.event.fire(bb.event.NoProvider(item, cfgData)) | ||
351 | raise bb.providers.NoProvider(item) | ||
352 | |||
353 | prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1) | ||
354 | if prefervar: | ||
355 | dataCache.preferred[item] = prefervar | ||
356 | |||
357 | discriminated = False | ||
358 | if item in dataCache.preferred: | ||
359 | for p in eligible: | ||
360 | pn = dataCache.pkg_fn[p] | ||
361 | if dataCache.preferred[item] == pn: | ||
362 | bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDER" % (pn, item)) | ||
363 | eligible.remove(p) | ||
364 | eligible = [p] + eligible | ||
365 | discriminated = True | ||
366 | break | ||
367 | |||
368 | if len(eligible) > 1 and discriminated == False: | ||
369 | if item not in self.consider_msgs_cache: | ||
370 | providers_list = [] | ||
371 | for fn in eligible: | ||
372 | providers_list.append(dataCache.pkg_fn[fn]) | ||
373 | bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list))) | ||
374 | bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item) | ||
375 | bb.event.fire(bb.event.MultipleProviders(item,providers_list,cfgData)) | ||
376 | self.consider_msgs_cache.append(item) | ||
377 | |||
378 | for fn in eligible: | ||
379 | fnid = self.getfn_id(fn) | ||
380 | if fnid in self.failed_fnids: | ||
381 | continue | ||
382 | bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item)) | ||
383 | self.add_build_target(fn, item) | ||
384 | self.add_tasks(fn, dataCache) | ||
385 | |||
386 | |||
387 | #item = dataCache.pkg_fn[fn] | ||
388 | |||
389 | def add_rprovider(self, cfgData, dataCache, item): | ||
390 | """ | ||
391 | Add the runtime providers of item to the task data | ||
392 | (takes item names from RDEPENDS/PACKAGES namespace) | ||
393 | """ | ||
394 | |||
395 | if item in dataCache.ignored_dependencies: | ||
396 | return | ||
397 | |||
398 | if self.have_runtime_target(item): | ||
399 | return | ||
400 | |||
401 | all_p = bb.providers.getRuntimeProviders(dataCache, item) | ||
402 | |||
403 | if not all_p: | ||
404 | bb.msg.error(bb.msg.domain.Provider, "No providers of runtime build target %s (for %s)" % (item, self.get_rdependees_str(item))) | ||
405 | bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True)) | ||
406 | raise bb.providers.NoRProvider(item) | ||
407 | |||
408 | eligible = bb.providers.filterProviders(all_p, item, cfgData, dataCache) | ||
409 | |||
410 | for p in eligible: | ||
411 | fnid = self.getfn_id(p) | ||
412 | if fnid in self.failed_fnids: | ||
413 | eligible.remove(p) | ||
414 | |||
415 | if not eligible: | ||
416 | bb.msg.error(bb.msg.domain.Provider, "No providers of runtime build target %s after filtering (for %s)" % (item, self.get_rdependees_str(item))) | ||
417 | bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True)) | ||
418 | raise bb.providers.NoRProvider(item) | ||
419 | |||
420 | # Should use dataCache.preferred here? | ||
421 | preferred = [] | ||
422 | for p in eligible: | ||
423 | pn = dataCache.pkg_fn[p] | ||
424 | provides = dataCache.pn_provides[pn] | ||
425 | for provide in provides: | ||
426 | prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1) | ||
427 | if prefervar == pn: | ||
428 | bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to PREFERRED_PROVIDER" % (pn, item)) | ||
429 | eligible.remove(p) | ||
430 | eligible = [p] + eligible | ||
431 | preferred.append(p) | ||
432 | |||
433 | if len(eligible) > 1 and len(preferred) == 0: | ||
434 | if item not in self.consider_msgs_cache: | ||
435 | providers_list = [] | ||
436 | for fn in eligible: | ||
437 | providers_list.append(dataCache.pkg_fn[fn]) | ||
438 | bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list))) | ||
439 | bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item) | ||
440 | bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True)) | ||
441 | self.consider_msgs_cache.append(item) | ||
442 | |||
443 | if len(preferred) > 1: | ||
444 | if item not in self.consider_msgs_cache: | ||
445 | providers_list = [] | ||
446 | for fn in preferred: | ||
447 | providers_list.append(dataCache.pkg_fn[fn]) | ||
448 | bb.msg.note(2, bb.msg.domain.Provider, "multiple preferred providers are available for runtime %s (%s);" % (item, ", ".join(providers_list))) | ||
449 | bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item) | ||
450 | bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True)) | ||
451 | self.consider_msgs_cache.append(item) | ||
452 | |||
453 | # run through the list until we find one that we can build | ||
454 | for fn in eligible: | ||
455 | fnid = self.getfn_id(fn) | ||
456 | if fnid in self.failed_fnids: | ||
457 | continue | ||
458 | bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy runtime %s" % (fn, item)) | ||
459 | self.add_runtime_target(fn, item) | ||
460 | self.add_tasks(fn, dataCache) | ||
461 | |||
462 | def fail_fnid(self, fnid): | ||
463 | """ | ||
464 | Mark a file as failed (unbuildable) | ||
465 | Remove any references from build and runtime provider lists | ||
466 | """ | ||
467 | if fnid in self.failed_fnids: | ||
468 | return | ||
469 | bb.msg.debug(1, bb.msg.domain.Provider, "Removing failed file %s" % self.fn_index[fnid]) | ||
470 | self.failed_fnids.append(fnid) | ||
471 | for target in self.build_targets: | ||
472 | if fnid in self.build_targets[target]: | ||
473 | self.build_targets[target].remove(fnid) | ||
474 | if len(self.build_targets[target]) == 0: | ||
475 | self.remove_buildtarget(target) | ||
476 | for target in self.run_targets: | ||
477 | if fnid in self.run_targets[target]: | ||
478 | self.run_targets[target].remove(fnid) | ||
479 | if len(self.run_targets[target]) == 0: | ||
480 | self.remove_runtarget(target) | ||
481 | |||
482 | def remove_buildtarget(self, targetid): | ||
483 | """ | ||
484 | Mark a build target as failed (unbuildable) | ||
485 | Trigger removal of any files that have this as a dependency | ||
486 | """ | ||
487 | bb.msg.debug(1, bb.msg.domain.Provider, "Removing failed build target %s" % self.build_names_index[targetid]) | ||
488 | self.failed_deps.append(targetid) | ||
489 | dependees = self.get_dependees(targetid) | ||
490 | for fnid in dependees: | ||
491 | self.fail_fnid(fnid) | ||
492 | if self.abort and targetid in self.external_targets: | ||
493 | bb.msg.error(bb.msg.domain.Provider, "No buildable providers available for required build target %s" % self.build_names_index[targetid]) | ||
494 | raise bb.providers.NoProvider | ||
495 | |||
496 | def remove_runtarget(self, targetid): | ||
497 | """ | ||
498 | Mark a run target as failed (unbuildable) | ||
499 | Trigger removal of any files that have this as a dependency | ||
500 | """ | ||
501 | bb.msg.note(1, bb.msg.domain.Provider, "Removing failed runtime build target %s" % self.run_names_index[targetid]) | ||
502 | self.failed_rdeps.append(targetid) | ||
503 | dependees = self.get_rdependees(targetid) | ||
504 | for fnid in dependees: | ||
505 | self.fail_fnid(fnid) | ||
506 | |||
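fail_fnid, remove_buildtarget and remove_runtarget together implement a cascading failure: a failed file is removed from every provider list, any target left with no providers is marked failed, and that in turn fails every file that depended on it. The cycle, condensed over plain dicts (illustrative sketch):

    def fail_file(fn, providers, depends, failed_files, failed_targets):
        # providers: target -> list of provider files
        # depends:   file   -> list of targets it depends on
        if fn in failed_files:
            return
        failed_files.add(fn)
        for target, fns in providers.items():
            if fn in fns:
                fns.remove(fn)
                if not fns and target not in failed_targets:
                    failed_targets.add(target)   # no providers left
                    for other, targets in depends.items():
                        if target in targets:    # cascade to dependees
                            fail_file(other, providers, depends,
                                      failed_files, failed_targets)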
507 | def add_unresolved(self, cfgData, dataCache): | ||
508 | """ | ||
509 | Resolve all unresolved build and runtime targets | ||
510 | """ | ||
511 | bb.msg.note(1, bb.msg.domain.TaskData, "Resolving missing task queue dependencies") | ||
512 | while 1: | ||
513 | added = 0 | ||
514 | for target in self.get_unresolved_build_targets(dataCache): | ||
515 | try: | ||
516 | self.add_provider_internal(cfgData, dataCache, target) | ||
517 | added = added + 1 | ||
518 | except bb.providers.NoProvider: | ||
519 | targetid = self.getbuild_id(target) | ||
520 | if self.abort and targetid in self.external_targets: | ||
521 | raise | ||
522 | self.remove_buildtarget(targetid) | ||
523 | for target in self.get_unresolved_run_targets(dataCache): | ||
524 | try: | ||
525 | self.add_rprovider(cfgData, dataCache, target) | ||
526 | added = added + 1 | ||
527 | except bb.providers.NoRProvider: | ||
528 | self.remove_runtarget(self.getrun_id(target)) | ||
529 | bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependencies") | ||
530 | if added == 0: | ||
531 | break | ||
532 | |||
533 | def dump_data(self): | ||
534 | """ | ||
535 | Dump some debug information on the internal data structures | ||
536 | """ | ||
537 | bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:") | ||
538 | bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index)) | ||
539 | bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:") | ||
540 | bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index)) | ||
541 | bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:") | ||
542 | for target in self.build_targets.keys(): | ||
543 | bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.build_names_index[target], self.build_targets[target])) | ||
544 | bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:") | ||
545 | for target in self.run_targets.keys(): | ||
546 | bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.run_names_index[target], self.run_targets[target])) | ||
547 | bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:") | ||
548 | for task in range(len(self.tasks_name)): | ||
549 | bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % ( | ||
550 | task, | ||
551 | self.fn_index[self.tasks_fnid[task]], | ||
552 | self.tasks_name[task], | ||
553 | self.tasks_tdepends[task])) | ||
554 | bb.msg.debug(3, bb.msg.domain.TaskData, "runtime ids (per fn):") | ||
555 | for fnid in self.rdepids: | ||
556 | bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid])) | ||
557 | |||
558 | |||
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 5b3cb38d81..d7383f44b2 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
@@ -103,11 +103,11 @@ def _print_trace(body, line): | |||
103 | import bb | 103 | import bb |
104 | 104 | ||
105 | # print the environment of the method | 105 | # print the environment of the method |
106 | bb.error("Printing the environment of the function") | 106 | bb.msg.error(bb.msg.domain.Util, "Printing the environment of the function") |
107 | min_line = max(1,line-4) | 107 | min_line = max(1,line-4) |
108 | max_line = min(line+4,len(body)-1) | 108 | max_line = min(line+4,len(body)-1) |
109 | for i in range(min_line,max_line+1): | 109 | for i in range(min_line,max_line+1): |
110 | bb.error("\t%.4d:%s" % (i, body[i-1]) ) | 110 | bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (i, body[i-1]) ) |
111 | 111 | ||
112 | 112 | ||
113 | def better_compile(text, file, realfile): | 113 | def better_compile(text, file, realfile): |
@@ -122,9 +122,9 @@ def better_compile(text, file, realfile): | |||
122 | 122 | ||
123 | # split the text into lines again | 123 | # split the text into lines again |
124 | body = text.split('\n') | 124 | body = text.split('\n') |
125 | bb.error("Error in compiling: ", realfile) | 125 | bb.msg.error(bb.msg.domain.Util, "Error in compiling: ", realfile) |
126 | bb.error("The lines resulting into this error were:") | 126 | bb.msg.error(bb.msg.domain.Util, "The lines resulting into this error were:") |
127 | bb.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1])) | 127 | bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1])) |
128 | 128 | ||
129 | _print_trace(body, e.lineno) | 129 | _print_trace(body, e.lineno) |
130 | 130 | ||
@@ -147,8 +147,8 @@ def better_exec(code, context, text, realfile): | |||
147 | raise | 147 | raise |
148 | 148 | ||
149 | # print the Header of the Error Message | 149 | # print the Header of the Error Message |
150 | bb.error("Error in executing: ", realfile) | 150 | bb.msg.error(bb.msg.domain.Util, "Error in executing: ", realfile) |
151 | bb.error("Exception:%s Message:%s" % (t,value) ) | 151 | bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) ) |
152 | 152 | ||
153 | # let us find the line number now | 153 | # let us find the line number now |
154 | while tb.tb_next: | 154 | while tb.tb_next: |
@@ -160,3 +160,43 @@ def better_exec(code, context, text, realfile): | |||
160 | _print_trace( text.split('\n'), line ) | 160 | _print_trace( text.split('\n'), line ) |
161 | 161 | ||
162 | raise | 162 | raise |
163 | |||
164 | def Enum(*names): | ||
165 | """ | ||
166 | A simple class to give Enum support | ||
167 | """ | ||
168 | |||
169 | assert names, "Empty enums are not supported" | ||
170 | |||
171 | class EnumClass(object): | ||
172 | __slots__ = names | ||
173 | def __iter__(self): return iter(constants) | ||
174 | def __len__(self): return len(constants) | ||
175 | def __getitem__(self, i): return constants[i] | ||
176 | def __repr__(self): return 'Enum' + str(names) | ||
177 | def __str__(self): return 'enum ' + str(constants) | ||
178 | |||
179 | class EnumValue(object): | ||
180 | __slots__ = ('__value') | ||
181 | def __init__(self, value): self.__value = value | ||
182 | Value = property(lambda self: self.__value) | ||
183 | EnumType = property(lambda self: EnumType) | ||
184 | def __hash__(self): return hash(self.__value) | ||
185 | def __cmp__(self, other): | ||
186 | # C fans might want to remove the following assertion | ||
187 | # to make all enums comparable by ordinal value {;)) | ||
188 | assert self.EnumType is other.EnumType, "Only values from the same enum are comparable" | ||
189 | return cmp(self.__value, other.__value) | ||
190 | def __invert__(self): return constants[maximum - self.__value] | ||
191 | def __nonzero__(self): return bool(self.__value) | ||
192 | def __repr__(self): return str(names[self.__value]) | ||
193 | |||
194 | maximum = len(names) - 1 | ||
195 | constants = [None] * len(names) | ||
196 | for i, each in enumerate(names): | ||
197 | val = EnumValue(i) | ||
198 | setattr(EnumClass, each, val) | ||
199 | constants[i] = val | ||
200 | constants = tuple(constants) | ||
201 | EnumType = EnumClass() | ||
202 | return EnumType | ||
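A usage sketch for the Enum helper; the member names mirror the bb.msg.domain values used throughout this patch, though the exact construction inside bb.msg is assumed rather than shown here:

    domain = Enum('Build', 'Cache', 'Provider', 'RunQueue', 'TaskData', 'Util')

    print(domain.RunQueue)     # prints: RunQueue
    print(len(domain))         # prints: 6
    print(domain[0])           # indexable like a sequence: Build
    # Ordinal comparison (Python 2 only, via __cmp__), same enum required:
    assert domain.Build < domain.RunQueue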