diff options
author | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
---|---|---|
committer | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
commit | c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch) | |
tree | bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /bitbake/lib/bb/parse | |
download | poky-daisy-140929.tar.gz |
initial commit for Enea Linux 4.0-140929 (tag: daisy-140929)
Migrated from the internal git server on the daisy-enea-point-release branch
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'bitbake/lib/bb/parse')
-rw-r--r-- | bitbake/lib/bb/parse/__init__.py | 157 | ||||
-rw-r--r-- | bitbake/lib/bb/parse/ast.py | 478 | ||||
-rw-r--r-- | bitbake/lib/bb/parse/parse_py/BBHandler.py | 267 | ||||
-rw-r--r-- | bitbake/lib/bb/parse/parse_py/ConfHandler.py | 189 | ||||
-rw-r--r-- | bitbake/lib/bb/parse/parse_py/__init__.py | 33 |
5 files changed, 1124 insertions, 0 deletions
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py new file mode 100644 index 0000000000..e4a44dda11 --- /dev/null +++ b/bitbake/lib/bb/parse/__init__.py | |||
@@ -0,0 +1,157 @@ | |||
1 | """ | ||
2 | BitBake Parsers | ||
3 | |||
4 | File parsers for the BitBake build tools. | ||
5 | |||
6 | """ | ||
7 | |||
8 | |||
9 | # Copyright (C) 2003, 2004 Chris Larson | ||
10 | # Copyright (C) 2003, 2004 Phil Blundell | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | # | ||
25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
26 | |||
27 | handlers = [] | ||
28 | |||
29 | import os | ||
30 | import stat | ||
31 | import logging | ||
32 | import bb | ||
33 | import bb.utils | ||
34 | import bb.siggen | ||
35 | |||
36 | logger = logging.getLogger("BitBake.Parsing") | ||
37 | |||
class ParseError(Exception):
    """Raised when a file cannot be parsed as BitBake metadata."""

    def __init__(self, msg, filename, lineno=0):
        # Record context so callers can report where parsing broke down.
        self.msg = msg
        self.filename = filename
        self.lineno = lineno
        Exception.__init__(self, msg, filename, lineno)

    def __str__(self):
        # A line number of 0 means "unknown", so it is left out of the message.
        if not self.lineno:
            return "ParseError in %s: %s" % (self.filename, self.msg)
        return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
51 | |||
class SkipPackage(Exception):
    """Exception raised to skip this package (recipe); callers catch it and
    record the reason in the datastore rather than treating it as an error."""
54 | |||
__mtime_cache = {}
def cached_mtime(f):
    """Return the mtime of f, caching the result.

    Raises OSError if f does not exist (see cached_mtime_noerror for the
    non-raising variant).
    """
    try:
        return __mtime_cache[f]
    except KeyError:
        mtime = os.stat(f)[stat.ST_MTIME]
        __mtime_cache[f] = mtime
        return mtime
60 | |||
def cached_mtime_noerror(f):
    """Like cached_mtime() but returns 0 instead of raising for missing files."""
    if f in __mtime_cache:
        return __mtime_cache[f]
    try:
        mtime = os.stat(f)[stat.ST_MTIME]
    except OSError:
        # Missing/unreadable file: report 0 and do not poison the cache.
        return 0
    __mtime_cache[f] = mtime
    return mtime
68 | |||
def update_mtime(f):
    """Unconditionally refresh the cached mtime for f and return it."""
    mtime = os.stat(f)[stat.ST_MTIME]
    __mtime_cache[f] = mtime
    return mtime
72 | |||
def mark_dependency(d, f):
    """Record file f (with its current mtime) in the datastore's __depends list."""
    # Normalise a leading "./" into an absolute path so entries are stable.
    if f.startswith('./'):
        f = "%s/%s" % (os.getcwd(), f[2:])
    entry = (f, cached_mtime_noerror(f))
    deps = d.getVar('__depends') or []
    if entry not in deps:
        deps.append(entry)
        d.setVar('__depends', deps)
81 | |||
def check_dependency(d, f):
    """Return True if f (at its current mtime) is already listed in __depends."""
    deps = d.getVar('__depends') or []
    return (f, cached_mtime_noerror(f)) in deps
86 | |||
def supports(fn, data):
    """Return 1 if some registered handler accepts this file, 0 otherwise."""
    matched = any(h['supports'](fn, data) for h in handlers)
    return 1 if matched else 0
93 | |||
def handle(fn, data, include = 0):
    """Dispatch fn to the first registered handler that supports it.

    Raises ParseError when no handler claims the file.
    """
    for handler in handlers:
        if not handler['supports'](fn, data):
            continue
        # Record the inclusion in the datastore's include history while parsing.
        with data.inchistory.include(fn):
            return handler['handle'](fn, data, include)
    raise ParseError("not a BitBake file", fn)
101 | |||
def init(fn, data):
    # Run the matching handler's init hook for this file.
    # NOTE(review): h['supports'] is called with a single argument here,
    # while supports()/handle() above pass (fn, data) — confirm the handler
    # modules tolerate this before relying on init().
    for h in handlers:
        if h['supports'](fn):
            return h['init'](data)
106 | |||
def init_parser(d):
    # Instantiate the configured signature generator for this parse session.
    bb.parse.siggen = bb.siggen.init(d)
109 | |||
def resolve_file(fn, d):
    """Locate fn (searching BBPATH for relative names), record it as a parse
    dependency and return the path.  Raises IOError when the file is missing."""
    if not os.path.isabs(fn):
        bbpath = d.getVar("BBPATH", True)
        found, attempts = bb.utils.which(bbpath, fn, history=True)
        # Every path probed during the search influences the result, so
        # each attempt becomes a dependency too.
        for attempt in attempts:
            mark_dependency(d, attempt)
        if not found:
            raise IOError("file %s not found in %s" % (fn, bbpath))
        fn = found

    mark_dependency(d, fn)
    if not os.path.isfile(fn):
        raise IOError("file %s not found" % fn)

    logger.debug(2, "LOAD %s", fn)
    return fn
126 | |||
# Used by OpenEmbedded metadata
__pkgsplit_cache__={}
def vars_from_file(mypkg, d):
    """Derive (PN, PV, PR) defaults from a recipe filename.

    "foo_1.0_r0.bb" yields ["foo", "1.0", "r0"]; missing fields are padded
    with None.  Returns (None, None, None) for non-recipe filenames.
    Raises ParseError when the name contains more than two underscores.
    Results are cached per filename; d is unused but kept for API
    compatibility.
    """
    if not mypkg or not mypkg.endswith((".bb", ".bbappend")):
        return (None, None, None)
    if mypkg in __pkgsplit_cache__:
        return __pkgsplit_cache__[mypkg]

    myfile = os.path.splitext(os.path.basename(mypkg))
    parts = myfile[0].split('_')
    if len(parts) > 3:
        # Validate BEFORE caching: previously the bad split was cached first,
        # so a repeated lookup returned the bogus entry instead of raising.
        raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
    # Pad to exactly (PN, PV, PR) with None for absent fields.
    parts.extend([None] * (3 - len(parts)))
    __pkgsplit_cache__[mypkg] = parts
    return parts
147 | |||
148 | def get_file_depends(d): | ||
149 | '''Return the dependent files''' | ||
150 | dep_files = [] | ||
151 | depends = d.getVar('__base_depends', True) or [] | ||
152 | depends = depends + (d.getVar('__depends', True) or []) | ||
153 | for (fn, _) in depends: | ||
154 | dep_files.append(os.path.abspath(fn)) | ||
155 | return " ".join(dep_files) | ||
156 | |||
157 | from bb.parse.parse_py import __version__, ConfHandler, BBHandler | ||
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py new file mode 100644 index 0000000000..d8c141b37c --- /dev/null +++ b/bitbake/lib/bb/parse/ast.py | |||
@@ -0,0 +1,478 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | AbstractSyntaxTree classes for the Bitbake language | ||
5 | """ | ||
6 | |||
7 | # Copyright (C) 2003, 2004 Chris Larson | ||
8 | # Copyright (C) 2003, 2004 Phil Blundell | ||
9 | # Copyright (C) 2009 Holger Hans Peter Freyther | ||
10 | # | ||
11 | # This program is free software; you can redistribute it and/or modify | ||
12 | # it under the terms of the GNU General Public License version 2 as | ||
13 | # published by the Free Software Foundation. | ||
14 | # | ||
15 | # This program is distributed in the hope that it will be useful, | ||
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
18 | # GNU General Public License for more details. | ||
19 | # | ||
20 | # You should have received a copy of the GNU General Public License along | ||
21 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
22 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
23 | |||
24 | from __future__ import absolute_import | ||
25 | from future_builtins import filter | ||
26 | import re | ||
27 | import string | ||
28 | import logging | ||
29 | import bb | ||
30 | import itertools | ||
31 | from bb import methodpool | ||
32 | from bb.parse import logger | ||
33 | |||
34 | _bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]") | ||
35 | |||
class StatementGroup(list):
    """An ordered list of AST nodes evaluated sequentially against a datastore."""
    def eval(self, data):
        for node in self:
            node.eval(data)
40 | |||
class AstNode(object):
    """Base class for all AST nodes; remembers where the node came from."""
    def __init__(self, filename, lineno):
        self.filename = filename
        self.lineno = lineno
45 | |||
class IncludeNode(AstNode):
    """AST node for 'include' / 'require' directives."""

    def __init__(self, filename, lineno, what_file, force):
        AstNode.__init__(self, filename, lineno)
        self.what_file = what_file
        self.force = force

    def eval(self, data):
        """Expand the target filename and parse it into `data`."""
        target = data.expand(self.what_file)
        logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, target)

        # 'require' (force) makes a missing file an error; a plain
        # 'include' passes False and missing files are tolerated.
        # TODO: Cache those includes... maybe not here though
        error_mode = "include required" if self.force else False
        bb.parse.ConfHandler.include(self.filename, target, self.lineno, data, error_mode)
64 | |||
class ExportNode(AstNode):
    """AST node for 'export VAR' lines."""

    def __init__(self, filename, lineno, var):
        AstNode.__init__(self, filename, lineno)
        self.var = var

    def eval(self, data):
        # The export flag makes the variable visible to task environments;
        # op is recorded for variable history tracking.
        data.setVarFlag(self.var, "export", 1, op = 'exported')
72 | |||
class DataNode(AstNode):
    """
    Various data related updates. For the sake of sanity
    we have one class doing all this. This means that all
    this need to be re-evaluated... we might be able to do
    that faster with multiple classes.
    """
    def __init__(self, filename, lineno, groupd):
        AstNode.__init__(self, filename, lineno)
        # groupd is the regex group dict from the assignment-line match;
        # which keys are non-None selects the assignment operator below.
        self.groupd = groupd

    def getFunc(self, key, data):
        # Read the current value of the target (the flag value when the
        # assignment addresses VAR[flag], otherwise the variable itself),
        # ignoring weak (??=) defaults.
        if 'flag' in self.groupd and self.groupd['flag'] != None:
            return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
        else:
            return data.getVar(key, noweakdefault=True)

    def eval(self, data):
        groupd = self.groupd
        key = groupd["var"]
        # Provenance information recorded with every store for history.
        loginfo = {
            'variable': key,
            'file': self.filename,
            'line': self.lineno,
        }
        if "exp" in groupd and groupd["exp"] != None:
            data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)

        # Exactly one of the operator groups is set; dispatch on it.
        op = "set"
        if "ques" in groupd and groupd["ques"] != None:
            # ?= only assigns when no value exists yet.
            val = self.getFunc(key, data)
            op = "set?"
            if val == None:
                val = groupd["value"]
        elif "colon" in groupd and groupd["colon"] != None:
            # := expands immediately against a fully-updated copy of the data.
            e = data.createCopy()
            bb.data.update_data(e)
            op = "immediate"
            val = e.expand(groupd["value"], key + "[:=]")
        elif "append" in groupd and groupd["append"] != None:
            # += appends with a separating space.
            op = "append"
            val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
        elif "prepend" in groupd and groupd["prepend"] != None:
            # =+ prepends with a separating space.
            op = "prepend"
            val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
        elif "postdot" in groupd and groupd["postdot"] != None:
            # .= appends with no separator.
            op = "postdot"
            val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
        elif "predot" in groupd and groupd["predot"] != None:
            # =. prepends with no separator.
            op = "predot"
            val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
        else:
            val = groupd["value"]

        # Store either into a flag (VAR[flag] = ... or lazy ??= via the
        # special "defaultval" flag) or into the variable itself.
        flag = None
        if 'flag' in groupd and groupd['flag'] != None:
            flag = groupd['flag']
        elif groupd["lazyques"]:
            flag = "defaultval"

        loginfo['op'] = op
        loginfo['detail'] = groupd["value"]

        if flag:
            data.setVarFlag(key, flag, val, **loginfo)
        else:
            data.setVar(key, val, **loginfo)
140 | |||
class MethodNode(AstNode):
    # Maps characters legal in filenames but not in Python identifiers to
    # underscores, for building anonymous function names.
    tr_tbl = string.maketrans('/.+-@%', '______')

    def __init__(self, filename, lineno, func_name, body):
        AstNode.__init__(self, filename, lineno)
        self.func_name = func_name
        self.body = body

    def eval(self, data):
        # Store a function body in the datastore under its name.
        text = '\n'.join(self.body)
        if self.func_name == "__anonymous":
            # Anonymous python functions get a unique name derived from the
            # source location, and are queued in __BBANONFUNCS so finalize()
            # can execute them in definition order.
            funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
            text = "def %s(d):\n" % (funcname) + text
            bb.methodpool.insert_method(funcname, text, self.filename)
            anonfuncs = data.getVar('__BBANONFUNCS') or []
            anonfuncs.append(funcname)
            data.setVar('__BBANONFUNCS', anonfuncs)
            data.setVar(funcname, text)
        else:
            # Named function: flag it executable and store the raw body.
            data.setVarFlag(self.func_name, "func", 1)
            data.setVar(self.func_name, text)
162 | |||
class PythonMethodNode(AstNode):
    """AST node for 'python funcname() { ... }' style definitions."""

    def __init__(self, filename, lineno, function, modulename, body):
        AstNode.__init__(self, filename, lineno)
        self.function = function
        self.modulename = modulename
        self.body = body

    def eval(self, data):
        # Note we will add root to parsedmethods after having parse
        # 'this' file. This means we will not parse methods from
        # bb classes twice
        source = '\n'.join(self.body)
        bb.methodpool.insert_method(self.modulename, source, self.filename)
        # Mark the variable as an executable python function and keep the
        # raw body as its value.
        data.setVarFlag(self.function, "func", 1)
        data.setVarFlag(self.function, "python", 1)
        data.setVar(self.function, source)
179 | |||
class MethodFlagsNode(AstNode):
    """Applies the 'python' / 'fakeroot' prefixes of a function definition."""

    def __init__(self, filename, lineno, key, m):
        AstNode.__init__(self, filename, lineno)
        self.key = key
        self.m = m

    def eval(self, data):
        if data.getVar(self.key):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(self.key, 'python', None)
            data.setVarFlag(self.key, 'fakeroot', None)
        # Set each flag the definition carried; drop any stale one.
        for group, flagname in (("py", "python"), ("fr", "fakeroot")):
            if self.m.group(group) is not None:
                data.setVarFlag(self.key, flagname, "1")
            else:
                data.delVarFlag(self.key, flagname)
200 | |||
class ExportFuncsNode(AstNode):
    """AST node for EXPORT_FUNCTIONS: creates classname-unprefixed aliases
    (e.g. do_install -> classname_do_install) for the listed functions."""

    def __init__(self, filename, lineno, fns, classname):
        AstNode.__init__(self, filename, lineno)
        self.n = fns.split()
        self.classname = classname

    def eval(self, data):

        for func in self.n:
            calledfunc = self.classname + "_" + func

            # A recipe-provided definition (no export_func flag) wins over
            # the class default; leave it untouched.
            if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
                continue

            # Replacing a previously exported alias: clear stale flags first.
            if data.getVar(func):
                data.setVarFlag(func, 'python', None)
                data.setVarFlag(func, 'func', None)

            # Copy execution flags from the class function to the alias...
            for flag in [ "func", "python" ]:
                if data.getVarFlag(calledfunc, flag):
                    data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag))
            # ...and push the alias's dirs flag back onto the class function.
            for flag in [ "dirs" ]:
                if data.getVarFlag(func, flag):
                    data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))

            # The alias body just forwards to the class function.
            if data.getVarFlag(calledfunc, "python"):
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n")
            else:
                data.setVar(func, "    " + calledfunc + "\n")
            data.setVarFlag(func, 'export_func', '1')
231 | |||
class AddTaskNode(AstNode):
    """AST node for 'addtask name [before x] [after y]'."""

    def __init__(self, filename, lineno, func, before, after):
        AstNode.__init__(self, filename, lineno)
        self.func = func
        self.before = before
        self.after = after

    def eval(self, data):
        # Registration and ordering constraints are handled by bb.build.
        bb.build.addtask(self.func, self.before, self.after, data)
241 | |||
class DelTaskNode(AstNode):
    """AST node for 'deltask name'."""

    def __init__(self, filename, lineno, func):
        AstNode.__init__(self, filename, lineno)
        self.func = func

    def eval(self, data):
        # Task removal is delegated to bb.build.
        bb.build.deltask(self.func, data)
249 | |||
class BBHandlerNode(AstNode):
    """AST node for 'addhandler' lines registering event handlers."""

    def __init__(self, filename, lineno, fns):
        AstNode.__init__(self, filename, lineno)
        self.hs = fns.split()

    def eval(self, data):
        registered = data.getVar('__BBHANDLERS') or []
        for handler in self.hs:
            registered.append(handler)
            data.setVarFlag(handler, "handler", 1)
        data.setVar('__BBHANDLERS', registered)
261 | |||
class InheritNode(AstNode):
    """AST node for 'inherit' lines; the class list is resolved at eval time."""

    def __init__(self, filename, lineno, classes):
        AstNode.__init__(self, filename, lineno)
        self.classes = classes

    def eval(self, data):
        bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
269 | |||
# Factory helpers: the file parsers call these to append the matching AST
# node to `statements` for each recognised construct.

def handleInclude(statements, filename, lineno, m, force):
    statements.append(IncludeNode(filename, lineno, m.group(1), force))

def handleExport(statements, filename, lineno, m):
    statements.append(ExportNode(filename, lineno, m.group(1)))

def handleData(statements, filename, lineno, groupd):
    statements.append(DataNode(filename, lineno, groupd))

def handleMethod(statements, filename, lineno, func_name, body):
    statements.append(MethodNode(filename, lineno, func_name, body))

def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
    statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))

def handleMethodFlags(statements, filename, lineno, key, m):
    statements.append(MethodFlagsNode(filename, lineno, key, m))

def handleExportFuncs(statements, filename, lineno, m, classname):
    statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))

def handleAddTask(statements, filename, lineno, m):
    func = m.group("func")
    before = m.group("before")
    after = m.group("after")
    # A malformed addtask line with no task name registers nothing.
    if func is None:
        return

    statements.append(AddTaskNode(filename, lineno, func, before, after))

def handleDelTask(statements, filename, lineno, m):
    func = m.group("func")
    if func is None:
        return

    statements.append(DelTaskNode(filename, lineno, func))

def handleBBHandlers(statements, filename, lineno, m):
    statements.append(BBHandlerNode(filename, lineno, m.group(1)))

def handleInherit(statements, filename, lineno, m):
    classes = m.group(1)
    statements.append(InheritNode(filename, lineno, classes))
313 | |||
def finalize(fn, d, variant = None):
    """Finish off a parsed recipe: register event handlers, expand keys, run
    queued anonymous functions, build the task list and fire RecipeParsed."""
    # NOTE(review): all_handlers appears unused below — candidate for removal.
    all_handlers = {}
    for var in d.getVar('__BBHANDLERS') or []:
        # try to add the handler
        bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())

    bb.event.fire(bb.event.RecipePreFinalise(fn), d)

    bb.data.expandKeys(d)
    bb.data.update_data(d)
    # Execute all anonymous python functions in definition order via one exec.
    code = []
    for funcname in d.getVar("__BBANONFUNCS") or []:
        code.append("%s(d)" % funcname)
    bb.utils.better_exec("\n".join(code), {"d": d})
    bb.data.update_data(d)

    # Apply addtask/deltask collected during parsing.
    tasklist = d.getVar('__BBTASKS') or []
    deltasklist = d.getVar('__BBDELTASKS') or []
    bb.build.add_tasks(tasklist, deltasklist, d)

    bb.parse.siggen.finalise(fn, d, variant)

    d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))

    bb.event.fire(bb.event.RecipeParsed(fn), d)
339 | |||
def _create_variants(datastores, names, function):
    """For every existing datastore, create one new variant per name by
    copying it and applying `function`; results are keyed by variant name."""
    def make_variant(name, base_d, arg = None):
        variant_d = bb.data.createCopy(base_d)
        function(arg or name, variant_d)
        datastores[name] = variant_d

    for variant, variant_d in datastores.items():
        for name in names:
            if variant:
                # Nested variant: compose the key, but hand the bare
                # name through to `function`.
                make_variant("%s-%s" % (variant, name), variant_d, name)
            else:
                # Based on main recipe
                make_variant(name, variant_d)
353 | |||
def _expand_versions(versions):
    """Generator expanding BBVERSIONS range syntax: "1.[0-2]" yields
    "1.0", "1.1", "1.2".  Entries without a range pass through unchanged."""
    def expand_one(version, start, end):
        # Substitute only the first [from-to] occurrence per expansion step.
        for i in xrange(start, end + 1):
            ver = _bbversions_re.sub(str(i), version, 1)
            yield ver

    versions = iter(versions)
    while True:
        try:
            version = next(versions)
        except StopIteration:
            break

        range_ver = _bbversions_re.search(version)
        if not range_ver:
            yield version
        else:
            # Push the expanded entries back onto the front of the stream so
            # versions containing multiple ranges get fully expanded.
            newversions = expand_one(version, int(range_ver.group("from")),
                                     int(range_ver.group("to")))
            versions = itertools.chain(newversions, versions)
374 | |||
def multi_finalize(fn, d):
    """Finalize a recipe and all of its variants (BBVERSIONS, BBCLASSEXTEND).

    Returns a dict mapping variant name -> datastore; "" is the base recipe.
    """
    # Apply any .bbappend files queued against this recipe first.
    appends = (d.getVar("__BBAPPEND", True) or "").split()
    for append in appends:
        logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
        bb.parse.BBHandler.handle(append, d, True)

    onlyfinalise = d.getVar("__ONLYFINALISE", False)

    # Keep an unfinalized copy (safe_d) to derive variants from; finalize a
    # working copy for the base recipe.
    safe_d = d
    d = bb.data.createCopy(safe_d)
    try:
        finalize(fn, d)
    except bb.parse.SkipPackage as e:
        d.setVar("__SKIPPED", e.args[0])
    datastores = {"": safe_d}

    versions = (d.getVar("BBVERSIONS", True) or "").split()
    if versions:
        pv = orig_pv = d.getVar("PV", True)
        baseversions = {}

        def verfunc(ver, d, pv_d = None):
            # Point PV/BPV at this version and extend OVERRIDES so
            # version-specific overrides apply.
            if pv_d is None:
                pv_d = d

            overrides = d.getVar("OVERRIDES", True).split(":")
            pv_d.setVar("PV", ver)
            overrides.append(ver)
            bpv = baseversions.get(ver) or orig_pv
            pv_d.setVar("BPV", bpv)
            overrides.append(bpv)
            d.setVar("OVERRIDES", ":".join(overrides))

        versions = list(_expand_versions(versions))
        # "pv:bpv" entries map a version to its base version.
        for pos, version in enumerate(list(versions)):
            try:
                pv, bpv = version.split(":", 2)
            except ValueError:
                pass
            else:
                versions[pos] = pv
                baseversions[pv] = bpv

        if pv in versions and not baseversions.get(pv):
            versions.remove(pv)
        else:
            pv = versions.pop()

            # This is necessary because our existing main datastore
            # has already been finalized with the old PV, we need one
            # that's been finalized with the new PV.
            d = bb.data.createCopy(safe_d)
            verfunc(pv, d, safe_d)
            try:
                finalize(fn, d)
            except bb.parse.SkipPackage as e:
                d.setVar("__SKIPPED", e.args[0])

        _create_variants(datastores, versions, verfunc)

    extended = d.getVar("BBCLASSEXTEND", True) or ""
    if extended:
        # the following is to support bbextends with arguments, for e.g. multilib
        # an example is as follows:
        #   BBCLASSEXTEND = "multilib:lib32"
        # it will create foo-lib32, inheriting multilib.bbclass and set
        #   BBEXTENDCURR to "multilib" and BBEXTENDVARIANT to "lib32"
        extendedmap = {}
        variantmap = {}

        for ext in extended.split():
            eext = ext.split(':', 2)
            if len(eext) > 1:
                extendedmap[ext] = eext[0]
                variantmap[ext] = eext[1]
            else:
                extendedmap[ext] = ext

        pn = d.getVar("PN", True)
        def extendfunc(name, d):
            # Parameterised extend ("class:variant") vs plain class extend.
            if name != extendedmap[name]:
                d.setVar("BBEXTENDCURR", extendedmap[name])
                d.setVar("BBEXTENDVARIANT", variantmap[name])
            else:
                d.setVar("PN", "%s-%s" % (pn, name))
            bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)

        safe_d.setVar("BBCLASSEXTEND", extended)
        _create_variants(datastores, extendedmap.keys(), extendfunc)

    # Finalize every derived variant (optionally restricted by onlyfinalise).
    for variant, variant_d in datastores.iteritems():
        if variant:
            try:
                if not onlyfinalise or variant in onlyfinalise:
                    finalize(fn, variant_d, variant)
            except bb.parse.SkipPackage as e:
                variant_d.setVar("__SKIPPED", e.args[0])

    if len(datastores) > 1:
        variants = filter(None, datastores.iterkeys())
        safe_d.setVar("__VARIANTS", " ".join(variants))

    # The base entry becomes the finalized datastore.
    datastores[""] = d
    return datastores
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py new file mode 100644 index 0000000000..408890e48a --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
@@ -0,0 +1,267 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | class for handling .bb files | ||
6 | |||
7 | Reads a .bb file and obtains its metadata | ||
8 | |||
9 | """ | ||
10 | |||
11 | |||
12 | # Copyright (C) 2003, 2004 Chris Larson | ||
13 | # Copyright (C) 2003, 2004 Phil Blundell | ||
14 | # | ||
15 | # This program is free software; you can redistribute it and/or modify | ||
16 | # it under the terms of the GNU General Public License version 2 as | ||
17 | # published by the Free Software Foundation. | ||
18 | # | ||
19 | # This program is distributed in the hope that it will be useful, | ||
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
22 | # GNU General Public License for more details. | ||
23 | # | ||
24 | # You should have received a copy of the GNU General Public License along | ||
25 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
26 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
27 | |||
28 | from __future__ import absolute_import | ||
29 | import re, bb, os | ||
30 | import logging | ||
31 | import bb.build, bb.utils | ||
32 | from bb import data | ||
33 | |||
34 | from . import ConfHandler | ||
35 | from .. import resolve_file, ast, logger | ||
36 | from .ConfHandler import include, init | ||
37 | |||
38 | # For compatibility | ||
39 | bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"]) | ||
40 | |||
41 | __func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" ) | ||
42 | __inherit_regexp__ = re.compile( r"inherit\s+(.+)" ) | ||
43 | __export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" ) | ||
44 | __addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") | ||
45 | __deltask_regexp__ = re.compile("deltask\s+(?P<func>\w+)") | ||
46 | __addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" ) | ||
47 | __def_regexp__ = re.compile( r"def\s+(\w+).*:" ) | ||
48 | __python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" ) | ||
49 | |||
50 | |||
51 | __infunc__ = "" | ||
52 | __inpython__ = False | ||
53 | __body__ = [] | ||
54 | __classname__ = "" | ||
55 | |||
56 | cached_statements = {} | ||
57 | |||
58 | # We need to indicate EOF to the feeder. This code is so messy that | ||
59 | # factoring it out to a close_parse_file method is out of question. | ||
60 | # We will use the IN_PYTHON_EOF as an indicator to just close the method | ||
61 | # | ||
62 | # The two parts using it are tightly integrated anyway | ||
63 | IN_PYTHON_EOF = -9999999999999 | ||
64 | |||
65 | |||
66 | |||
def supports(fn, d):
    """Return True when fn looks like a BitBake recipe, class or include file."""
    extension = os.path.splitext(fn)[-1]
    return extension in (".bb", ".bbclass", ".inc")
70 | |||
def inherit(files, fn, lineno, d):
    """Inherit the given class files into datastore d, resolving bare class
    names against classes/<name>.bbclass on BBPATH and de-duplicating via
    the __inherit_cache variable."""
    __inherit_cache = d.getVar('__inherit_cache') or []
    files = d.expand(files).split()
    for file in files:
        # A bare name like "autotools" means classes/autotools.bbclass.
        if not os.path.isabs(file) and not file.endswith(".bbclass"):
            file = os.path.join('classes', '%s.bbclass' % file)

        if not os.path.isabs(file):
            # Search the including file's directory first, then BBPATH.
            dname = os.path.dirname(fn)
            bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
            abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
            # Probed-but-unused paths still influence the parse result.
            for af in attempts:
                if af != abs_fn:
                    bb.parse.mark_dependency(d, af)
            if abs_fn:
                file = abs_fn

        if not file in __inherit_cache:
            logger.log(logging.DEBUG - 1, "BB %s:%d: inheriting %s", fn, lineno, file)
            __inherit_cache.append( file )
            d.setVar('__inherit_cache', __inherit_cache)
            include(fn, file, lineno, d, "inherit")
            # Re-read the cache: the include may have inherited further
            # classes and extended it.
            __inherit_cache = d.getVar('__inherit_cache') or []
94 | |||
def get_statements(filename, absolute_filename, base_name):
    """Parse the file into an ast.StatementGroup, feeding it line by line
    through feeder().  Results for .bbclass/.inc files are cached by
    absolute path since they are included many times."""
    global cached_statements

    try:
        return cached_statements[absolute_filename]
    except KeyError:
        file = open(absolute_filename, 'r')
        statements = ast.StatementGroup()

        lineno = 0
        while True:
            lineno = lineno + 1
            s = file.readline()
            if not s: break
            s = s.rstrip()
            feeder(lineno, s, filename, base_name, statements)
        file.close()
        if __inpython__:
            # add a blank line to close out any python definition
            feeder(IN_PYTHON_EOF, "", filename, base_name, statements)

        # Only shared files are worth caching; recipes are parsed once.
        if filename.endswith(".bbclass") or filename.endswith(".inc"):
            cached_statements[absolute_filename] = statements
        return statements
119 | |||
def handle(fn, d, include):
    """Parse a .bb/.bbclass/.inc file into datastore d.

    include == 0 means a top-level recipe (returns a dict of variant
    datastores via multi_finalize); otherwise the file is being included
    into another parse and d itself is returned.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
    # Reset the module-level feeder state for this file.
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []


    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        # Classes register themselves in the inherit cache so repeated
        # inherits become no-ops.
        __classname__ = root
        __inherit_cache = d.getVar('__inherit_cache') or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            d.setVar('__inherit_cache', __inherit_cache)

    # Preserve the including file's FILE variable across nested parses.
    if include != 0:
        oldfile = d.getVar('FILE')
    else:
        oldfile = None

    abs_fn = resolve_file(fn, d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    # actual loading
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    if ext != ".bbclass":
        d.setVar('FILE', abs_fn)

    try:
        statements.eval(d)
    except bb.parse.SkipPackage:
        bb.data.setVar("__SKIPPED", True, d)
        if include == 0:
            return { "" : d }

    if ext != ".bbclass" and include == 0:
        return ast.multi_finalize(fn, d)

    if oldfile:
        d.setVar("FILE", oldfile)

    return d
175 | |||
def feeder(lineno, s, fn, root, statements):
    """
    Feed one (already rstripped) line into the BitBake parser state machine.

    Function bodies, python "def" blocks and backslash continuations are
    accumulated in module-level state (__infunc__, __inpython__, __body__,
    __residue__); completed constructs are appended to statements.  Lines
    matching no BitBake-specific construct fall through to
    ConfHandler.feeder().
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
    # Inside a shell-style function: accumulate lines until the closing '}'.
    if __infunc__:
        if s == '}':
            __body__.append('')
            ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    # Inside a python "def" block: keep collecting while lines still match
    # __python_func_regexp__; the first non-matching line (or the EOF
    # sentinel) terminates the block.
    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            ast.handlePythonMethod(statements, fn, lineno, __inpython__,
                                   root, __body__)
            __body__ = []
            __inpython__ = False

            if lineno == IN_PYTHON_EOF:
                return

    # Comments are not allowed in the middle of a backslash continuation.
    if s and s[0] == '#':
        if len(__residue__) != 0 and __residue__[0][0] != "#":
            bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))

    # The converse: a continuation that started as a comment must stay one.
    if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
        bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))

    # Backslash continuation: buffer the partial line for later joining.
    if s and s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    # Join any buffered continuation fragments into one logical line.
    s = "".join(__residue__) + s
    __residue__ = []

    # Skip empty lines
    if s == '':
        return

    # Skip comments
    if s[0] == '#':
        return

    # Function definition header (as matched by __func_start_regexp__);
    # nameless definitions become "__anonymous".
    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
        return

    # Start of a python "def" function: begin collecting its body.
    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = m.group(1)

        return

    m = __export_func_regexp__.match(s)
    if m:
        ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
        return

    m = __addtask_regexp__.match(s)
    if m:
        ast.handleAddTask(statements, fn, lineno, m)
        return

    m = __deltask_regexp__.match(s)
    if m:
        ast.handleDelTask(statements, fn, lineno, m)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        ast.handleBBHandlers(statements, fn, lineno, m)
        return

    m = __inherit_regexp__.match(s)
    if m:
        ast.handleInherit(statements, fn, lineno, m)
        return

    # Anything else (variable assignments, include/require/export) is
    # delegated to the configuration-file parser.
    return ConfHandler.feeder(lineno, s, fn, statements)
263 | |||
# Add us to the handlers list
from .. import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
# Don't leave the shared list bound as a module attribute of this handler.
del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py new file mode 100644 index 0000000000..978ebe4608 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
@@ -0,0 +1,189 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | class for handling configuration data files | ||
6 | |||
7 | Reads a .conf file and obtains its metadata | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # Copyright (C) 2003, 2004 Phil Blundell | ||
13 | # | ||
14 | # This program is free software; you can redistribute it and/or modify | ||
15 | # it under the terms of the GNU General Public License version 2 as | ||
16 | # published by the Free Software Foundation. | ||
17 | # | ||
18 | # This program is distributed in the hope that it will be useful, | ||
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
21 | # GNU General Public License for more details. | ||
22 | # | ||
23 | # You should have received a copy of the GNU General Public License along | ||
24 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
25 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
26 | |||
27 | import re, os | ||
28 | import logging | ||
29 | import bb.utils | ||
30 | from bb.parse import ParseError, resolve_file, ast, logger | ||
31 | |||
# Variable assignment:  [export] VAR[flag] (:=|??=|?=|+=|=+|=.|.=|=) "value"
# The named groups tell ast.handleData which operator and flag were used.
# NOTE(review): the two negative lookaheads appear to reject lines where
# the quote character reappears after the closing quote (ambiguous
# quoting) — confirm against ast.handleData before relying on this.
__config_regexp__ = re.compile( r"""
    ^
    (?P<exp>export\s*)?
    (?P<var>[a-zA-Z0-9\-~_+.${}/]+?)
    (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?

    \s* (
        (?P<colon>:=) |
        (?P<lazyques>\?\?=) |
        (?P<ques>\?=) |
        (?P<append>\+=) |
        (?P<prepend>=\+) |
        (?P<predot>=\.) |
        (?P<postdot>\.=) |
        =
    ) \s*

    (?!'[^']*'[^']*'$)
    (?!\"[^\"]*\"[^\"]*\"$)
    (?P<apo>['\"])
    (?P<value>.*)
    (?P=apo)
    $
    """, re.X)
# "include <file>" — feeder() forwards False as the error flag to
# ast.handleInclude (missing files tolerated; see include()'s error_out).
__include_regexp__ = re.compile( r"include\s+(.+)" )
# "require <file>" — same as include but with True (missing file is fatal).
__require_regexp__ = re.compile( r"require\s+(.+)" )
# Bare "export VAR" with no assignment.
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
59 | |||
def init(data):
    """Ensure TOPDIR is set in the datastore, defaulting to the cwd."""
    if not data.getVar('TOPDIR'):
        data.setVar('TOPDIR', os.getcwd())
64 | |||
65 | |||
def supports(fn, d):
    """Return True when fn names a configuration (.conf) file."""
    return fn.endswith(".conf")
68 | |||
def include(oldfn, fn, lineno, data, error_out):
    """
    Include the file fn from oldfn (the file currently being parsed).

    oldfn:  file the include statement appeared in; used for relative
            lookup and for error reporting
    fn:     file to include; expanded and, when not absolute, searched
            for next to oldfn and then along BBPATH
    lineno: line number of the include statement, for error reporting
    data:   datastore used for expansion and dependency tracking
    error_out: A string indicating the verb (e.g. "include", "inherit") to be
    used in a ParseError that will be raised if the file to be included could
    not be included. Specify False to avoid raising an error in this case.
    """
    if oldfn == fn: # prevent infinite recursion
        return None

    import bb
    fn = data.expand(fn)
    oldfn = data.expand(oldfn)

    if not os.path.isabs(fn):
        # Relative path: search the including file's directory first,
        # then the rest of BBPATH.
        dname = os.path.dirname(oldfn)
        bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
        abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
        if abs_fn and bb.parse.check_dependency(data, abs_fn):
            bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
        # Every candidate path tried is recorded as a dependency —
        # presumably so caches notice a file appearing earlier on BBPATH.
        for af in attempts:
            bb.parse.mark_dependency(data, af)
        if abs_fn:
            fn = abs_fn
    elif bb.parse.check_dependency(data, fn):
        bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

    from bb.parse import handle
    try:
        ret = handle(fn, data, True)
    except (IOError, OSError):
        if error_out:
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
        # Missing file tolerated; still mark it as a dependency so its
        # later appearance is noticed.
        logger.debug(2, "CONF file '%s' not found", fn)
        bb.parse.mark_dependency(data, fn)
103 | |||
# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
# particularly when the weak default might appear half way though parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
# Each entry is a callable invoked as filter(fn, data) at the end of handle().
confFilters = []
111 | |||
def handle(fn, data, include):
    """
    Parse the configuration file fn into the datastore data.

    fn:      path of the .conf file to parse
    data:    datastore the parsed statements are evaluated into
    include: 0 for a top-level parse; non-zero when the file is included
             from another file, in which case FILE is restored afterwards

    Returns the datastore, after running any registered confFilters.
    """
    init(data)

    if include == 0:
        oldfile = None
    else:
        oldfile = data.getVar('FILE')

    abs_fn = resolve_file(fn, data)

    if include:
        bb.parse.mark_dependency(data, abs_fn)

    statements = ast.StatementGroup()
    lineno = 0
    # 'with' guarantees the file is closed even if feeder() raises a
    # ParseError part-way through (the open()/close() pair leaked it).
    with open(abs_fn, 'r') as f:
        while True:
            lineno = lineno + 1
            s = f.readline()
            if not s:
                break
            # skip empty lines
            if not s.strip():
                continue
            s = s.rstrip()
            # Fold backslash-continued lines into one logical line.  The
            # extra "s and" guard avoids an IndexError when a lone "\"
            # line is continued by an empty line.
            while s and s[-1] == '\\':
                s2 = f.readline().strip()
                lineno = lineno + 1
                if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
                    bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
                s = s[:-1] + s2
            if not s:
                continue
            # skip comments
            if s[0] == '#':
                continue
            feeder(lineno, s, abs_fn, statements)

    # DONE WITH PARSING... time to evaluate
    data.setVar('FILE', abs_fn)
    statements.eval(data)
    if oldfile:
        data.setVar('FILE', oldfile)

    # Let UI-registered hooks adjust the parsed data; the loop variable is
    # renamed so it no longer shadows the file handle 'f'.
    for conf_filter in confFilters:
        conf_filter(fn, data)

    return data
161 | |||
def feeder(lineno, s, fn, statements):
    """
    Classify one logical configuration line and queue the matching AST
    statement onto 'statements'.

    Raises ParseError for a line that matches none of the known forms.
    """
    # Ordered dispatch table: first pattern to match wins, mirroring the
    # original if-chain (assignment, include, require, export).
    dispatch = (
        (__config_regexp__,  lambda m: ast.handleData(statements, fn, lineno, m.groupdict())),
        (__include_regexp__, lambda m: ast.handleInclude(statements, fn, lineno, m, False)),
        (__require_regexp__, lambda m: ast.handleInclude(statements, fn, lineno, m, True)),
        (__export_regexp__,  lambda m: ast.handleExport(statements, fn, lineno, m)),
    )

    for pattern, action in dispatch:
        match = pattern.match(s)
        if match:
            action(match)
            return

    raise ParseError("unparsed line: '%s'" % s, fn, lineno)
185 | |||
# Add us to the handlers list
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
# Don't leave the shared list bound as a module attribute of this handler.
del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py new file mode 100644 index 0000000000..3e658d0de9 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/__init__.py | |||
@@ -0,0 +1,33 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | """ | ||
5 | BitBake Parsers | ||
6 | |||
7 | File parsers for the BitBake build tools. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # Copyright (C) 2003, 2004 Phil Blundell | ||
13 | # | ||
14 | # This program is free software; you can redistribute it and/or modify | ||
15 | # it under the terms of the GNU General Public License version 2 as | ||
16 | # published by the Free Software Foundation. | ||
17 | # | ||
18 | # This program is distributed in the hope that it will be useful, | ||
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
21 | # GNU General Public License for more details. | ||
22 | # | ||
23 | # You should have received a copy of the GNU General Public License along | ||
24 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
25 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
26 | # | ||
27 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
28 | |||
29 | from __future__ import absolute_import | ||
30 | from . import ConfHandler | ||
31 | from . import BBHandler | ||
32 | |||
__version__ = '1.0'  # version of the parse_py handler package