diff options
Diffstat (limited to 'bitbake/lib/bb/parse')
| -rw-r--r-- | bitbake/lib/bb/parse/__init__.py | 216 | ||||
| -rw-r--r-- | bitbake/lib/bb/parse/ast.py | 620 | ||||
| -rw-r--r-- | bitbake/lib/bb/parse/parse_py/BBHandler.py | 306 | ||||
| -rw-r--r-- | bitbake/lib/bb/parse/parse_py/ConfHandler.py | 221 | ||||
| -rw-r--r-- | bitbake/lib/bb/parse/parse_py/__init__.py | 20 |
5 files changed, 0 insertions, 1383 deletions
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py deleted file mode 100644 index d428d8a4b4..0000000000 --- a/bitbake/lib/bb/parse/__init__.py +++ /dev/null | |||
| @@ -1,216 +0,0 @@ | |||
| 1 | """ | ||
| 2 | BitBake Parsers | ||
| 3 | |||
| 4 | File parsers for the BitBake build tools. | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | |||
| 9 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 10 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 11 | # | ||
| 12 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 13 | # | ||
| 14 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 15 | # | ||
| 16 | |||
| 17 | handlers = [] | ||
| 18 | |||
| 19 | import errno | ||
| 20 | import logging | ||
| 21 | import os | ||
| 22 | import stat | ||
| 23 | import bb | ||
| 24 | import bb.utils | ||
| 25 | import bb.siggen | ||
| 26 | |||
| 27 | logger = logging.getLogger("BitBake.Parsing") | ||
| 28 | |||
class ParseError(Exception):
    """Raised when a BitBake file cannot be parsed.

    Carries the offending filename and, when known, the line number
    (0 means "unknown line").
    """
    def __init__(self, msg, filename, lineno=0):
        self.msg = msg
        self.filename = filename
        self.lineno = lineno
        super().__init__(msg, filename, lineno)

    def __str__(self):
        # Only mention the line number when one was actually supplied.
        if not self.lineno:
            return "ParseError in %s: %s" % (self.filename, self.msg)
        return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
| 42 | |||
class SkipRecipe(Exception):
    """Raised during parsing to tell BitBake to skip this recipe entirely"""
| 45 | |||
class SkipPackage(SkipRecipe):
    """Deprecated alias kept for old metadata (use SkipRecipe in new code)"""
| 48 | |||
# Cache of per-file stat signatures: path -> (st_mtime_ns, st_size, st_ino).
__mtime_cache = {}
def cached_mtime(f):
    """Return the cached stat signature for *f*, stat()ing on a cache miss.

    Raises OSError if the file cannot be stat()ed.
    """
    cached = __mtime_cache.get(f)
    if cached is None:
        st = os.stat(f)
        cached = (st.st_mtime_ns, st.st_size, st.st_ino)
        __mtime_cache[f] = cached
    return cached
| 55 | |||
def cached_mtime_noerror(f):
    """Like cached_mtime(), but return 0 instead of raising when stat() fails."""
    try:
        return __mtime_cache[f]
    except KeyError:
        pass
    try:
        st = os.stat(f)
    except OSError:
        # Missing/unreadable file: report the "no signature" marker.
        return 0
    __mtime_cache[f] = (st.st_mtime_ns, st.st_size, st.st_ino)
    return __mtime_cache[f]
| 64 | |||
def check_mtime(f, mtime):
    """Re-stat *f* and report whether its signature still equals *mtime*.

    On success the cache entry is refreshed as a side effect; on stat()
    failure the comparison is made against 0 (the "missing file" marker).
    """
    try:
        st = os.stat(f)
    except OSError:
        return mtime == 0
    current = (st.st_mtime_ns, st.st_size, st.st_ino)
    __mtime_cache[f] = current
    return current == mtime
| 73 | |||
def update_mtime(f):
    """Force-refresh the cached stat signature for *f*.

    Returns the fresh signature, or 0 (dropping any stale cache entry)
    when the file no longer exists.
    """
    try:
        st = os.stat(f)
    except OSError:
        # File vanished: make sure no stale entry survives.
        __mtime_cache.pop(f, None)
        return 0
    __mtime_cache[f] = (st.st_mtime_ns, st.st_size, st.st_ino)
    return __mtime_cache[f]
| 83 | |||
def update_cache(f):
    """Refresh the stat signature for *f*, but only if it is already cached."""
    if f not in __mtime_cache:
        return
    logger.debug("Updating mtime cache for %s" % f)
    update_mtime(f)
| 88 | |||
def clear_cache():
    """Forget every cached stat() signature (e.g. between parse runs)."""
    global __mtime_cache
    __mtime_cache = {}
| 92 | |||
def mark_dependency(d, f):
    """Record file *f* (path plus stat signature) in the datastore's __depends list."""
    if f.startswith('./'):
        # Anchor cwd-relative paths so they stay valid after the cwd changes.
        f = "%s/%s" % (os.getcwd(), f[2:])
    deps = d.getVar('__depends', False) or []
    entry = (f, cached_mtime_noerror(f))
    if entry not in deps:
        deps.append(entry)
    d.setVar('__depends', deps)
| 101 | |||
def check_dependency(d, f):
    """Return True if *f* with its current stat signature is already recorded in __depends."""
    deps = d.getVar('__depends', False) or []
    return (f, cached_mtime_noerror(f)) in deps
| 106 | |||
def supports(fn, data):
    """Return 1 when a registered handler can parse *fn*, else 0 (legacy int API)."""
    matched = any(h['supports'](fn, data) for h in handlers)
    return 1 if matched else 0
| 113 | |||
def handle(fn, data, include=0, baseconfig=False):
    """Dispatch *fn* to the first registered handler that supports it.

    Raises ParseError when no handler accepts the file.
    """
    for h in handlers:
        if not h['supports'](fn, data):
            continue
        # Track the include chain while this file is being parsed.
        with data.inchistory.include(fn):
            return h['handle'](fn, data, include, baseconfig)
    raise ParseError("not a BitBake file", fn)
| 121 | |||
def init(fn, data):
    """Run the init hook of the first handler that supports *fn*.

    NOTE(review): 'supports' is invoked here with a single argument, unlike
    supports()/handle() above — presumably a legacy calling convention; confirm.
    """
    for h in handlers:
        if h['supports'](fn):
            return h['init'](data)
| 126 | |||
def init_parser(d):
    """(Re)initialise the global signature generator from datastore *d*."""
    if hasattr(bb.parse, "siggen"):
        # Shut down any previous generator cleanly before replacing it.
        bb.parse.siggen.exit()
    bb.parse.siggen = bb.siggen.init(d)
| 131 | |||
def resolve_file(fn, d):
    """Resolve *fn* (searching BBPATH when relative), mark dependencies, return the path.

    Raises IOError(ENOENT) when the file cannot be located.
    """
    if os.path.isabs(fn):
        mark_dependency(d, fn)
    else:
        bbpath = d.getVar("BBPATH")
        newfn, attempts = bb.utils.which(bbpath, fn, history=True)
        # Every candidate location inspected becomes a dependency too, so
        # a file appearing earlier in BBPATH later triggers a reparse.
        for af in attempts:
            mark_dependency(d, af)
        if not newfn:
            raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
        fn = newfn

    if not os.path.isfile(fn):
        raise IOError(errno.ENOENT, "file %s not found" % fn)

    return fn
| 148 | |||
# Used by OpenEmbedded metadata
__pkgsplit_cache__={}
def vars_from_file(mypkg, d):
    """Split a recipe filename into (pn, pv, pr), padding missing parts with None.

    Only .bb/.bbappend files are handled; anything else yields (None, None, None).
    Results are memoised in __pkgsplit_cache__.  Raises ParseError when the
    basename contains more than two underscores (ambiguous split).
    """
    if not mypkg or not mypkg.endswith((".bb", ".bbappend")):
        return (None, None, None)
    if mypkg in __pkgsplit_cache__:
        return __pkgsplit_cache__[mypkg]

    myfile = os.path.splitext(os.path.basename(mypkg))
    parts = myfile[0].split('_')
    if len(parts) > 3:
        # Validate BEFORE caching: previously the bogus split was cached first,
        # so a repeated call returned it silently instead of raising again.
        raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
    # Pad to exactly three entries (pn, pv, pr).
    parts.extend([None] * (3 - len(parts)))
    __pkgsplit_cache__[mypkg] = parts
    return parts
| 169 | |||
def get_file_depends(d):
    '''Return all parse-time file dependencies as one space-separated string of absolute paths'''
    depends = (d.getVar('__base_depends', False) or []) + (d.getVar('__depends', False) or [])
    # Entries are (filename, mtime-signature) tuples; only the paths matter here.
    return " ".join(os.path.abspath(fn) for (fn, _) in depends)
| 178 | |||
def vardeps(*varnames):
    """
    Function decorator that can be used to instruct the bitbake dependency
    parsing to add a dependency on the specified variables names

    Example:

        @bb.parse.vardeps("FOO", "BAR")
        def my_function():
            ...

    """
    def inner(f):
        # Accumulate into any set a previous decorator already attached.
        deps = getattr(f, "bb_vardeps", None)
        if deps is None:
            deps = set()
            f.bb_vardeps = deps
        deps.update(varnames)
        return f
    return inner
| 197 | |||
def vardepsexclude(*varnames):
    """
    Function decorator that can be used to instruct the bitbake dependency
    parsing to ignore dependencies on the specified variable names in the code

    Example:

        @bb.parse.vardepsexclude("FOO", "BAR")
        def my_function():
            ...
    """
    def inner(f):
        # Accumulate into any set a previous decorator already attached.
        excluded = getattr(f, "bb_vardepsexclude", None)
        if excluded is None:
            excluded = set()
            f.bb_vardepsexclude = excluded
        excluded.update(varnames)
        return f
    return inner
| 215 | |||
| 216 | from bb.parse.parse_py import __version__, ConfHandler, BBHandler | ||
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py deleted file mode 100644 index cfead466e1..0000000000 --- a/bitbake/lib/bb/parse/ast.py +++ /dev/null | |||
| @@ -1,620 +0,0 @@ | |||
| 1 | """ | ||
| 2 | AbstractSyntaxTree classes for the Bitbake language | ||
| 3 | """ | ||
| 4 | |||
| 5 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 7 | # Copyright (C) 2009 Holger Hans Peter Freyther | ||
| 8 | # | ||
| 9 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 10 | # | ||
| 11 | |||
| 12 | import sys | ||
| 13 | import bb | ||
| 14 | from bb import methodpool | ||
| 15 | from bb.parse import logger | ||
| 16 | |||
class StatementGroup(list):
    """An ordered collection of AST nodes evaluated as a unit."""

    def eval(self, data):
        # Evaluate in source order; each node mutates *data* as a side effect.
        for node in self:
            node.eval(data)
| 21 | |||
class AstNode(object):
    """Base class for all parse-tree nodes; records the source location."""
    def __init__(self, filename, lineno):
        self.filename = filename
        self.lineno = lineno
| 26 | |||
class IncludeNode(AstNode):
    """AST node for 'include' / 'require': pulls another file into the parse."""
    def __init__(self, filename, lineno, what_file, force):
        AstNode.__init__(self, filename, lineno)
        self.what_file = what_file
        # True for 'require' semantics: a missing file is then an error.
        self.force = force

    def eval(self, data):
        """
        Include the file and evaluate the statements
        """
        s = data.expand(self.what_file)
        logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)

        # TODO: Cache those includes... maybe not here though
        if self.force:
            bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
        else:
            bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
| 45 | |||
class IncludeAllNode(AstNode):
    """AST node for 'include_all': include the file from every BBPATH entry."""
    def __init__(self, filename, lineno, what_file):
        AstNode.__init__(self, filename, lineno)
        self.what_file = what_file

    def eval(self, data):
        """
        Include the file and evaluate the statements
        """
        s = data.expand(self.what_file)
        logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)

        # NOTE(review): 'os' does not appear among this module's visible
        # imports — confirm os.path is actually in scope here.
        for path in data.getVar("BBPATH").split(":"):
            bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False)
| 60 | |||
class ExportNode(AstNode):
    """AST node for 'export VAR': flags the variable for environment export."""
    def __init__(self, filename, lineno, var):
        AstNode.__init__(self, filename, lineno)
        self.var = var

    def eval(self, data):
        data.setVarFlag(self.var, "export", 1, op = 'exported')
| 68 | |||
class UnsetNode(AstNode):
    """AST node for 'unset VAR': deletes the variable from the datastore."""

    def __init__(self, filename, lineno, var):
        AstNode.__init__(self, filename, lineno)
        self.var = var

    def eval(self, data):
        # Pass the source location along so variable history records it.
        data.delVar(self.var,
                    variable=self.var,
                    file=self.filename,
                    line=self.lineno)
| 81 | |||
class UnsetFlagNode(AstNode):
    """AST node for 'unset VAR[flag]': deletes a single flag of a variable."""

    def __init__(self, filename, lineno, var, flag):
        AstNode.__init__(self, filename, lineno)
        self.var = var
        self.flag = flag

    def eval(self, data):
        # Pass the source location along so variable history records it.
        data.delVarFlag(self.var, self.flag,
                        variable=self.var,
                        file=self.filename,
                        line=self.lineno)
| 95 | |||
class DataNode(AstNode):
    """
    Various data related updates. For the sake of sanity
    we have one class doing all this. This means that all
    this need to be re-evaluated... we might be able to do
    that faster with multiple classes.
    """
    def __init__(self, filename, lineno, groupd):
        AstNode.__init__(self, filename, lineno)
        # groupd: the regex group dict from the assignment-line match; keys
        # such as 'var', 'value', 'exp', 'ques', 'lazyques', 'colon',
        # 'append', 'prepend', 'postdot', 'predot', 'flag' select the operator.
        self.groupd = groupd

    def getFunc(self, key, data):
        # Current (unexpanded) value of the variable or of its flag;
        # noweakdefault so a ??= default is not treated as an existing value.
        if 'flag' in self.groupd and self.groupd['flag'] is not None:
            return data.getVarFlag(key, self.groupd['flag'], expand=False, noweakdefault=True)
        else:
            return data.getVar(key, False, noweakdefault=True, parsing=True)

    def eval(self, data):
        """Apply the matched assignment to *data*, recording variable history."""
        groupd = self.groupd
        key = groupd["var"]
        # Source-location info threaded into every datastore mutation below.
        loginfo = {
            'variable': key,
            'file': self.filename,
            'line': self.lineno,
        }
        if "exp" in groupd and groupd["exp"] is not None:
            # 'export VAR = ...' form: flag for export in addition to setting.
            data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)

        op = "set"
        if "ques" in groupd and groupd["ques"] is not None:
            # VAR ?= value — only takes effect when no value exists yet.
            val = self.getFunc(key, data)
            op = "set?"
            if val is None:
                val = groupd["value"]
        elif "colon" in groupd and groupd["colon"] is not None:
            # VAR := value — expand immediately against a copy of the datastore.
            e = data.createCopy()
            op = "immediate"
            val = e.expand(groupd["value"], key + "[:=]")
        elif "append" in groupd and groupd["append"] is not None:
            # VAR += value — append with a separating space.
            op = "append"
            val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
        elif "prepend" in groupd and groupd["prepend"] is not None:
            # VAR =+ value — prepend with a separating space.
            op = "prepend"
            val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
        elif "postdot" in groupd and groupd["postdot"] is not None:
            # VAR .= value — append without a separator.
            op = "postdot"
            val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
        elif "predot" in groupd and groupd["predot"] is not None:
            # VAR =. value — prepend without a separator.
            op = "predot"
            val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
        else:
            val = groupd["value"]

        # Combining an override-style suffix with a +=/=+ style operator is
        # ambiguous metadata; warn the user.
        if ":append" in key or ":remove" in key or ":prepend" in key:
            if op in ["append", "prepend", "postdot", "predot", "ques"]:
                bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")

        # Lazy defaults (??=) are stored under _defaultval* names so a later
        # real assignment can override them.
        flag = None
        if 'flag' in groupd and groupd['flag'] is not None:
            if groupd["lazyques"]:
                flag = "_defaultval_flag_"+groupd['flag']
            else:
                flag = groupd['flag']
        elif groupd["lazyques"]:
            flag = "_defaultval"

        loginfo['op'] = op
        loginfo['detail'] = groupd["value"]

        if flag:
            data.setVarFlag(key, flag, val, **loginfo)
        else:
            data.setVar(key, val, parsing=True, **loginfo)
| 169 | |||
class MethodNode(AstNode):
    """AST node for a shell/python function definition, including anonymous python."""
    # Maps characters legal in recipe paths but not in Python identifiers,
    # used to synthesise names for anonymous functions.
    tr_tbl = str.maketrans('/.+-@%&~', '________')

    def __init__(self, filename, lineno, func_name, body, python, fakeroot):
        AstNode.__init__(self, filename, lineno)
        self.func_name = func_name
        self.body = body          # list of source lines forming the function body
        self.python = python      # True for 'python do_foo() {...}' style
        self.fakeroot = fakeroot  # True when declared with the fakeroot keyword

    def eval(self, data):
        text = '\n'.join(self.body)
        funcname = self.func_name
        if self.func_name == "__anonymous":
            # Give each anonymous function a unique, valid Python name derived
            # from its location, and wrap the body in a def taking 'd'.
            funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
            self.python = True
            text = "def %s(d):\n" % (funcname) + text
            # The lineno offset keeps tracebacks pointing at the recipe source.
            bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body) - 1)
            anonfuncs = data.getVar('__BBANONFUNCS', False) or []
            anonfuncs.append(funcname)
            data.setVar('__BBANONFUNCS', anonfuncs)
        if data.getVar(funcname, False):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.delVarFlag(funcname, 'python')
            data.delVarFlag(funcname, 'fakeroot')
        if self.python:
            data.setVarFlag(funcname, "python", "1")
        if self.fakeroot:
            data.setVarFlag(funcname, "fakeroot", "1")
        data.setVarFlag(funcname, "func", 1)
        data.setVar(funcname, text, parsing=True)
        data.setVarFlag(funcname, 'filename', self.filename)
        data.setVarFlag(funcname, 'lineno', str(self.lineno - len(self.body)))
| 204 | |||
class PythonMethodNode(AstNode):
    """AST node for a 'def name(...):' style python function in a recipe/class."""
    def __init__(self, filename, lineno, function, modulename, body):
        AstNode.__init__(self, filename, lineno)
        self.function = function      # function name to expose as a variable
        self.modulename = modulename  # methodpool module to insert the code into
        self.body = body              # list of source lines (the full def)

    def eval(self, data):
        # Note we will add root to parsedmethods after having parse
        # 'this' file. This means we will not parse methods from
        # bb classes twice
        text = '\n'.join(self.body)
        # The lineno offset keeps tracebacks pointing at the recipe source.
        bb.methodpool.insert_method(self.modulename, text, self.filename, self.lineno - len(self.body) - 1)
        data.setVarFlag(self.function, "func", 1)
        data.setVarFlag(self.function, "python", 1)
        data.setVar(self.function, text, parsing=True)
        data.setVarFlag(self.function, 'filename', self.filename)
        data.setVarFlag(self.function, 'lineno', str(self.lineno - len(self.body) - 1))
| 223 | |||
class ExportFuncsNode(AstNode):
    """AST node for EXPORT_FUNCTIONS: alias classname_func as plain func."""
    def __init__(self, filename, lineno, fns, classname):
        AstNode.__init__(self, filename, lineno)
        self.n = fns.split()          # function names to export
        self.classname = classname    # class providing the prefixed implementations

    def eval(self, data):

        # The sentinel marks bodies we generated, so a user-defined override
        # (which lacks it) is never clobbered on re-parse.
        sentinel = " # Export function set\n"
        for func in self.n:
            calledfunc = self.classname + "_" + func

            basevar = data.getVar(func, False)
            if basevar and sentinel not in basevar:
                # User supplied their own implementation — leave it alone.
                continue

            if data.getVar(func, False):
                # Reset flags from a previous export before re-deriving them.
                data.setVarFlag(func, 'python', None)
                data.setVarFlag(func, 'func', None)

            # Copy func/python flags from the implementation to the alias...
            for flag in [ "func", "python" ]:
                if data.getVarFlag(calledfunc, flag, False):
                    data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
            # ...and push execution-environment flags the other way.
            for flag in ["dirs", "cleandirs", "fakeroot"]:
                if data.getVarFlag(func, flag, False):
                    data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
            data.setVarFlag(func, "filename", "autogenerated")
            data.setVarFlag(func, "lineno", 1)

            if data.getVarFlag(calledfunc, "python", False):
                data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
            else:
                if "-" in self.classname:
                    bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
                data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
| 259 | |||
class AddTaskNode(AstNode):
    """AST node for 'addtask': registers tasks with before/after ordering."""

    def __init__(self, filename, lineno, tasks, before, after):
        AstNode.__init__(self, filename, lineno)
        self.tasks = tasks
        self.before = before
        self.after = after

    def eval(self, data):
        for taskname in self.tasks.split():
            bb.build.addtask(taskname, self.before, self.after, data)
| 271 | |||
class DelTaskNode(AstNode):
    """AST node for 'deltask': removes previously registered tasks."""

    def __init__(self, filename, lineno, tasks):
        AstNode.__init__(self, filename, lineno)
        self.tasks = tasks

    def eval(self, data):
        # Task names may contain variable references; expand before splitting.
        for taskname in data.expand(self.tasks).split():
            bb.build.deltask(taskname, data)
| 281 | |||
class BBHandlerNode(AstNode):
    """AST node for 'addhandler': registers event handler function names."""

    def __init__(self, filename, lineno, fns):
        AstNode.__init__(self, filename, lineno)
        self.hs = fns.split()

    def eval(self, data):
        bbhands = data.getVar('__BBHANDLERS', False) or []
        for handler in self.hs:
            bbhands.append(handler)
            data.setVarFlag(handler, "handler", 1)
        data.setVar('__BBHANDLERS', bbhands)
| 293 | |||
class PyLibNode(AstNode):
    """AST node for 'addpylib': makes a python library dir importable metadata-wide."""
    def __init__(self, filename, lineno, libdir, namespace):
        AstNode.__init__(self, filename, lineno)
        self.libdir = libdir        # directory to add to sys.path (unexpanded)
        self.namespace = namespace  # top-level package name to import

    def eval(self, data):
        # Make globally-declared modules available in the code-execution context.
        global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
        for m in global_mods:
            if m not in bb.utils._context:
                bb.utils._context[m] = __import__(m)

        libdir = data.expand(self.libdir)
        if libdir not in sys.path:
            sys.path.append(libdir)
        try:
            bb.utils._context[self.namespace] = __import__(self.namespace)
            # The package can list submodules to pull in via a BBIMPORTS attribute.
            toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
            for i in toimport:
                # __import__ of "pkg.sub" returns the top-level package object.
                bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
                mod = getattr(bb.utils._context[self.namespace], i)
                fn = getattr(mod, "__file__")
                # Register every public callable with the code parser so
                # signature generation can track their dependencies.
                funcs = {}
                for f in dir(mod):
                    if f.startswith("_"):
                        continue
                    fcall = getattr(mod, f)
                    if not callable(fcall):
                        continue
                    funcs[f] = fcall
                bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))

        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
| 328 | |||
class InheritNode(AstNode):
    """AST node for 'inherit': immediately inherit the named classes."""
    def __init__(self, filename, lineno, classes):
        AstNode.__init__(self, filename, lineno)
        self.classes = classes

    def eval(self, data):
        bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
| 336 | |||
class InheritDeferredNode(AstNode):
    """AST node for 'inherit_defer': records classes to inherit at finalise time."""

    def __init__(self, filename, lineno, classes):
        AstNode.__init__(self, filename, lineno)
        # Bundle everything inherit_defer() needs as a single tuple.
        self.inherit = (classes, filename, lineno)

    def eval(self, data):
        classes, filename, lineno = self.inherit
        bb.parse.BBHandler.inherit_defer(classes, filename, lineno, data)
| 344 | |||
class AddFragmentsNode(AstNode):
    """AST node for 'addfragments': include enabled configuration fragments.

    NOTE(review): this class uses os.path, but 'os' is not among this
    module's visible imports — confirm it is in scope.
    """
    def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable):
        AstNode.__init__(self, filename, lineno)
        self.fragments_path_prefix = fragments_path_prefix
        self.fragments_variable = fragments_variable
        self.flagged_variables_list_variable = flagged_variables_list_variable
        self.builtin_fragments_variable = builtin_fragments_variable

    def eval(self, data):
        # No need to use mark_dependency since we would only match a fragment
        # from a specific layer and there can only be a single layer with a
        # given namespace.
        def find_fragment(layers, layerid, full_fragment_name):
            # Locate the fragment file inside the one layer owning *layerid*.
            for layerpath in layers.split():
                candidate_fragment_path = os.path.join(layerpath, full_fragment_name)
                if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid:
                    return candidate_fragment_path
            return None

        def check_and_set_builtin_fragment(fragment, data, builtin_fragments):
            # Builtin fragments are "prefix/value" pairs mapped directly to a
            # variable assignment rather than an included file.
            prefix, value = fragment.split('/', 1)
            if prefix in builtin_fragments.keys():
                # parsing=True since we want to emulate X=Y and allow X:override=Z to continue to exist
                data.setVar(builtin_fragments[prefix], value, parsing=True)
                return True
            return False

        fragments = data.getVar(self.fragments_variable)
        layers = data.getVar('BBLAYERS')
        flagged_variables = data.getVar(self.flagged_variables_list_variable).split()
        # "prefix:VARNAME" entries -> {prefix: VARNAME}
        builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] }

        if not fragments:
            return

        # Check for multiple builtin fragments setting the same variable
        for builtin_fragment_key in builtin_fragments.keys():
            builtin_fragments_list = list(
                filter(
                    lambda f: f.startswith(builtin_fragment_key + "/"),
                    fragments.split(),
                )
            )
            if len(builtin_fragments_list) > 1:
                bb.warn(
                    ("Multiple builtin fragments are enabled for %s via variable %s: %s. "
                     "This likely points to a mis-configuration in the metadata, as only "
                     "one of them should be set. The build will use the last value.")
                    % (
                        builtin_fragment_key,
                        self.fragments_variable,
                        " ".join(builtin_fragments_list),
                    )
                )

        for f in fragments.split():
            if check_and_set_builtin_fragment(f, data, builtin_fragments):
                continue
            # Regular fragments are "layerid/name" and resolve to a .conf file.
            layerid, fragment_name = f.split('/', 1)
            full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name))
            fragment_path = find_fragment(layers, layerid, full_fragment_name)
            if fragment_path:
                bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment")
                # Move each flagged variable's value into a per-fragment flag
                # so later fragments cannot clobber it.
                for flagged_var in flagged_variables:
                    val = data.getVar(flagged_var)
                    data.setVarFlag(flagged_var, f, val)
                    data.setVar(flagged_var, None)
            else:
                bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers))
| 414 | |||
def handleInclude(statements, filename, lineno, m, force):
    """Append an IncludeNode for an include/require match *m*."""
    node = IncludeNode(filename, lineno, m.group(1), force)
    statements.append(node)
| 417 | |||
def handleIncludeAll(statements, filename, lineno, m):
    """Append an IncludeAllNode for an include_all match *m*."""
    node = IncludeAllNode(filename, lineno, m.group(1))
    statements.append(node)
| 420 | |||
def handleExport(statements, filename, lineno, m):
    """Append an ExportNode for an 'export VAR' match *m*."""
    node = ExportNode(filename, lineno, m.group(1))
    statements.append(node)
| 423 | |||
def handleUnset(statements, filename, lineno, m):
    """Append an UnsetNode for an 'unset VAR' match *m*."""
    node = UnsetNode(filename, lineno, m.group(1))
    statements.append(node)
| 426 | |||
def handleUnsetFlag(statements, filename, lineno, m):
    """Append an UnsetFlagNode for an 'unset VAR[flag]' match *m*."""
    node = UnsetFlagNode(filename, lineno, m.group(1), m.group(2))
    statements.append(node)
| 429 | |||
def handleData(statements, filename, lineno, groupd):
    """Append a DataNode for a variable assignment (group dict *groupd*)."""
    node = DataNode(filename, lineno, groupd)
    statements.append(node)
| 432 | |||
def handleMethod(statements, filename, lineno, func_name, body, python, fakeroot):
    """Append a MethodNode for a shell/python function definition."""
    node = MethodNode(filename, lineno, func_name, body, python, fakeroot)
    statements.append(node)
| 435 | |||
def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
    """Append a PythonMethodNode for a 'def name(...):' definition."""
    node = PythonMethodNode(filename, lineno, funcname, modulename, body)
    statements.append(node)
| 438 | |||
def handleExportFuncs(statements, filename, lineno, m, classname):
    """Append an ExportFuncsNode for an EXPORT_FUNCTIONS match *m*."""
    node = ExportFuncsNode(filename, lineno, m.group(1), classname)
    statements.append(node)
| 441 | |||
def handleAddTask(statements, filename, lineno, tasks, before, after):
    """Append an AddTaskNode for an 'addtask' directive."""
    node = AddTaskNode(filename, lineno, tasks, before, after)
    statements.append(node)
| 444 | |||
def handleDelTask(statements, filename, lineno, tasks):
    """Append a DelTaskNode for a 'deltask' directive."""
    node = DelTaskNode(filename, lineno, tasks)
    statements.append(node)
| 447 | |||
def handleBBHandlers(statements, filename, lineno, m):
    """Append a BBHandlerNode for an 'addhandler' match *m*."""
    node = BBHandlerNode(filename, lineno, m.group(1))
    statements.append(node)
| 450 | |||
def handlePyLib(statements, filename, lineno, m):
    """Append a PyLibNode for an 'addpylib' match *m* (libdir, namespace)."""
    node = PyLibNode(filename, lineno, m.group(1), m.group(2))
    statements.append(node)
| 453 | |||
def handleInherit(statements, filename, lineno, m):
    """Append an InheritNode for an 'inherit' match *m*."""
    node = InheritNode(filename, lineno, m.group(1))
    statements.append(node)
| 457 | |||
def handleInheritDeferred(statements, filename, lineno, m):
    """Append an InheritDeferredNode for an 'inherit_defer' match *m*."""
    node = InheritDeferredNode(filename, lineno, m.group(1))
    statements.append(node)
| 461 | |||
def handleAddFragments(statements, filename, lineno, m):
    """Append an AddFragmentsNode for an 'addfragments' match *m*.

    Groups 1-4: path prefix, fragments variable, flagged-variables list
    variable, builtin-fragments variable.
    """
    prefix, frag_var, flagged_var, builtin_var = m.group(1, 2, 3, 4)
    statements.append(AddFragmentsNode(filename, lineno, prefix, frag_var, flagged_var, builtin_var))
| 468 | |||
def runAnonFuncs(d):
    """Execute every accumulated anonymous function against datastore *d*."""
    calls = ["%s(d)" % funcname for funcname in (d.getVar("__BBANONFUNCS", False) or [])]
    bb.utils.better_exec("\n".join(calls), {"d": d})
| 474 | |||
# Handle recipe level PREFERRED_PROVIDERs
def handleVirtRecipeProviders(tasklist, d):
    """Rewrite DEPENDS and per-task 'depends' flags, replacing recipe-level
    virtual providers with their PREFERRED_PROVIDER_* selections."""
    depends = (d.getVar("DEPENDS") or "").split()
    virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split()

    newdeps = []
    for dep in depends:
        if dep not in virtprovs:
            newdeps.append(dep)
            continue
        newdep = d.getVar("PREFERRED_PROVIDER_" + dep)
        if not newdep:
            bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep)
        newdeps.append(newdep)
    d.setVar("DEPENDS", " ".join(newdeps))

    for task in tasklist:
        remapped = []
        for entry in (d.getVarFlag(task, "depends") or "").split():
            # Entries are "provider:taskname" pairs.
            provider, taskname = entry.split(":")
            if provider in virtprovs:
                provider = d.getVar("PREFERRED_PROVIDER_" + provider)
            remapped.append("%s:%s" % (provider, taskname))
        d.setVarFlag(task, "depends", " ".join(remapped))
| 498 | |||
def finalize(fn, d, variant = None):
    """Finish parsing recipe *fn*: run deferred inherits, handlers, anonymous
    functions and task setup, firing the recipe lifecycle events in order.

    Event handlers registered during finalisation are restored to the saved
    set afterwards, even on error.
    """
    saved_handlers = bb.event.get_handlers().copy()
    try:
        # Found renamed variables. Exit immediately
        if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
            raise bb.BBHandledException()

        inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])]
        bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d)

        # Process deferred inherits one at a time; an inherit may itself queue
        # further deferred inherits, hence the re-read each iteration.
        while True:
            inherits = d.getVar('__BBDEFINHERITS', False) or []
            if not inherits:
                break
            inherit, filename, lineno = inherits.pop(0)
            d.setVar('__BBDEFINHERITS', inherits)
            bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)

        for var in d.getVar('__BBHANDLERS', False) or []:
            # try to add the handler
            handlerfn = d.getVarFlag(var, "filename", False)
            if not handlerfn:
                bb.fatal("Undefined event handler function '%s'" % var)
            handlerln = int(d.getVarFlag(var, "lineno", False))
            bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln, data=d)

        bb.event.fire(bb.event.RecipePreFinalise(fn), d)

        bb.data.expandKeys(d)

        bb.event.fire(bb.event.RecipePostKeyExpansion(fn), d)

        runAnonFuncs(d)

        tasklist = d.getVar('__BBTASKS', False) or []
        bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
        handleVirtRecipeProviders(tasklist, d)
        bb.build.add_tasks(tasklist, d)

        bb.parse.siggen.finalise(fn, d, variant)

        d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))

        if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
            bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.")

        bb.event.fire(bb.event.RecipeParsed(fn), d)
    finally:
        bb.event.set_handlers(saved_handlers)
| 548 | |||
def _create_variants(datastores, names, function, onlyfinalise):
    """Populate *datastores* with per-variant datastore copies.

    For every existing datastore and every name in *names*, a copy of the
    base datastore is created, customised by calling *function* on it, and
    stored under the combined variant key.  Variants not listed in
    *onlyfinalise* (when set) are skipped.
    """
    def add_variant(key, base_d, arg=None):
        # Honour any restriction on which variants should be produced.
        if onlyfinalise and key not in onlyfinalise:
            return
        variant_d = bb.data.createCopy(base_d)
        function(arg or key, variant_d)
        datastores[key] = variant_d

    # Snapshot the keys up front: we insert into datastores while iterating.
    for existing in list(datastores):
        for name in names:
            if existing:
                add_variant("%s-%s" % (existing, name), datastores[existing], name)
            else:
                # Variant derived directly from the main recipe.
                add_variant(name, datastores[""])
| 564 | |||
def multi_finalize(fn, d):
    """Finalise a recipe and all of its BBCLASSEXTEND variants.

    Applies any queued .bbappend files, finalises the base recipe on a copy
    of the datastore, then creates and finalises one datastore per
    BBCLASSEXTEND variant.  Returns a dict mapping variant name (empty
    string for the base recipe) to its finalised datastore.
    """
    # Apply bbappends first so their changes are visible to every variant.
    appends = (d.getVar("__BBAPPEND") or "").split()
    for append in appends:
        logger.debug("Appending .bbappend file %s to %s", append, fn)
        bb.parse.BBHandler.handle(append, d, True)

    onlyfinalise = d.getVar("__ONLYFINALISE", False)

    # Finalise a copy so the pristine parsed datastore (safe_d) can be used
    # as the base for the variant datastores below.
    safe_d = d
    d = bb.data.createCopy(safe_d)
    try:
        finalize(fn, d)
    except bb.parse.SkipRecipe as e:
        d.setVar("__SKIPPED", e.args[0])
    datastores = {"": safe_d}

    extended = d.getVar("BBCLASSEXTEND") or ""
    if extended:
        # the following is to support bbextends with arguments, for e.g. multilib
        # an example is as follows:
        #   BBCLASSEXTEND = "multilib:lib32"
        # it will create foo-lib32, inheriting multilib.bbclass and set
        # BBEXTENDCURR to "multilib" and BBEXTENDVARIANT to "lib32"
        extendedmap = {}
        variantmap = {}

        for ext in extended.split():
            eext = ext.split(':', 2)
            if len(eext) > 1:
                extendedmap[ext] = eext[0]
                variantmap[ext] = eext[1]
            else:
                extendedmap[ext] = ext

        pn = d.getVar("PN")
        def extendfunc(name, d):
            # "class:variant" extends advertise the class and variant via
            # BBEXTENDCURR/BBEXTENDVARIANT; plain extends just rename PN.
            if name != extendedmap[name]:
                d.setVar("BBEXTENDCURR", extendedmap[name])
                d.setVar("BBEXTENDVARIANT", variantmap[name])
            else:
                d.setVar("PN", "%s-%s" % (pn, name))
            bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d)

        safe_d.setVar("BBCLASSEXTEND", extended)
        _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)

    # Finalise each variant datastore; the base recipe ("") was done above.
    for variant in datastores.keys():
        if variant:
            try:
                if not onlyfinalise or variant in onlyfinalise:
                    finalize(fn, datastores[variant], variant)
            except bb.parse.SkipRecipe as e:
                datastores[variant].setVar("__SKIPPED", e.args[0])

    datastores[""] = d
    return datastores
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py deleted file mode 100644 index 008fec2308..0000000000 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ /dev/null | |||
| @@ -1,306 +0,0 @@ | |||
| 1 | """ | ||
| 2 | class for handling .bb files | ||
| 3 | |||
| 4 | Reads a .bb file and obtains its metadata | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | |||
| 9 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 10 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 11 | # | ||
| 12 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 13 | # | ||
| 14 | |||
| 15 | import re, bb, os | ||
| 16 | import bb.build, bb.utils, bb.data_smart | ||
| 17 | |||
| 18 | from . import ConfHandler | ||
| 19 | from .. import resolve_file, ast, logger, ParseError | ||
| 20 | from .ConfHandler import include, init | ||
| 21 | |||
# Regexps classifying the logical lines of a .bb/.bbclass file.
# Shell/python function start: optional "python"/"fakeroot" keywords, an
# optional function name, then "() {" at end of line.
__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
# addtask/deltask capture the directive body and any trailing comment
# separately so the comment can be ignored.
__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
# A line belonging to the body of a "def" python function: indented,
# blank, or a comment.
__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
__python_tab_regexp__ = re.compile(r" *\t")

# Mutable parser state shared between successive feeder() calls.
__infunc__ = []       # [name, fn, lineno, is_python, is_fakeroot] while inside a brace-function
__inpython__ = False  # function name while inside a "def" python function, else False
__body__ = []         # accumulated body lines of the function being parsed
__classname__ = ""    # class name when parsing a .bbclass file
__residue__ = []      # accumulated pieces of a backslash-continued line

# Cache of parsed statement trees, keyed by absolute filename
# (only populated for .bbclass and .inc files).
cached_statements = {}
| 40 | |||
def supports(fn, d):
    """Return True if fn has a supported extension"""
    ext = os.path.splitext(fn)[-1]
    return ext in (".bb", ".bbclass", ".inc")
| 44 | |||
def inherit_defer(expression, fn, lineno, d):
    """Queue *expression* for deferred inheritance.

    The (expression, filename, lineno) triple is appended to the
    __BBDEFINHERITS list in the datastore; finalize() drains that list
    once the recipe has been fully parsed.
    """
    deferred = d.getVar('__BBDEFINHERITS', False) or []
    deferred.append((expression, fn, lineno))
    d.setVar('__BBDEFINHERITS', deferred)
| 50 | |||
def inherit(files, fn, lineno, d, deferred=False):
    """Inherit one or more bbclass files into datastore d.

    *files* is a whitespace-separated (and expandable) list of class names
    or paths.  Classes listed in BB_DEFER_BBCLASSES are queued for deferred
    inheritance unless this call is itself processing deferred inherits.
    Each class is searched for under classes-<__bbclasstype>/ then classes/
    along BBPATH.  Raises ParseError when a class cannot be found or read.
    """
    __inherit_cache = d.getVar('__inherit_cache', False) or []
    #if "${" in files and not deferred:
    #    bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
    files = d.expand(files).split()
    # BB_DEFER_BBCLASSES is invariant for the duration of this call; look it
    # up once instead of once per inherited class (was inside the loop).
    defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split()
    for file in files:
        if not deferred and file in defer:
            inherit_defer(file, fn, lineno, d)
            continue
        classtype = d.getVar("__bbclasstype", False)
        origfile = file
        # Try the classtype-specific directory first, then the generic one.
        for t in ["classes-" + classtype, "classes"]:
            file = origfile
            if not os.path.isabs(file) and not file.endswith(".bbclass"):
                file = os.path.join(t, '%s.bbclass' % file)

            if not os.path.isabs(file):
                bbpath = d.getVar("BBPATH")
                abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
                # Record every unsuccessful candidate path so that creating
                # a file earlier in BBPATH later triggers a reparse.
                for af in attempts:
                    if af != abs_fn:
                        bb.parse.mark_dependency(d, af)
                if abs_fn:
                    file = abs_fn

            if os.path.exists(file):
                break

        if not os.path.exists(file):
            raise ParseError("Could not inherit file %s" % (file), fn, lineno)

        if not file in __inherit_cache:
            logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
            __inherit_cache.append( file )
            d.setVar('__inherit_cache', __inherit_cache)
        try:
            bb.parse.handle(file, d, True)
        except (IOError, OSError) as exc:
            # Fix: report the file we failed to inherit, not the parent
            # recipe (the original formatted "fn" into the message).
            raise ParseError("Could not inherit file %s: %s" % (file, exc.strerror), fn, lineno)
        # Handling the class may have inherited further classes; refresh.
        __inherit_cache = d.getVar('__inherit_cache', False) or []
| 92 | |||
def get_statements(filename, absolute_filename, base_name):
    """Parse *absolute_filename* into an ast.StatementGroup.

    Results for .bbclass and .inc files are cached by absolute path since
    they are typically included by many recipes.  Raises ParseError if
    unfinished multiline data or an unclosed function remains at EOF.
    """
    global cached_statements, __residue__, __body__

    if absolute_filename in cached_statements:
        return cached_statements[absolute_filename]

    with open(absolute_filename, 'r') as f:
        statements = ast.StatementGroup()

        lineno = 0
        while True:
            lineno = lineno + 1
            s = f.readline()
            if not s:
                break
            feeder(lineno, s.rstrip(), filename, base_name, statements)

        if __inpython__:
            # add a blank line to close out any python definition
            feeder(lineno, "", filename, base_name, statements, eof=True)

    if __residue__:
        raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
    if __body__:
        raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)

    # Only class and include files are worth caching; recipes are parsed once.
    if filename.endswith(".bbclass") or filename.endswith(".inc"):
        cached_statements[absolute_filename] = statements
    return statements
| 122 | |||
def handle(fn, d, include, baseconfig=False):
    """Parse a .bb/.bbclass/.inc file into datastore d.

    *include* is non-zero when the file is being included/inherited rather
    than parsed as a top-level recipe.  For a top-level recipe this returns
    the dict of finalised variant datastores from ast.multi_finalize();
    otherwise it returns d.  Raises ParseError for malformed input.
    """
    global __infunc__, __body__, __residue__, __classname__
    # Reset the module-level parser state machine for this file.
    __body__ = []
    __infunc__ = []
    __classname__ = ""
    __residue__ = []

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        __inherit_cache = d.getVar('__inherit_cache', False) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            d.setVar('__inherit_cache', __inherit_cache)

    # Remember the including file's FILE so it can be restored afterwards.
    if include != 0:
        oldfile = d.getVar('FILE', False)
    else:
        oldfile = None

    abs_fn = resolve_file(fn, d)

    # actual loading
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    if ext != ".bbclass" and abs_fn != oldfile:
        d.setVar('FILE', abs_fn)

    try:
        statements.eval(d)
    except bb.parse.SkipRecipe:
        d.setVar("__SKIPPED", True)
        if include == 0:
            return { "" : d }

    if __infunc__:
        raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
    if __residue__:
        # Fix: the format string takes two values but the original applied
        # "%" to the __residue__ list alone, which raised TypeError instead
        # of the intended ParseError.
        raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % (str(__residue__), fn), fn)

    if ext != ".bbclass" and include == 0:
        return ast.multi_finalize(fn, d)

    if ext != ".bbclass" and oldfile and abs_fn != oldfile:
        d.setVar("FILE", oldfile)

    return d
| 174 | |||
def feeder(lineno, s, fn, root, statements, eof=False):
    """Feed one line of a .bb/.bbclass file through the parser state machine.

    Accumulates function bodies and backslash continuations in module-level
    state, recognises BitBake directives (addtask, inherit, addhandler, ...)
    and appends the resulting AST nodes to *statements*.  *eof* is set for
    the synthetic final line that flushes a trailing python definition.
    Anything not recognised here falls through to ConfHandler.feeder().
    """
    global __inpython__, __infunc__, __body__, __residue__, __classname__

    # Check tabs in python functions:
    # - def py_funcname(): covered by __inpython__
    # - python(): covered by '__anonymous' == __infunc__[0]
    # - python funcname(): covered by __infunc__[3]
    if __inpython__ or (__infunc__ and ('__anonymous' == __infunc__[0] or __infunc__[3])):
        tab = __python_tab_regexp__.match(s)
        if tab:
            bb.warn('python should use 4 spaces indentation, but found tabs in %s, line %s' % (root, lineno))

    # Inside a brace-delimited function: accumulate until the closing '}'.
    if __infunc__:
        if s == '}':
            __body__.append('')
            ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__, __infunc__[3], __infunc__[4])
            __infunc__ = []
            __body__ = []
        else:
            __body__.append(s)
        return

    # Inside a "def" python function: indented/blank/comment lines belong
    # to the body; anything else (or EOF) terminates it.
    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and not eof:
            __body__.append(s)
            return
        else:
            ast.handlePythonMethod(statements, fn, lineno, __inpython__,
                                   root, __body__)
            __body__ = []
            __inpython__ = False

            if eof:
                return

    if s and s[0] == '#':
        if len(__residue__) != 0 and __residue__[0][0] != "#":
            bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))

    if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
        bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))

    # Backslash continuation: stash the fragment and wait for more input.
    if s and s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    # Skip empty lines
    if s == '':
        return

    # Skip comments
    if s[0] == '#':
        return

    m = __func_start_regexp__.match(s)
    if m:
        # [name, fn, lineno, is_python, is_fakeroot]
        __infunc__ = [m.group("func") or "__anonymous", fn, lineno, m.group("py") is not None, m.group("fr") is not None]
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        # Stores the function name; doubles as the "in python def" flag.
        __inpython__ = m.group(1)

        return

    m = __export_func_regexp__.match(s)
    if m:
        ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
        return

    m = __addtask_regexp__.match(s)
    if m:
        after = ""
        before = ""

        # This code splits on 'before' and 'after' instead of on whitespace so we can defer
        # evaluation to as late as possible.
        tasks = m.group(1).split(" before ")[0].split(" after ")[0]

        for exp in m.group(1).split(" before "):
            exp2 = exp.split(" after ")
            if len(exp2) > 1:
                after = after + " ".join(exp2[1:])

        for exp in m.group(1).split(" after "):
            exp2 = exp.split(" before ")
            if len(exp2) > 1:
                before = before + " ".join(exp2[1:])

        # Check and warn for having task with a keyword as part of task name
        taskexpression = s.split()
        for te in taskexpression:
            if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
                raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)

        if tasks is not None:
            ast.handleAddTask(statements, fn, lineno, tasks, before, after)
        return

    m = __deltask_regexp__.match(s)
    if m:
        task = m.group(1)
        if task is not None:
            ast.handleDelTask(statements, fn, lineno, task)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        ast.handleBBHandlers(statements, fn, lineno, m)
        return

    m = __inherit_regexp__.match(s)
    if m:
        ast.handleInherit(statements, fn, lineno, m)
        return

    m = __inherit_def_regexp__.match(s)
    if m:
        ast.handleInheritDeferred(statements, fn, lineno, m)
        return

    # Not a .bb-specific construct; try the configuration-file syntax.
    return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
| 302 | |||
# Add us to the handlers list
from .. import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
# Drop the module-level binding; the shared list lives in bb.parse.
del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py deleted file mode 100644 index 9ddbae123d..0000000000 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ /dev/null | |||
| @@ -1,221 +0,0 @@ | |||
| 1 | """ | ||
| 2 | class for handling configuration data files | ||
| 3 | |||
| 4 | Reads a .conf file and obtains its metadata | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 10 | # | ||
| 11 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 12 | # | ||
| 13 | |||
| 14 | import errno | ||
| 15 | import re | ||
| 16 | import os | ||
| 17 | import bb.utils | ||
| 18 | from bb.parse import ParseError, resolve_file, ast, logger, handle | ||
| 19 | |||
# Variable assignment line: optional "export", variable name, optional
# [flag], one of the assignment operators (:=, ??=, ?=, +=, =+, =., .=, =)
# and a quoted value.  The negative lookaheads reject lines with a stray
# third quote of the same kind; "apo" captures the quote character so the
# closing quote must match the opening one.
__config_regexp__ = re.compile( r"""
    ^
    (?P<exp>export\s+)?
    (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)
    (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])?

    (?P<whitespace>\s*) (
        (?P<colon>:=) |
        (?P<lazyques>\?\?=) |
        (?P<ques>\?=) |
        (?P<append>\+=) |
        (?P<prepend>=\+) |
        (?P<predot>=\.) |
        (?P<postdot>\.=) |
        =
    ) (?P<whitespace2>\s*)

    (?!'[^']*'[^']*'$)
    (?!\"[^\"]*\"[^\"]*\"$)
    (?P<apo>['\"])
    (?P<value>.*)
    (?P=apo)
    $
    """, re.X)
# Configuration directives, each capturing its argument(s).
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__includeall_regexp__ = re.compile( r"include_all\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
__addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" )
| 52 | |||
def init(data):
    """Configuration files need no per-parse initialisation."""
    return
| 55 | |||
def supports(fn, d):
    """Return True if *fn* names a configuration (.conf) file.

    Uses str.endswith rather than the original slice comparison
    (fn[-5:] == ".conf") — equivalent behaviour, clearer intent.
    """
    return fn.endswith(".conf")
| 58 | |||
def include(parentfn, fns, lineno, data, error_out):
    """
    Include each whitespace-separated file named in *fns*.

    error_out: A string indicating the verb (e.g. "include", "inherit") to be
    used in a ParseError that will be raised if the file to be included could
    not be included. Specify False to avoid raising an error in this case.
    """
    # Both the parent filename and the file list may contain variable
    # references, so expand before splitting.
    parentfn = data.expand(parentfn)
    for fn in data.expand(fns).split():
        include_single_file(parentfn, fn, lineno, data, error_out)
| 71 | |||
def include_single_file(parentfn, fn, lineno, data, error_out):
    """
    Helper function for include() which does not expand or split its parameters.
    """
    # A file including itself would recurse forever; silently ignore.
    if parentfn == fn:
        return None

    if not os.path.isabs(fn):
        # Search the parent file's directory first, then BBPATH.
        searchpath = "%s:%s" % (os.path.dirname(parentfn), data.getVar("BBPATH"))
        abs_fn, attempts = bb.utils.which(searchpath, fn, history=True)
        if abs_fn and bb.parse.check_dependency(data, abs_fn):
            logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE')))
        # Record every candidate path so that creating a file earlier in
        # the search path later invalidates the parse cache.
        for af in attempts:
            bb.parse.mark_dependency(data, af)
        if abs_fn:
            fn = abs_fn
    elif bb.parse.check_dependency(data, fn):
        logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE')))

    try:
        bb.parse.handle(fn, data, True)
    except (IOError, OSError) as exc:
        if exc.errno == errno.ENOENT:
            # A missing file is only fatal for "require"-style inclusion.
            if error_out:
                raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
            logger.debug2("CONF file '%s' not found", fn)
        elif error_out:
            raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
        else:
            raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
| 104 | |||
# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
# particularly when the weak default might appear half way through parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
# Each entry is a callable invoked as f(fn, data) after a file is handled.
confFilters = []
| 112 | |||
def handle(fn, data, include, baseconfig=False):
    """Parse configuration file *fn* into datastore *data*.

    Joins backslash-continued lines, feeds each logical line to feeder(),
    evaluates the resulting statements against *data* and finally runs the
    registered confFilters hooks.  *include* is non-zero when the file is
    being included from another file, in which case FILE is restored
    afterwards.  Returns *data*.
    """
    init(data)

    if include == 0:
        oldfile = None
    else:
        oldfile = data.getVar('FILE', False)

    abs_fn = resolve_file(fn, data)
    with open(abs_fn, 'r') as f:

        statements = ast.StatementGroup()
        lineno = 0
        while True:
            lineno = lineno + 1
            s = f.readline()
            if not s:
                break
            # Keep the original line/number for error reporting on
            # multiline expressions.
            origlineno = lineno
            origline = s
            w = s.strip()
            # skip empty lines
            if not w:
                continue
            s = s.rstrip()
            # Join backslash-continued lines into one logical line.
            while s[-1] == '\\':
                line = f.readline()
                origline += line
                s2 = line.rstrip()
                lineno = lineno + 1
                if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
                    bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))

                s = s[:-1] + s2
            # skip comments
            if s[0] == '#':
                continue
            feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)

    # DONE WITH PARSING... time to evaluate
    data.setVar('FILE', abs_fn)
    statements.eval(data)
    if oldfile:
        data.setVar('FILE', oldfile)

    # Let UI-registered hooks inspect/adjust the result of this file.
    for f in confFilters:
        f(fn, data)

    return data
| 162 | |||
| 163 | # baseconfig is set for the bblayers/layer.conf cookerdata config parsing | ||
| 164 | # The function is also used by BBHandler, conffile would be False | ||
# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
# The function is also used by BBHandler, conffile would be False
def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
    """Classify one logical configuration line and append its AST node.

    Raises ParseError for any line no recognised construct matches.
    """
    # Variable assignment (with flags, export and the various operators) is
    # by far the most common case, so it is tried first.
    m = __config_regexp__.match(s)
    if m:
        groupd = m.groupdict()
        if groupd['var'] == "":
            raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno)
        if not groupd['whitespace'] or not groupd['whitespace2']:
            logger.warning("%s:%s has a lack of whitespace around the assignment: '%s'" % (fn, lineno, s))
        ast.handleData(statements, fn, lineno, groupd)
        return

    # Unconditional directives, tried in the original order.
    for regexp, handler in (
            (__include_regexp__, lambda m: ast.handleInclude(statements, fn, lineno, m, False)),
            (__require_regexp__, lambda m: ast.handleInclude(statements, fn, lineno, m, True)),
            (__includeall_regexp__, lambda m: ast.handleIncludeAll(statements, fn, lineno, m)),
            (__export_regexp__, lambda m: ast.handleExport(statements, fn, lineno, m)),
            (__unset_regexp__, lambda m: ast.handleUnset(statements, fn, lineno, m)),
            (__unset_flag_regexp__, lambda m: ast.handleUnsetFlag(statements, fn, lineno, m)),
    ):
        m = regexp.match(s)
        if m:
            handler(m)
            return

    # addpylib is only honoured in base configuration files parsed as .conf.
    m = __addpylib_regexp__.match(s)
    if baseconfig and conffile and m:
        ast.handlePyLib(statements, fn, lineno, m)
        return

    m = __addfragments_regexp__.match(s)
    if m:
        ast.handleAddFragments(statements, fn, lineno, m)
        return

    raise ParseError("unparsed line: '%s'" % s, fn, lineno)
| 217 | |||
# Add us to the handlers list
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
# Drop the module-level binding; the shared list lives in bb.parse.
del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py deleted file mode 100644 index f508afa14e..0000000000 --- a/bitbake/lib/bb/parse/parse_py/__init__.py +++ /dev/null | |||
| @@ -1,20 +0,0 @@ | |||
| 1 | """ | ||
| 2 | BitBake Parsers | ||
| 3 | |||
| 4 | File parsers for the BitBake build tools. | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 10 | # | ||
| 11 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 12 | # | ||
| 13 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 14 | # | ||
| 15 | |||
| 16 | from __future__ import absolute_import | ||
| 17 | from . import ConfHandler | ||
| 18 | from . import BBHandler | ||
| 19 | |||
| 20 | __version__ = '1.0' | ||
