-rwxr-xr-x  bitbake/bin/bitbake-runtask                   |   6
-rwxr-xr-x  bitbake/bin/bitdoc                            |   2
-rw-r--r--  bitbake/doc/manual/usermanual.xml             |   2
-rw-r--r--  bitbake/lib/bb/build.py                       |   4
-rw-r--r--  bitbake/lib/bb/cache.py                       |   5
-rw-r--r--  bitbake/lib/bb/codeparser.py                  |   4
-rw-r--r--  bitbake/lib/bb/command.py                     |   5
-rw-r--r--  bitbake/lib/bb/cooker.py                      |  62
-rw-r--r--  bitbake/lib/bb/data.py                        |   4
-rw-r--r--  bitbake/lib/bb/data_smart.py                  |   2
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py              |  32
-rw-r--r--  bitbake/lib/bb/fetch/git.py                   |   6
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py             |  78
-rw-r--r--  bitbake/lib/bb/fetch2/git.py                  |   4
-rw-r--r--  bitbake/lib/bb/parse/__init__.py              |   6
-rw-r--r--  bitbake/lib/bb/parse/ast.py                   | 100
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py    |   2
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py  |  18
-rw-r--r--  bitbake/lib/bb/persist_data.py                |   6
-rw-r--r--  bitbake/lib/bb/providers.py                   |   8
-rw-r--r--  bitbake/lib/bb/runqueue.py                    |  26
-rw-r--r--  bitbake/lib/bb/siggen.py                      |   2
-rw-r--r--  bitbake/lib/bb/ui/crumbs/configurator.py      |   4
-rw-r--r--  bitbake/lib/bb/utils.py                       |   6

24 files changed, 196 insertions(+), 198 deletions(-)
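
The change is mechanical throughout: module-level helpers in bb.data that took the datastore as a trailing argument become method calls on the datastore object itself. A minimal before/after sketch of the pattern, assembled from lines that appear in this diff (d stands for any DataSmart datastore, as elsewhere in the commit):

    # old style: module-level functions, datastore passed in
    pn = bb.data.getVar("PN", d, True)
    bb.data.setVar("__BB_DONT_CACHE", "1", d)
    lockfile = bb.data.expand("${TOPDIR}/bitbake.lock", d)

    # new style: methods on the datastore itself
    pn = d.getVar("PN", True)
    d.setVar("__BB_DONT_CACHE", "1")
    lockfile = d.expand("${TOPDIR}/bitbake.lock")
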
diff --git a/bitbake/bin/bitbake-runtask b/bitbake/bin/bitbake-runtask
index 394b4c3ef9..6a0ecdc555 100755
--- a/bitbake/bin/bitbake-runtask
+++ b/bitbake/bin/bitbake-runtask
@@ -91,7 +91,7 @@ def register_idle_function(self, function, data):
 cooker = bb.cooker.BBCooker(config, register_idle_function, initialenv)
 config_data = cooker.configuration.data
 cooker.status = config_data
-cooker.handleCollections(bb.data.getVar("BBFILE_COLLECTIONS", config_data, 1))
+cooker.handleCollections(config_data.getVar("BBFILE_COLLECTIONS", 1))
 
 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
 buildfile = cooker.matchFile(fn)
@@ -108,9 +108,9 @@ if taskname.endswith("_setscene"):
 if hashdata:
     bb.parse.siggen.set_taskdata(hashdata["hashes"], hashdata["deps"])
     for h in hashdata["hashes"]:
-        bb.data.setVar("BBHASH_%s" % h, hashdata["hashes"][h], the_data)
+        the_data.setVar("BBHASH_%s" % h, hashdata["hashes"][h])
     for h in hashdata["deps"]:
-        bb.data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h], the_data)
+        the_data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h])
 
 ret = 0
 if dryrun != "True":
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
index 08a0173d72..576d88b574 100755
--- a/bitbake/bin/bitdoc
+++ b/bitbake/bin/bitdoc
@@ -462,7 +462,7 @@ def main():
     state_group = 2
 
     for key in bb.data.keys(documentation):
-        data = bb.data.getVarFlag(key, "doc", documentation)
+        data = documentation.getVarFlag(key, "doc")
         if not data:
             continue
diff --git a/bitbake/doc/manual/usermanual.xml b/bitbake/doc/manual/usermanual.xml
index 687503b1cb..fa53ace45f 100644
--- a/bitbake/doc/manual/usermanual.xml
+++ b/bitbake/doc/manual/usermanual.xml
@@ -186,7 +186,7 @@ include</literal> directive.</para>
         <title>Defining Python functions into the global Python namespace</title>
         <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
         <para><screen>def get_depends(bb, d):
-    if bb.data.getVar('SOMECONDITION', d, True):
+    if d.getVar('SOMECONDITION', True):
         return "dependencywithcond"
     else:
         return "dependency"
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 8937f083a1..fab2f62a5f 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -70,9 +70,9 @@ class TaskBase(event.Event):
 
     def __init__(self, t, d ):
         self._task = t
-        self._package = bb.data.getVar("PF", d, 1)
+        self._package = d.getVar("PF", 1)
         event.Event.__init__(self)
-        self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
+        self._message = "package %s: task %s: %s" % (d.getVar("PF", 1), t, bb.event.getName(self)[4:])
 
     def getTask(self):
         return self._task
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index fe35e132e9..6b7fa6f1a2 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -31,7 +31,6 @@
 import os
 import logging
 from collections import defaultdict
-import bb.data
 import bb.utils
 
 logger = logging.getLogger("BitBake.Cache")
@@ -260,7 +259,7 @@ class Cache(object):
         # It will be used in later for deciding whether we
         # need extra cache file dump/load support
         self.caches_array = caches_array
-        self.cachedir = bb.data.getVar("CACHE", data, True)
+        self.cachedir = data.getVar("CACHE", True)
         self.clean = set()
         self.checked = set()
         self.depends_cache = {}
@@ -283,7 +282,7 @@ class Cache(object):
         # If any of configuration.data's dependencies are newer than the
         # cache there isn't even any point in loading it...
         newest_mtime = 0
-        deps = bb.data.getVar("__base_depends", data)
+        deps = data.getVar("__base_depends")
 
         old_mtimes = [old_mtime for _, old_mtime in deps]
         old_mtimes.append(newest_mtime)
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index d425514481..2590e5cae7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -36,8 +36,8 @@ pythonparsecache = {}
 shellparsecache = {}
 
 def parser_cachefile(d):
-    cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
-                bb.data.getVar("CACHE", d, True))
+    cachedir = (d.getVar("PERSISTENT_DIR", True) or
+                d.getVar("CACHE", True))
     if cachedir in [None, '']:
         return None
     bb.utils.mkdirhier(cachedir)
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 41796f629e..1808f0c7e5 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -30,7 +30,6 @@ Commands are queued in a CommandQueue
 
 import bb.event
 import bb.cooker
-import bb.data
 
 async_cmds = {}
 sync_cmds = {}
@@ -162,7 +161,7 @@ class CommandsSync:
         if len(params) > 1:
             expand = params[1]
 
-        return bb.data.getVar(varname, command.cooker.configuration.data, expand)
+        return command.cooker.configuration.data.getVar(varname, expand)
 
     def setVariable(self, command, params):
         """
@@ -170,7 +169,7 @@ class CommandsSync:
         """
         varname = params[0]
         value = params[1]
-        bb.data.setVar(varname, value, command.cooker.configuration.data)
+        command.cooker.configuration.data.setVar(varname, value)
 
     def resetCooker(self, command, params):
         """
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 4f01cc10c6..5bbabfceb9 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -136,16 +136,16 @@ class BBCooker:
         self.loadConfigurationData()
 
         if not self.configuration.cmd:
-            self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
+            self.configuration.cmd = self.configuration.data.getVar("BB_DEFAULT_TASK", True) or "build"
 
         # Take a lock so only one copy of bitbake can run against a given build
         # directory at a time
-        lockfile = bb.data.expand("${TOPDIR}/bitbake.lock", self.configuration.data)
+        lockfile = self.configuration.data.expand("${TOPDIR}/bitbake.lock")
         self.lock = bb.utils.lockfile(lockfile, False, False)
         if not self.lock:
             bb.fatal("Only one copy of bitbake should be run against a build directory")
 
-        bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
+        bbpkgs = self.configuration.data.getVar('BBPKGS', True)
         if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
             self.configuration.pkgs_to_build.extend(bbpkgs.split())
 
@@ -174,7 +174,7 @@ class BBCooker:
         self.configuration.data = bb.data.init()
 
         if not self.server_registration_cb:
-            bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
+            self.configuration.data.setVar("BB_WORKERCONTEXT", "1")
 
         filtered_keys = bb.utils.approved_variables()
         bb.data.inheritFromOS(self.configuration.data, self.savedenv, filtered_keys)
@@ -189,13 +189,13 @@ class BBCooker:
             sys.exit(1)
 
         if not self.configuration.cmd:
-            self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
+            self.configuration.cmd = self.configuration.data.getVar("BB_DEFAULT_TASK", True) or "build"
 
     def parseConfiguration(self):
 
 
         # Change nice level if we're asked to
-        nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
+        nice = self.configuration.data.getVar("BB_NICE_LEVEL", True)
         if nice:
             curnice = os.nice(0)
             nice = int(nice) - curnice
@@ -293,7 +293,7 @@ class BBCooker:
             # this showEnvironment() code path doesn't use the cache
             self.parseConfiguration()
             self.status = bb.cache.CacheData(self.caches_array)
-            self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
+            self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
 
             fn = self.matchFile(buildfile)
         elif len(pkgs_to_build) == 1:
@@ -597,7 +597,7 @@ class BBCooker:
         bb.data.expandKeys(localdata)
 
         # Handle PREFERRED_PROVIDERS
-        for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, True) or "").split():
+        for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
             try:
                 (providee, provider) = p.split(':')
             except:
@@ -645,8 +645,8 @@ class BBCooker:
         # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
         conffiles = []
-        dep_files = bb.data.getVar('__depends', self.configuration.data) or set()
-        dep_files.union(bb.data.getVar('__base_depends', self.configuration.data) or set())
+        dep_files = self.configuration.data.getVar('__depends') or set()
+        dep_files.union(self.configuration.data.getVar('__base_depends') or set())
 
         for f in dep_files:
             if f[0].endswith(".conf"):
@@ -674,7 +674,7 @@ class BBCooker:
 
         matches = []
         p = re.compile(re.escape(filepattern))
-        bbpaths = bb.data.getVar('BBPATH', self.configuration.data, True).split(':')
+        bbpaths = self.configuration.data.getVar('BBPATH', True).split(':')
         for path in bbpaths:
             dirpath = os.path.join(path, directory)
             if os.path.exists(dirpath):
@@ -696,7 +696,7 @@ class BBCooker:
 
         data = self.configuration.data
         # iterate configs
-        bbpaths = bb.data.getVar('BBPATH', data, True).split(':')
+        bbpaths = data.getVar('BBPATH', True).split(':')
         for path in bbpaths:
             confpath = os.path.join(path, "conf", var)
             if os.path.exists(confpath):
@@ -801,16 +801,16 @@ class BBCooker:
             parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
             data = _parse(layerconf, data)
 
-            layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
+            layers = (data.getVar('BBLAYERS', True) or "").split()
 
             data = bb.data.createCopy(data)
             for layer in layers:
                 parselog.debug(2, "Adding layer %s", layer)
-                bb.data.setVar('LAYERDIR', layer, data)
+                data.setVar('LAYERDIR', layer)
                 data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
                 data.expandVarref('LAYERDIR')
 
-            bb.data.delVar('LAYERDIR', data)
+            data.delVar('LAYERDIR')
 
         if not data.getVar("BBPATH", True):
             raise SystemExit("The BBPATH variable is not set")
@@ -828,8 +828,8 @@ class BBCooker:
 
         # Nomally we only register event handlers at the end of parsing .bb files
         # We register any handlers we've found so far here...
-        for var in bb.data.getVar('__BBHANDLERS', data) or []:
-            bb.event.register(var, bb.data.getVar(var, data))
+        for var in data.getVar('__BBHANDLERS') or []:
+            bb.event.register(var, data.getVar(var))
 
         if data.getVar("BB_WORKERCONTEXT", False) is None:
             bb.fetch.fetcher_init(data)
@@ -848,7 +848,7 @@ class BBCooker:
         min_prio = 0
         for c in collection_list:
             # Get collection priority if defined explicitly
-            priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
+            priority = self.configuration.data.getVar("BBFILE_PRIORITY_%s" % c, 1)
             if priority:
                 try:
                     prio = int(priority)
@@ -861,7 +861,7 @@ class BBCooker:
                 collection_priorities[c] = None
 
             # Check dependencies and store information for priority calculation
-            deps = bb.data.getVar("LAYERDEPENDS_%s" % c, self.configuration.data, 1)
+            deps = self.configuration.data.getVar("LAYERDEPENDS_%s" % c, 1)
             if deps:
                 depnamelist = []
                 deplist = deps.split()
@@ -880,7 +880,7 @@ class BBCooker:
 
                     if dep in collection_list:
                         if depver:
-                            layerver = bb.data.getVar("LAYERVERSION_%s" % dep, self.configuration.data, 1)
+                            layerver = self.configuration.data.getVar("LAYERVERSION_%s" % dep, 1)
                             if layerver:
                                 try:
                                     lver = int(layerver)
@@ -913,7 +913,7 @@ class BBCooker:
         # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
         for c in collection_list:
             calc_layer_priority(c)
-            regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
+            regex = self.configuration.data.getVar("BBFILE_PATTERN_%s" % c, 1)
             if regex == None:
                 parselog.error("BBFILE_PATTERN_%s not defined" % c)
                 continue
@@ -928,9 +928,9 @@ class BBCooker:
         """
         Setup any variables needed before starting a build
         """
-        if not bb.data.getVar("BUILDNAME", self.configuration.data):
-            bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
-        bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)
+        if not self.configuration.data.getVar("BUILDNAME"):
+            self.configuration.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
+        self.configuration.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
 
     def matchFiles(self, bf):
         """
@@ -977,7 +977,7 @@ class BBCooker:
         # buildFile() doesn't use the cache
         self.parseConfiguration()
         self.status = bb.cache.CacheData(self.caches_array)
-        self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
+        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
 
         # If we are told to do the None task then query the default task
         if (task == None):
@@ -1021,7 +1021,7 @@ class BBCooker:
         taskdata = bb.taskdata.TaskData(self.configuration.abort)
         taskdata.add_provider(self.configuration.data, self.status, item)
 
-        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
+        buildname = self.configuration.data.getVar("BUILDNAME")
         bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)
 
         # Execute the runqueue
@@ -1098,7 +1098,7 @@ class BBCooker:
 
         self.buildSetVars()
 
-        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
+        buildname = self.configuration.data.getVar("BUILDNAME")
         bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)
 
         localdata = data.createCopy(self.configuration.data)
@@ -1132,16 +1132,16 @@ class BBCooker:
         del self.status
         self.status = bb.cache.CacheData(self.caches_array)
 
-        ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
+        ignore = self.configuration.data.getVar("ASSUME_PROVIDED", 1) or ""
         self.status.ignored_dependencies = set(ignore.split())
 
         for dep in self.configuration.extra_assume_provided:
             self.status.ignored_dependencies.add(dep)
 
-        self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
+        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", 1) )
 
         (filelist, masked) = self.collect_bbfiles()
-        bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
+        self.configuration.data.renameVar("__depends", "__base_depends")
 
         self.parser = CookerParser(self, filelist, masked)
         self.state = state.parsing
@@ -1232,7 +1232,7 @@ class BBCooker:
             if g not in newfiles:
                 newfiles.append(g)
 
-        bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
+        bbmask = self.configuration.data.getVar('BBMASK', 1)
 
         if bbmask:
             try:
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 7c1533cfa9..e5c408c5f2 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -266,7 +266,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
         seen |= deps
         newdeps = set()
         for dep in deps:
-            if bb.data.getVarFlag(dep, "func", d):
+            if d.getVarFlag(dep, "func"):
                 emit_var(dep, o, d, False) and o.write('\n')
                 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
         newdeps -= seen
@@ -319,7 +319,7 @@ def generate_dependencies(d):
     deps = {}
     values = {}
 
-    tasklist = bb.data.getVar('__BBTASKS', d) or []
+    tasklist = d.getVar('__BBTASKS') or []
     for task in tasklist:
         deps[task], values[task] = build_dependencies(task, keys, shelldeps, vardepvals, d)
         newdeps = deps[task]
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index ec4e9210b2..ea1347837c 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -146,7 +146,7 @@ class DataSmart(MutableMapping):
 
         return varparse
 
-    def expand(self, s, varname):
+    def expand(self, s, varname = None):
         return self.expandWithRefs(s, varname).value
 
 
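
This one-line default is what makes the shorter expand() calls elsewhere in the commit valid: varname now defaults to None, so callers that have no variable name to attribute the expansion to can simply omit it. For example, from the cooker.py hunk above:

    # before: varname was effectively mandatory, so callers went through bb.data.expand()
    lockfile = bb.data.expand("${TOPDIR}/bitbake.lock", self.configuration.data)

    # after: the two-argument form works, with varname defaulting to None
    lockfile = self.configuration.data.expand("${TOPDIR}/bitbake.lock")
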
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 5be4c9caea..ea98019207 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -154,7 +154,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY', 1) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -200,7 +200,7 @@ def fetcher_compare_revisions(d):
 def init(urls, d, setup = True):
     urldata = {}
 
-    fn = bb.data.getVar('FILE', d, 1)
+    fn = d.getVar('FILE', 1)
     if fn in urldata_cache:
         urldata = urldata_cache[fn]
 
@@ -243,7 +243,7 @@ def verify_checksum(u, ud, d):
                     'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                     ud.localpath, ud.md5_name, md5data,
                     ud.sha256_name, sha256data)
-        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
+        if d.getVar("BB_STRICT_CHECKSUM", True) == "1":
             raise FetchError("No checksum specified for %s." % u)
         return
 
@@ -276,7 +276,7 @@ def go(d, urls = None):
 
         if m.try_premirror(u, ud, d):
             # First try fetching uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
+            mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
             localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
         elif os.path.exists(ud.localfile):
             localpath = ud.localfile
@@ -291,7 +291,7 @@ def go(d, urls = None):
                 # Remove any incomplete file
                 bb.utils.remove(ud.localpath)
                 # Finally, try fetching uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
+                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                 localpath = try_mirrors (d, u, mirrors)
             if not localpath or not os.path.exists(localpath):
                 raise FetchError("Unable to fetch URL %s from any source." % u)
@@ -327,7 +327,7 @@ def checkstatus(d, urls = None):
         m = ud.method
         logger.debug(1, "Testing URL %s", u)
         # First try checking uri, u, from PREMIRRORS
-        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
+        mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
         ret = try_mirrors(d, u, mirrors, True)
         if not ret:
             # Next try checking from the original uri, u
@@ -335,7 +335,7 @@ def checkstatus(d, urls = None):
                 ret = m.checkstatus(u, ud, d)
             except:
                 # Finally, try checking uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
+                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                 ret = try_mirrors (d, u, mirrors, True)
 
         if not ret:
@@ -383,7 +383,7 @@ def get_srcrev(d):
     scms = []
 
     # Only call setup_localpath on URIs which supports_srcrev()
-    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
+    urldata = init(d.getVar('SRC_URI', 1).split(), d, False)
     for u in urldata:
         ud = urldata[u]
         if ud.method.supports_srcrev():
@@ -395,8 +395,8 @@ def get_srcrev(d):
         logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
         raise ParameterError
 
-    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
-        bb.data.setVar('__BB_DONT_CACHE', '1', d)
+    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+        d.setVar('__BB_DONT_CACHE', '1')
 
     if len(scms) == 1:
         return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
@@ -404,7 +404,7 @@ def get_srcrev(d):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
+    format = d.getVar('SRCREV_FORMAT', 1)
     if not format:
         logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
         raise ParameterError
@@ -539,8 +539,8 @@ class FetchData(object):
         else:
             self.md5_name = "md5sum"
             self.sha256_name = "sha256sum"
-        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
-        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
+        self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
+        self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
 
         for m in methods:
             if m.supports(url, self, d):
@@ -555,7 +555,7 @@ class FetchData(object):
             self.localpath = self.parm["localpath"]
             self.basename = os.path.basename(self.localpath)
         else:
-            premirrors = bb.data.getVar('PREMIRRORS', d, True)
+            premirrors = d.getVar('PREMIRRORS', True)
             local = ""
             if premirrors and self.url:
                 aurl = self.url.split(";")[0]
@@ -775,7 +775,7 @@ class Fetch(object):
 
         latest_rev = self._build_revision(url, ud, d)
         last_rev = localcounts.get(key + '_rev')
-        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
         count = None
         if uselocalcount:
             count = Fetch.localcount_internal_helper(ud, d)
@@ -803,7 +803,7 @@ class Fetch(object):
 
     def generate_revision_key(self, url, ud, d):
         key = self._revision_key(url, ud, d)
-        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
 
 from . import cvs
 from . import git
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 49c1cfe8f9..7160919d5a 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -34,7 +34,7 @@ class Git(Fetch):
         #
         # Only enable _sortable revision if the key is set
         #
-        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
+        if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
             self._sortable_buildindex = self._sortable_buildindex_disabled
     def supports(self, url, ud, d):
         """
@@ -220,7 +220,7 @@ class Git(Fetch):
 
     def generate_revision_key(self, url, ud, d, branch=False):
         key = self._revision_key(url, ud, d, branch)
-        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
 
     def _latest_revision(self, url, ud, d):
         """
@@ -276,7 +276,7 @@ class Git(Fetch):
                 del localcounts[oldkey + '_rev']
             localcounts[key + '_rev'] = last_rev
 
-        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
         count = None
         if uselocalcount:
             count = Fetch.localcount_internal_helper(ud, d)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index d3c761d00d..19a79fdbfe 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
| @@ -28,7 +28,7 @@ from __future__ import absolute_import | |||
| 28 | from __future__ import print_function | 28 | from __future__ import print_function |
| 29 | import os, re | 29 | import os, re |
| 30 | import logging | 30 | import logging |
| 31 | import bb.data, bb.persist_data, bb.utils | 31 | import bb.persist_data, bb.utils |
| 32 | from bb import data | 32 | from bb import data |
| 33 | 33 | ||
| 34 | __version__ = "2" | 34 | __version__ = "2" |
| @@ -211,7 +211,7 @@ def fetcher_init(d): | |||
| 211 | Calls before this must not hit the cache. | 211 | Calls before this must not hit the cache. |
| 212 | """ | 212 | """ |
| 213 | # When to drop SCM head revisions controlled by user policy | 213 | # When to drop SCM head revisions controlled by user policy |
| 214 | srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear" | 214 | srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" |
| 215 | if srcrev_policy == "cache": | 215 | if srcrev_policy == "cache": |
| 216 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | 216 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
| 217 | elif srcrev_policy == "clear": | 217 | elif srcrev_policy == "clear": |
| @@ -271,7 +271,7 @@ def verify_checksum(u, ud, d): | |||
| 271 | sha256data = bb.utils.sha256_file(ud.localpath) | 271 | sha256data = bb.utils.sha256_file(ud.localpath) |
| 272 | 272 | ||
| 273 | # If strict checking enabled and neither sum defined, raise error | 273 | # If strict checking enabled and neither sum defined, raise error |
| 274 | strict = bb.data.getVar("BB_STRICT_CHECKSUM", d, True) or None | 274 | strict = d.getVar("BB_STRICT_CHECKSUM", True) or None |
| 275 | if (strict and ud.md5_expected == None and ud.sha256_expected == None): | 275 | if (strict and ud.md5_expected == None and ud.sha256_expected == None): |
| 276 | raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n' | 276 | raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n' |
| 277 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', u, | 277 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', u, |
| @@ -336,8 +336,8 @@ def subprocess_setup(): | |||
| 336 | 336 | ||
| 337 | def get_autorev(d): | 337 | def get_autorev(d): |
| 338 | # only not cache src rev in autorev case | 338 | # only not cache src rev in autorev case |
| 339 | if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": | 339 | if d.getVar('BB_SRCREV_POLICY', True) != "cache": |
| 340 | bb.data.setVar('__BB_DONT_CACHE', '1', d) | 340 | d.setVar('__BB_DONT_CACHE', '1') |
| 341 | return "AUTOINC" | 341 | return "AUTOINC" |
| 342 | 342 | ||
| 343 | def get_srcrev(d): | 343 | def get_srcrev(d): |
| @@ -350,7 +350,7 @@ def get_srcrev(d): | |||
| 350 | """ | 350 | """ |
| 351 | 351 | ||
| 352 | scms = [] | 352 | scms = [] |
| 353 | fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d) | 353 | fetcher = Fetch(d.getVar('SRC_URI', True).split(), d) |
| 354 | urldata = fetcher.ud | 354 | urldata = fetcher.ud |
| 355 | for u in urldata: | 355 | for u in urldata: |
| 356 | if urldata[u].method.supports_srcrev(): | 356 | if urldata[u].method.supports_srcrev(): |
| @@ -365,7 +365,7 @@ def get_srcrev(d): | |||
| 365 | # | 365 | # |
| 366 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT | 366 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT |
| 367 | # | 367 | # |
| 368 | format = bb.data.getVar('SRCREV_FORMAT', d, True) | 368 | format = d.getVar('SRCREV_FORMAT', True) |
| 369 | if not format: | 369 | if not format: |
| 370 | raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") | 370 | raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") |
| 371 | 371 | ||
| @@ -400,7 +400,7 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []): | |||
| 400 | 'GIT_PROXY_IGNORE', 'SOCKS5_USER', 'SOCKS5_PASSWD'] | 400 | 'GIT_PROXY_IGNORE', 'SOCKS5_USER', 'SOCKS5_PASSWD'] |
| 401 | 401 | ||
| 402 | for var in exportvars: | 402 | for var in exportvars: |
| 403 | val = bb.data.getVar(var, d, True) | 403 | val = d.getVar(var, True) |
| 404 | if val: | 404 | if val: |
| 405 | cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) | 405 | cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) |
| 406 | 406 | ||
| @@ -440,7 +440,7 @@ def check_network_access(d, info = "", url = None): | |||
| 440 | """ | 440 | """ |
| 441 | log remote network access, and error if BB_NO_NETWORK is set | 441 | log remote network access, and error if BB_NO_NETWORK is set |
| 442 | """ | 442 | """ |
| 443 | if bb.data.getVar("BB_NO_NETWORK", d, True) == "1": | 443 | if d.getVar("BB_NO_NETWORK", True) == "1": |
| 444 | raise NetworkAccess(url, info) | 444 | raise NetworkAccess(url, info) |
| 445 | else: | 445 | else: |
| 446 | logger.debug(1, "Fetcher accessed the network with the command %s" % info) | 446 | logger.debug(1, "Fetcher accessed the network with the command %s" % info) |
| @@ -526,15 +526,15 @@ def srcrev_internal_helper(ud, d, name): | |||
| 526 | return ud.parm['tag'] | 526 | return ud.parm['tag'] |
| 527 | 527 | ||
| 528 | rev = None | 528 | rev = None |
| 529 | pn = bb.data.getVar("PN", d, True) | 529 | pn = d.getVar("PN", True) |
| 530 | if name != '': | 530 | if name != '': |
| 531 | rev = bb.data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True) | 531 | rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True) |
| 532 | if not rev: | 532 | if not rev: |
| 533 | rev = bb.data.getVar("SRCREV_%s" % name, d, True) | 533 | rev = d.getVar("SRCREV_%s" % name, True) |
| 534 | if not rev: | 534 | if not rev: |
| 535 | rev = bb.data.getVar("SRCREV_pn-%s" % pn, d, True) | 535 | rev = d.getVar("SRCREV_pn-%s" % pn, True) |
| 536 | if not rev: | 536 | if not rev: |
| 537 | rev = bb.data.getVar("SRCREV", d, True) | 537 | rev = d.getVar("SRCREV", True) |
| 538 | if rev == "INVALID": | 538 | if rev == "INVALID": |
| 539 | raise FetchError("Please set SRCREV to a valid value", ud.url) | 539 | raise FetchError("Please set SRCREV to a valid value", ud.url) |
| 540 | if rev == "AUTOINC": | 540 | if rev == "AUTOINC": |
| @@ -572,11 +572,11 @@ class FetchData(object): | |||
| 572 | if self.md5_name in self.parm: | 572 | if self.md5_name in self.parm: |
| 573 | self.md5_expected = self.parm[self.md5_name] | 573 | self.md5_expected = self.parm[self.md5_name] |
| 574 | else: | 574 | else: |
| 575 | self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d) | 575 | self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) |
| 576 | if self.sha256_name in self.parm: | 576 | if self.sha256_name in self.parm: |
| 577 | self.sha256_expected = self.parm[self.sha256_name] | 577 | self.sha256_expected = self.parm[self.sha256_name] |
| 578 | else: | 578 | else: |
| 579 | self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d) | 579 | self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) |
| 580 | 580 | ||
| 581 | self.names = self.parm.get("name",'default').split(',') | 581 | self.names = self.parm.get("name",'default').split(',') |
| 582 | 582 | ||
| @@ -600,7 +600,7 @@ class FetchData(object): | |||
| 600 | self.localpath = self.method.localpath(self.url, self, d) | 600 | self.localpath = self.method.localpath(self.url, self, d) |
| 601 | 601 | ||
| 602 | # Note: These files should always be in DL_DIR whereas localpath may not be. | 602 | # Note: These files should always be in DL_DIR whereas localpath may not be. |
| 603 | basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename), d) | 603 | basepath = d.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename)) |
| 604 | self.donestamp = basepath + '.done' | 604 | self.donestamp = basepath + '.done' |
| 605 | self.lockfile = basepath + '.lock' | 605 | self.lockfile = basepath + '.lock' |
| 606 | 606 | ||
| @@ -626,12 +626,12 @@ class FetchData(object): | |||
| 626 | if "srcdate" in self.parm: | 626 | if "srcdate" in self.parm: |
| 627 | return self.parm['srcdate'] | 627 | return self.parm['srcdate'] |
| 628 | 628 | ||
| 629 | pn = bb.data.getVar("PN", d, True) | 629 | pn = d.getVar("PN", True) |
| 630 | 630 | ||
| 631 | if pn: | 631 | if pn: |
| 632 | return bb.data.getVar("SRCDATE_%s" % pn, d, True) or bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True) | 632 | return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True) |
| 633 | 633 | ||
| 634 | return bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True) | 634 | return d.getVar("SRCDATE", True) or d.getVar("DATE", True) |
| 635 | 635 | ||
| 636 | class FetchMethod(object): | 636 | class FetchMethod(object): |
| 637 | """Base class for 'fetch'ing data""" | 637 | """Base class for 'fetch'ing data""" |
| @@ -703,7 +703,7 @@ class FetchMethod(object): | |||
| 703 | 703 | ||
| 704 | dots = file.split(".") | 704 | dots = file.split(".") |
| 705 | if dots[-1] in ['gz', 'bz2', 'Z']: | 705 | if dots[-1] in ['gz', 'bz2', 'Z']: |
| 706 | efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1]))) | 706 | efile = os.path.join(data.getVar('WORKDIR', True),os.path.basename('.'.join(dots[0:-1]))) |
| 707 | else: | 707 | else: |
| 708 | efile = file | 708 | efile = file |
| 709 | cmd = None | 709 | cmd = None |
| @@ -747,7 +747,7 @@ class FetchMethod(object): | |||
| 747 | dest = os.path.join(rootdir, os.path.basename(file)) | 747 | dest = os.path.join(rootdir, os.path.basename(file)) |
| 748 | if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): | 748 | if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): |
| 749 | if os.path.isdir(file): | 749 | if os.path.isdir(file): |
| 750 | filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True)) | 750 | filesdir = os.path.realpath(data.getVar("FILESDIR", True)) |
| 751 | destdir = "." | 751 | destdir = "." |
| 752 | if file[0:len(filesdir)] == filesdir: | 752 | if file[0:len(filesdir)] == filesdir: |
| 753 | destdir = file[len(filesdir):file.rfind('/')] | 753 | destdir = file[len(filesdir):file.rfind('/')] |
| @@ -779,7 +779,7 @@ class FetchMethod(object): | |||
| 779 | bb.utils.mkdirhier(newdir) | 779 | bb.utils.mkdirhier(newdir) |
| 780 | os.chdir(newdir) | 780 | os.chdir(newdir) |
| 781 | 781 | ||
| 782 | cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd) | 782 | cmd = "PATH=\"%s\" %s" % (data.getVar('PATH', True), cmd) |
| 783 | bb.note("Unpacking %s to %s/" % (file, os.getcwd())) | 783 | bb.note("Unpacking %s to %s/" % (file, os.getcwd())) |
| 784 | ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) | 784 | ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) |
| 785 | 785 | ||
| @@ -824,10 +824,10 @@ class FetchMethod(object): | |||
| 824 | 824 | ||
| 825 | localcount = None | 825 | localcount = None |
| 826 | if name != '': | 826 | if name != '': |
| 827 | pn = bb.data.getVar("PN", d, True) | 827 | pn = d.getVar("PN", True) |
| 828 | localcount = bb.data.getVar("LOCALCOUNT_" + name, d, True) | 828 | localcount = d.getVar("LOCALCOUNT_" + name, True) |
| 829 | if not localcount: | 829 | if not localcount: |
| 830 | localcount = bb.data.getVar("LOCALCOUNT", d, True) | 830 | localcount = d.getVar("LOCALCOUNT", True) |
| 831 | return localcount | 831 | return localcount |
| 832 | 832 | ||
| 833 | localcount_internal_helper = staticmethod(localcount_internal_helper) | 833 | localcount_internal_helper = staticmethod(localcount_internal_helper) |
| @@ -859,7 +859,7 @@ class FetchMethod(object): | |||
| 859 | 859 | ||
| 860 | latest_rev = self._build_revision(url, ud, d, name) | 860 | latest_rev = self._build_revision(url, ud, d, name) |
| 861 | last_rev = localcounts.get(key + '_rev') | 861 | last_rev = localcounts.get(key + '_rev') |
| 862 | uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False | 862 | uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False |
| 863 | count = None | 863 | count = None |
| 864 | if uselocalcount: | 864 | if uselocalcount: |
| 865 | count = FetchMethod.localcount_internal_helper(ud, d, name) | 865 | count = FetchMethod.localcount_internal_helper(ud, d, name) |
| @@ -887,7 +887,7 @@ class FetchMethod(object): | |||
| 887 | 887 | ||
| 888 | def generate_revision_key(self, url, ud, d, name): | 888 | def generate_revision_key(self, url, ud, d, name): |
| 889 | key = self._revision_key(url, ud, d, name) | 889 | key = self._revision_key(url, ud, d, name) |
| 890 | return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") | 890 | return "%s-%s" % (key, d.getVar("PN", True) or "") |
| 891 | 891 | ||
| 892 | class Fetch(object): | 892 | class Fetch(object): |
| 893 | def __init__(self, urls, d, cache = True): | 893 | def __init__(self, urls, d, cache = True): |
| @@ -897,7 +897,7 @@ class Fetch(object): | |||
| 897 | self.d = d | 897 | self.d = d |
| 898 | self.ud = {} | 898 | self.ud = {} |
| 899 | 899 | ||
| 900 | fn = bb.data.getVar('FILE', d, True) | 900 | fn = d.getVar('FILE', True) |
| 901 | if cache and fn in urldata_cache: | 901 | if cache and fn in urldata_cache: |
| 902 | self.ud = urldata_cache[fn] | 902 | self.ud = urldata_cache[fn] |
| 903 | 903 | ||
| @@ -913,7 +913,7 @@ class Fetch(object): | |||
| 913 | self.ud[url] = FetchData(url, self.d) | 913 | self.ud[url] = FetchData(url, self.d) |
| 914 | 914 | ||
| 915 | self.ud[url].setup_localpath(self.d) | 915 | self.ud[url].setup_localpath(self.d) |
| 916 | return bb.data.expand(self.ud[url].localpath, self.d) | 916 | return self.d.expand(self.ud[url].localpath) |
| 917 | 917 | ||
| 918 | def localpaths(self): | 918 | def localpaths(self): |
| 919 | """ | 919 | """ |
| @@ -935,8 +935,8 @@ class Fetch(object): | |||
| 935 | if len(urls) == 0: | 935 | if len(urls) == 0: |
| 936 | urls = self.urls | 936 | urls = self.urls |
| 937 | 937 | ||
| 938 | network = bb.data.getVar("BB_NO_NETWORK", self.d, True) | 938 | network = self.d.getVar("BB_NO_NETWORK", True) |
| 939 | premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1") | 939 | premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") |
| 940 | 940 | ||
| 941 | for u in urls: | 941 | for u in urls: |
| 942 | ud = self.ud[u] | 942 | ud = self.ud[u] |
| @@ -947,17 +947,17 @@ class Fetch(object): | |||
| 947 | lf = bb.utils.lockfile(ud.lockfile) | 947 | lf = bb.utils.lockfile(ud.lockfile) |
| 948 | 948 | ||
| 949 | try: | 949 | try: |
| 950 | bb.data.setVar("BB_NO_NETWORK", network, self.d) | 950 | self.d.setVar("BB_NO_NETWORK", network) |
| 951 | 951 | ||
| 952 | if not m.need_update(u, ud, self.d): | 952 | if not m.need_update(u, ud, self.d): |
| 953 | localpath = ud.localpath | 953 | localpath = ud.localpath |
| 954 | elif m.try_premirror(u, ud, self.d): | 954 | elif m.try_premirror(u, ud, self.d): |
| 955 | logger.debug(1, "Trying PREMIRRORS") | 955 | logger.debug(1, "Trying PREMIRRORS") |
| 956 | mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True)) | 956 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) |
| 957 | localpath = try_mirrors(self.d, ud, mirrors, False) | 957 | localpath = try_mirrors(self.d, ud, mirrors, False) |
| 958 | 958 | ||
| 959 | if premirroronly: | 959 | if premirroronly: |
| 960 | bb.data.setVar("BB_NO_NETWORK", "1", self.d) | 960 | self.d.setVar("BB_NO_NETWORK", "1") |
| 961 | 961 | ||
| 962 | if not localpath and m.need_update(u, ud, self.d): | 962 | if not localpath and m.need_update(u, ud, self.d): |
| 963 | try: | 963 | try: |
| @@ -979,7 +979,7 @@ class Fetch(object): | |||
| 979 | if os.path.isfile(ud.localpath): | 979 | if os.path.isfile(ud.localpath): |
| 980 | bb.utils.remove(ud.localpath) | 980 | bb.utils.remove(ud.localpath) |
| 981 | logger.debug(1, "Trying MIRRORS") | 981 | logger.debug(1, "Trying MIRRORS") |
| 982 | mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True)) | 982 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) |
| 983 | localpath = try_mirrors (self.d, ud, mirrors) | 983 | localpath = try_mirrors (self.d, ud, mirrors) |
| 984 | 984 | ||
| 985 | if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): | 985 | if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): |
| @@ -1004,7 +1004,7 @@ class Fetch(object): | |||
| 1004 | m = ud.method | 1004 | m = ud.method |
| 1005 | logger.debug(1, "Testing URL %s", u) | 1005 | logger.debug(1, "Testing URL %s", u) |
| 1006 | # First try checking uri, u, from PREMIRRORS | 1006 | # First try checking uri, u, from PREMIRRORS |
| 1007 | mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True)) | 1007 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) |
| 1008 | ret = try_mirrors(self.d, ud, mirrors, True) | 1008 | ret = try_mirrors(self.d, ud, mirrors, True) |
| 1009 | if not ret: | 1009 | if not ret: |
| 1010 | # Next try checking from the original uri, u | 1010 | # Next try checking from the original uri, u |
| @@ -1012,7 +1012,7 @@ class Fetch(object): | |||
| 1012 | ret = m.checkstatus(u, ud, self.d) | 1012 | ret = m.checkstatus(u, ud, self.d) |
| 1013 | except: | 1013 | except: |
| 1014 | # Finally, try checking uri, u, from MIRRORS | 1014 | # Finally, try checking uri, u, from MIRRORS |
| 1015 | mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True)) | 1015 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) |
| 1016 | ret = try_mirrors (self.d, ud, mirrors, True) | 1016 | ret = try_mirrors (self.d, ud, mirrors, True) |
| 1017 | 1017 | ||
| 1018 | if not ret: | 1018 | if not ret: |
| @@ -1030,7 +1030,7 @@ class Fetch(object): | |||
| 1030 | ud = self.ud[u] | 1030 | ud = self.ud[u] |
| 1031 | ud.setup_localpath(self.d) | 1031 | ud.setup_localpath(self.d) |
| 1032 | 1032 | ||
| 1033 | if bb.data.expand(self.localpath, self.d) is None: | 1033 | if self.d.expand(self.localpath) is None: |
| 1034 | continue | 1034 | continue |
| 1035 | 1035 | ||
| 1036 | if ud.lockfile: | 1036 | if ud.lockfile: |
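The hunks above also trace the fetcher's fallback order, which is easy to lose in the diff noise: each URL is tried against PREMIRRORS first, then the original URI, then MIRRORS, and BB_FETCH_PREMIRRORONLY pins BB_NO_NETWORK to "1" before the upstream attempt. A minimal sketch of that order; the three `try_*` callables are hypothetical stand-ins, not BitBake API:

```python
# Sketch of the download() fallback order in the hunks above. The
# try_premirror/try_upstream/try_mirror callables are hypothetical
# stand-ins for the real fetcher plumbing, not BitBake API.

def download_one(url, premirroronly, try_premirror, try_upstream, try_mirror):
    # 1. PREMIRRORS are consulted first.
    localpath = try_premirror(url)
    # BB_FETCH_PREMIRRORONLY == "1" sets BB_NO_NETWORK, so the upstream
    # attempt is effectively disabled; modelled here as a simple skip.
    if not localpath and not premirroronly:
        # 2. Fall back to the original URI.
        localpath = try_upstream(url)
    if not localpath:
        # 3. Last resort: MIRRORS.
        localpath = try_mirror(url)
    return localpath

# Only the MIRRORS lookup succeeds in this run.
print(download_one("git://example.com/repo.git", False,
                   try_premirror=lambda u: None,
                   try_upstream=lambda u: None,
                   try_mirror=lambda u: "/downloads/repo.tar.gz"))
```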
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py index 87a35d97a3..f203c5abb1 100644 --- a/bitbake/lib/bb/fetch2/git.py +++ b/bitbake/lib/bb/fetch2/git.py | |||
| @@ -68,7 +68,7 @@ class Git(FetchMethod): | |||
| 68 | # | 68 | # |
| 69 | # Only enable _sortable revision if the key is set | 69 | # Only enable _sortable revision if the key is set |
| 70 | # | 70 | # |
| 71 | if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True): | 71 | if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True): |
| 72 | self._sortable_buildindex = self._sortable_buildindex_disabled | 72 | self._sortable_buildindex = self._sortable_buildindex_disabled |
| 73 | def supports(self, url, ud, d): | 73 | def supports(self, url, ud, d): |
| 74 | """ | 74 | """ |
| @@ -146,7 +146,7 @@ class Git(FetchMethod): | |||
| 146 | def try_premirror(self, u, ud, d): | 146 | def try_premirror(self, u, ud, d): |
| 147 | # If we don't do this, updating an existing checkout with only premirrors | 147 | # If we don't do this, updating an existing checkout with only premirrors |
| 148 | # is not possible | 148 | # is not possible |
| 149 | if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None: | 149 | if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: |
| 150 | return True | 150 | return True |
| 151 | if os.path.exists(ud.clonedir): | 151 | if os.path.exists(ud.clonedir): |
| 152 | return False | 152 | return False |
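`try_premirror()` in the git fetcher is a three-way predicate: premirror-only mode always consults premirrors (otherwise an existing checkout could never be updated offline, per the comment in the hunk), an existing clone skips them, and everything else tries them. A runnable sketch under those assumptions; `DataStoreStub` is hypothetical:

```python
import os

class DataStoreStub:
    """Hypothetical stand-in for the datastore; only getVar() is modelled."""
    def __init__(self, values):
        self._values = values
    def getVar(self, name, expand=False):
        return self._values.get(name)

def try_premirror(clonedir, d):
    # Premirror-only mode: always try premirrors, even to update a clone.
    if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
        return True
    # An existing clone is updated in place instead.
    if os.path.exists(clonedir):
        return False
    return True

d = DataStoreStub({"BB_FETCH_PREMIRRORONLY": "1"})
print(try_premirror("/path/to/clonedir", d))  # True
```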
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index eee8d9cddb..c5005aec9a 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
| @@ -62,9 +62,9 @@ def update_mtime(f): | |||
| 62 | def mark_dependency(d, f): | 62 | def mark_dependency(d, f): |
| 63 | if f.startswith('./'): | 63 | if f.startswith('./'): |
| 64 | f = "%s/%s" % (os.getcwd(), f[2:]) | 64 | f = "%s/%s" % (os.getcwd(), f[2:]) |
| 65 | deps = bb.data.getVar('__depends', d) or set() | 65 | deps = d.getVar('__depends') or set() |
| 66 | deps.update([(f, cached_mtime(f))]) | 66 | deps.update([(f, cached_mtime(f))]) |
| 67 | bb.data.setVar('__depends', deps, d) | 67 | d.setVar('__depends', deps) |
| 68 | 68 | ||
| 69 | def supports(fn, data): | 69 | def supports(fn, data): |
| 70 | """Returns true if we have a handler for this file, false otherwise""" | 70 | """Returns true if we have a handler for this file, false otherwise""" |
| @@ -90,7 +90,7 @@ def init_parser(d): | |||
| 90 | 90 | ||
| 91 | def resolve_file(fn, d): | 91 | def resolve_file(fn, d): |
| 92 | if not os.path.isabs(fn): | 92 | if not os.path.isabs(fn): |
| 93 | bbpath = bb.data.getVar("BBPATH", d, True) | 93 | bbpath = d.getVar("BBPATH", True) |
| 94 | newfn = bb.utils.which(bbpath, fn) | 94 | newfn = bb.utils.which(bbpath, fn) |
| 95 | if not newfn: | 95 | if not newfn: |
| 96 | raise IOError("file %s not found in %s" % (fn, bbpath)) | 96 | raise IOError("file %s not found in %s" % (fn, bbpath)) |
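`mark_dependency()` records parse-time file dependencies in the `__depends` variable as a set of `(path, mtime)` pairs; a later mtime change invalidates the parse cache. A runnable sketch with a hypothetical stub datastore in place of the real one:

```python
import os

class DataStoreStub:
    """Hypothetical minimal datastore: getVar/setVar only."""
    def __init__(self):
        self._values = {}
    def getVar(self, name, expand=False):
        return self._values.get(name)
    def setVar(self, name, value):
        self._values[name] = value

def mark_dependency(d, f):
    if f.startswith('./'):
        f = "%s/%s" % (os.getcwd(), f[2:])
    # Accumulate (path, mtime) pairs in __depends.
    mtime = os.path.getmtime(f) if os.path.exists(f) else 0
    deps = d.getVar('__depends') or set()
    deps.update([(f, mtime)])
    d.setVar('__depends', deps)

d = DataStoreStub()
mark_dependency(d, './conf/local.conf')
print(d.getVar('__depends'))
```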
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index 3f9065a34c..31c930d9cf 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
| @@ -54,7 +54,7 @@ class IncludeNode(AstNode): | |||
| 54 | """ | 54 | """ |
| 55 | Include the file and evaluate the statements | 55 | Include the file and evaluate the statements |
| 56 | """ | 56 | """ |
| 57 | s = bb.data.expand(self.what_file, data) | 57 | s = data.expand(self.what_file) |
| 58 | logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) | 58 | logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) |
| 59 | 59 | ||
| 60 | # TODO: Cache those includes... maybe not here though | 60 | # TODO: Cache those includes... maybe not here though |
| @@ -69,7 +69,7 @@ class ExportNode(AstNode): | |||
| 69 | self.var = var | 69 | self.var = var |
| 70 | 70 | ||
| 71 | def eval(self, data): | 71 | def eval(self, data): |
| 72 | bb.data.setVarFlag(self.var, "export", 1, data) | 72 | data.setVarFlag(self.var, "export", 1) |
| 73 | 73 | ||
| 74 | class DataNode(AstNode): | 74 | class DataNode(AstNode): |
| 75 | """ | 75 | """ |
| @@ -92,7 +92,7 @@ class DataNode(AstNode): | |||
| 92 | groupd = self.groupd | 92 | groupd = self.groupd |
| 93 | key = groupd["var"] | 93 | key = groupd["var"] |
| 94 | if "exp" in groupd and groupd["exp"] != None: | 94 | if "exp" in groupd and groupd["exp"] != None: |
| 95 | bb.data.setVarFlag(key, "export", 1, data) | 95 | data.setVarFlag(key, "export", 1) |
| 96 | if "ques" in groupd and groupd["ques"] != None: | 96 | if "ques" in groupd and groupd["ques"] != None: |
| 97 | val = self.getFunc(key, data) | 97 | val = self.getFunc(key, data) |
| 98 | if val == None: | 98 | if val == None: |
| @@ -100,7 +100,7 @@ class DataNode(AstNode): | |||
| 100 | elif "colon" in groupd and groupd["colon"] != None: | 100 | elif "colon" in groupd and groupd["colon"] != None: |
| 101 | e = data.createCopy() | 101 | e = data.createCopy() |
| 102 | bb.data.update_data(e) | 102 | bb.data.update_data(e) |
| 103 | val = bb.data.expand(groupd["value"], e, key + "[:=]") | 103 | val = e.expand(groupd["value"], key + "[:=]") |
| 104 | elif "append" in groupd and groupd["append"] != None: | 104 | elif "append" in groupd and groupd["append"] != None: |
| 105 | val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"]) | 105 | val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"]) |
| 106 | elif "prepend" in groupd and groupd["prepend"] != None: | 106 | elif "prepend" in groupd and groupd["prepend"] != None: |
| @@ -113,11 +113,11 @@ class DataNode(AstNode): | |||
| 113 | val = groupd["value"] | 113 | val = groupd["value"] |
| 114 | 114 | ||
| 115 | if 'flag' in groupd and groupd['flag'] != None: | 115 | if 'flag' in groupd and groupd['flag'] != None: |
| 116 | bb.data.setVarFlag(key, groupd['flag'], val, data) | 116 | data.setVarFlag(key, groupd['flag'], val) |
| 117 | elif groupd["lazyques"]: | 117 | elif groupd["lazyques"]: |
| 118 | bb.data.setVarFlag(key, "defaultval", val, data) | 118 | data.setVarFlag(key, "defaultval", val) |
| 119 | else: | 119 | else: |
| 120 | bb.data.setVar(key, val, data) | 120 | data.setVar(key, val) |
| 121 | 121 | ||
| 122 | class MethodNode(AstNode): | 122 | class MethodNode(AstNode): |
| 123 | def __init__(self, filename, lineno, func_name, body): | 123 | def __init__(self, filename, lineno, func_name, body): |
| @@ -131,12 +131,12 @@ class MethodNode(AstNode): | |||
| 131 | if not funcname in bb.methodpool._parsed_fns: | 131 | if not funcname in bb.methodpool._parsed_fns: |
| 132 | text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) | 132 | text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) |
| 133 | bb.methodpool.insert_method(funcname, text, self.filename) | 133 | bb.methodpool.insert_method(funcname, text, self.filename) |
| 134 | anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or [] | 134 | anonfuncs = data.getVar('__BBANONFUNCS') or [] |
| 135 | anonfuncs.append(funcname) | 135 | anonfuncs.append(funcname) |
| 136 | bb.data.setVar('__BBANONFUNCS', anonfuncs, data) | 136 | data.setVar('__BBANONFUNCS', anonfuncs) |
| 137 | else: | 137 | else: |
| 138 | bb.data.setVarFlag(self.func_name, "func", 1, data) | 138 | data.setVarFlag(self.func_name, "func", 1) |
| 139 | bb.data.setVar(self.func_name, '\n'.join(self.body), data) | 139 | data.setVar(self.func_name, '\n'.join(self.body)) |
| 140 | 140 | ||
| 141 | class PythonMethodNode(AstNode): | 141 | class PythonMethodNode(AstNode): |
| 142 | def __init__(self, filename, lineno, function, define, body): | 142 | def __init__(self, filename, lineno, function, define, body): |
| @@ -152,9 +152,9 @@ class PythonMethodNode(AstNode): | |||
| 152 | text = '\n'.join(self.body) | 152 | text = '\n'.join(self.body) |
| 153 | if not bb.methodpool.parsed_module(self.define): | 153 | if not bb.methodpool.parsed_module(self.define): |
| 154 | bb.methodpool.insert_method(self.define, text, self.filename) | 154 | bb.methodpool.insert_method(self.define, text, self.filename) |
| 155 | bb.data.setVarFlag(self.function, "func", 1, data) | 155 | data.setVarFlag(self.function, "func", 1) |
| 156 | bb.data.setVarFlag(self.function, "python", 1, data) | 156 | data.setVarFlag(self.function, "python", 1) |
| 157 | bb.data.setVar(self.function, text, data) | 157 | data.setVar(self.function, text) |
| 158 | 158 | ||
| 159 | class MethodFlagsNode(AstNode): | 159 | class MethodFlagsNode(AstNode): |
| 160 | def __init__(self, filename, lineno, key, m): | 160 | def __init__(self, filename, lineno, key, m): |
| @@ -163,19 +163,19 @@ class MethodFlagsNode(AstNode): | |||
| 163 | self.m = m | 163 | self.m = m |
| 164 | 164 | ||
| 165 | def eval(self, data): | 165 | def eval(self, data): |
| 166 | if bb.data.getVar(self.key, data): | 166 | if data.getVar(self.key): |
| 167 | # clean up old version of this piece of metadata, as its | 167 | # clean up old version of this piece of metadata, as its |
| 168 | # flags could cause problems | 168 | # flags could cause problems |
| 169 | bb.data.setVarFlag(self.key, 'python', None, data) | 169 | data.setVarFlag(self.key, 'python', None) |
| 170 | bb.data.setVarFlag(self.key, 'fakeroot', None, data) | 170 | data.setVarFlag(self.key, 'fakeroot', None) |
| 171 | if self.m.group("py") is not None: | 171 | if self.m.group("py") is not None: |
| 172 | bb.data.setVarFlag(self.key, "python", "1", data) | 172 | data.setVarFlag(self.key, "python", "1") |
| 173 | else: | 173 | else: |
| 174 | bb.data.delVarFlag(self.key, "python", data) | 174 | data.delVarFlag(self.key, "python") |
| 175 | if self.m.group("fr") is not None: | 175 | if self.m.group("fr") is not None: |
| 176 | bb.data.setVarFlag(self.key, "fakeroot", "1", data) | 176 | data.setVarFlag(self.key, "fakeroot", "1") |
| 177 | else: | 177 | else: |
| 178 | bb.data.delVarFlag(self.key, "fakeroot", data) | 178 | data.delVarFlag(self.key, "fakeroot") |
| 179 | 179 | ||
| 180 | class ExportFuncsNode(AstNode): | 180 | class ExportFuncsNode(AstNode): |
| 181 | def __init__(self, filename, lineno, fns, classes): | 181 | def __init__(self, filename, lineno, fns, classes): |
| @@ -197,25 +197,25 @@ class ExportFuncsNode(AstNode): | |||
| 197 | vars.append([allvars[0], allvars[2]]) | 197 | vars.append([allvars[0], allvars[2]]) |
| 198 | 198 | ||
| 199 | for (var, calledvar) in vars: | 199 | for (var, calledvar) in vars: |
| 200 | if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data): | 200 | if data.getVar(var) and not data.getVarFlag(var, 'export_func'): |
| 201 | continue | 201 | continue |
| 202 | 202 | ||
| 203 | if bb.data.getVar(var, data): | 203 | if data.getVar(var): |
| 204 | bb.data.setVarFlag(var, 'python', None, data) | 204 | data.setVarFlag(var, 'python', None) |
| 205 | bb.data.setVarFlag(var, 'func', None, data) | 205 | data.setVarFlag(var, 'func', None) |
| 206 | 206 | ||
| 207 | for flag in [ "func", "python" ]: | 207 | for flag in [ "func", "python" ]: |
| 208 | if bb.data.getVarFlag(calledvar, flag, data): | 208 | if data.getVarFlag(calledvar, flag): |
| 209 | bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data) | 209 | data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag)) |
| 210 | for flag in [ "dirs" ]: | 210 | for flag in [ "dirs" ]: |
| 211 | if bb.data.getVarFlag(var, flag, data): | 211 | if data.getVarFlag(var, flag): |
| 212 | bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data) | 212 | data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag)) |
| 213 | 213 | ||
| 214 | if bb.data.getVarFlag(calledvar, "python", data): | 214 | if data.getVarFlag(calledvar, "python"): |
| 215 | bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data) | 215 | data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n") |
| 216 | else: | 216 | else: |
| 217 | bb.data.setVar(var, "\t" + calledvar + "\n", data) | 217 | data.setVar(var, "\t" + calledvar + "\n") |
| 218 | bb.data.setVarFlag(var, 'export_func', '1', data) | 218 | data.setVarFlag(var, 'export_func', '1') |
| 219 | 219 | ||
| 220 | class AddTaskNode(AstNode): | 220 | class AddTaskNode(AstNode): |
| 221 | def __init__(self, filename, lineno, func, before, after): | 221 | def __init__(self, filename, lineno, func, before, after): |
| @@ -229,25 +229,25 @@ class AddTaskNode(AstNode): | |||
| 229 | if self.func[:3] != "do_": | 229 | if self.func[:3] != "do_": |
| 230 | var = "do_" + self.func | 230 | var = "do_" + self.func |
| 231 | 231 | ||
| 232 | bb.data.setVarFlag(var, "task", 1, data) | 232 | data.setVarFlag(var, "task", 1) |
| 233 | bbtasks = bb.data.getVar('__BBTASKS', data) or [] | 233 | bbtasks = data.getVar('__BBTASKS') or [] |
| 234 | if not var in bbtasks: | 234 | if not var in bbtasks: |
| 235 | bbtasks.append(var) | 235 | bbtasks.append(var) |
| 236 | bb.data.setVar('__BBTASKS', bbtasks, data) | 236 | data.setVar('__BBTASKS', bbtasks) |
| 237 | 237 | ||
| 238 | existing = bb.data.getVarFlag(var, "deps", data) or [] | 238 | existing = data.getVarFlag(var, "deps") or [] |
| 239 | if self.after is not None: | 239 | if self.after is not None: |
| 240 | # set up deps for function | 240 | # set up deps for function |
| 241 | for entry in self.after.split(): | 241 | for entry in self.after.split(): |
| 242 | if entry not in existing: | 242 | if entry not in existing: |
| 243 | existing.append(entry) | 243 | existing.append(entry) |
| 244 | bb.data.setVarFlag(var, "deps", existing, data) | 244 | data.setVarFlag(var, "deps", existing) |
| 245 | if self.before is not None: | 245 | if self.before is not None: |
| 246 | # set up things that depend on this func | 246 | # set up things that depend on this func |
| 247 | for entry in self.before.split(): | 247 | for entry in self.before.split(): |
| 248 | existing = bb.data.getVarFlag(entry, "deps", data) or [] | 248 | existing = data.getVarFlag(entry, "deps") or [] |
| 249 | if var not in existing: | 249 | if var not in existing: |
| 250 | bb.data.setVarFlag(entry, "deps", [var] + existing, data) | 250 | data.setVarFlag(entry, "deps", [var] + existing) |
| 251 | 251 | ||
| 252 | class BBHandlerNode(AstNode): | 252 | class BBHandlerNode(AstNode): |
| 253 | def __init__(self, filename, lineno, fns): | 253 | def __init__(self, filename, lineno, fns): |
| @@ -255,11 +255,11 @@ class BBHandlerNode(AstNode): | |||
| 255 | self.hs = fns.split() | 255 | self.hs = fns.split() |
| 256 | 256 | ||
| 257 | def eval(self, data): | 257 | def eval(self, data): |
| 258 | bbhands = bb.data.getVar('__BBHANDLERS', data) or [] | 258 | bbhands = data.getVar('__BBHANDLERS') or [] |
| 259 | for h in self.hs: | 259 | for h in self.hs: |
| 260 | bbhands.append(h) | 260 | bbhands.append(h) |
| 261 | bb.data.setVarFlag(h, "handler", 1, data) | 261 | data.setVarFlag(h, "handler", 1) |
| 262 | bb.data.setVar('__BBHANDLERS', bbhands, data) | 262 | data.setVar('__BBHANDLERS', bbhands) |
| 263 | 263 | ||
| 264 | class InheritNode(AstNode): | 264 | class InheritNode(AstNode): |
| 265 | def __init__(self, filename, lineno, classes): | 265 | def __init__(self, filename, lineno, classes): |
| @@ -308,9 +308,9 @@ def handleInherit(statements, filename, lineno, m): | |||
| 308 | 308 | ||
| 309 | def finalize(fn, d, variant = None): | 309 | def finalize(fn, d, variant = None): |
| 310 | all_handlers = {} | 310 | all_handlers = {} |
| 311 | for var in bb.data.getVar('__BBHANDLERS', d) or []: | 311 | for var in d.getVar('__BBHANDLERS') or []: |
| 312 | # try to add the handler | 312 | # try to add the handler |
| 313 | handler = bb.data.getVar(var, d) | 313 | handler = d.getVar(var) |
| 314 | bb.event.register(var, handler) | 314 | bb.event.register(var, handler) |
| 315 | 315 | ||
| 316 | bb.event.fire(bb.event.RecipePreFinalise(fn), d) | 316 | bb.event.fire(bb.event.RecipePreFinalise(fn), d) |
| @@ -318,12 +318,12 @@ def finalize(fn, d, variant = None): | |||
| 318 | bb.data.expandKeys(d) | 318 | bb.data.expandKeys(d) |
| 319 | bb.data.update_data(d) | 319 | bb.data.update_data(d) |
| 320 | code = [] | 320 | code = [] |
| 321 | for funcname in bb.data.getVar("__BBANONFUNCS", d) or []: | 321 | for funcname in d.getVar("__BBANONFUNCS") or []: |
| 322 | code.append("%s(d)" % funcname) | 322 | code.append("%s(d)" % funcname) |
| 323 | bb.utils.simple_exec("\n".join(code), {"d": d}) | 323 | bb.utils.simple_exec("\n".join(code), {"d": d}) |
| 324 | bb.data.update_data(d) | 324 | bb.data.update_data(d) |
| 325 | 325 | ||
| 326 | tasklist = bb.data.getVar('__BBTASKS', d) or [] | 326 | tasklist = d.getVar('__BBTASKS') or [] |
| 327 | bb.build.add_tasks(tasklist, d) | 327 | bb.build.add_tasks(tasklist, d) |
| 328 | 328 | ||
| 329 | bb.parse.siggen.finalise(fn, d, variant) | 329 | bb.parse.siggen.finalise(fn, d, variant) |
| @@ -378,7 +378,7 @@ def multi_finalize(fn, d): | |||
| 378 | try: | 378 | try: |
| 379 | finalize(fn, d) | 379 | finalize(fn, d) |
| 380 | except bb.parse.SkipPackage as e: | 380 | except bb.parse.SkipPackage as e: |
| 381 | bb.data.setVar("__SKIPPED", e.args[0], d) | 381 | d.setVar("__SKIPPED", e.args[0]) |
| 382 | datastores = {"": safe_d} | 382 | datastores = {"": safe_d} |
| 383 | 383 | ||
| 384 | versions = (d.getVar("BBVERSIONS", True) or "").split() | 384 | versions = (d.getVar("BBVERSIONS", True) or "").split() |
| @@ -421,7 +421,7 @@ def multi_finalize(fn, d): | |||
| 421 | try: | 421 | try: |
| 422 | finalize(fn, d) | 422 | finalize(fn, d) |
| 423 | except bb.parse.SkipPackage as e: | 423 | except bb.parse.SkipPackage as e: |
| 424 | bb.data.setVar("__SKIPPED", e.args[0], d) | 424 | d.setVar("__SKIPPED", e.args[0]) |
| 425 | 425 | ||
| 426 | _create_variants(datastores, versions, verfunc) | 426 | _create_variants(datastores, versions, verfunc) |
| 427 | 427 | ||
| @@ -461,7 +461,7 @@ def multi_finalize(fn, d): | |||
| 461 | if not onlyfinalise or variant in onlyfinalise: | 461 | if not onlyfinalise or variant in onlyfinalise: |
| 462 | finalize(fn, variant_d, variant) | 462 | finalize(fn, variant_d, variant) |
| 463 | except bb.parse.SkipPackage as e: | 463 | except bb.parse.SkipPackage as e: |
| 464 | bb.data.setVar("__SKIPPED", e.args[0], variant_d) | 464 | variant_d.setVar("__SKIPPED", e.args[0]) |
| 465 | 465 | ||
| 466 | if len(datastores) > 1: | 466 | if len(datastores) > 1: |
| 467 | variants = filter(None, datastores.iterkeys()) | 467 | variants = filter(None, datastores.iterkeys()) |
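Most of the ast.py hunk is a mechanical API swap, but `AddTaskNode.eval()` is worth spelling out: an `addtask` statement becomes a `task` flag, an entry in `__BBTASKS`, and `deps` flags wired in both directions for `before`/`after`. A condensed sketch; `FlagStore` is a hypothetical stand-in for the datastore's per-variable flags:

```python
class FlagStore:
    """Hypothetical datastore modelling values plus per-variable flags."""
    def __init__(self):
        self._values, self._flags = {}, {}
    def getVar(self, name):
        return self._values.get(name)
    def setVar(self, name, value):
        self._values[name] = value
    def getVarFlag(self, name, flag):
        return self._flags.get((name, flag))
    def setVarFlag(self, name, flag, value):
        self._flags[(name, flag)] = value

def add_task(data, func, before=None, after=None):
    var = func if func.startswith("do_") else "do_" + func
    data.setVarFlag(var, "task", 1)
    bbtasks = data.getVar('__BBTASKS') or []
    if var not in bbtasks:
        bbtasks.append(var)
    data.setVar('__BBTASKS', bbtasks)
    # "after X": X becomes a dependency of this task.
    existing = data.getVarFlag(var, "deps") or []
    for entry in (after or "").split():
        if entry not in existing:
            existing.append(entry)
    data.setVarFlag(var, "deps", existing)
    # "before Y": this task becomes a dependency of Y.
    for entry in (before or "").split():
        deps = data.getVarFlag(entry, "deps") or []
        if var not in deps:
            data.setVarFlag(entry, "deps", [var] + deps)

d = FlagStore()
add_task(d, "compile", before="do_install", after="do_configure")
print(d.getVarFlag("do_compile", "deps"))  # ['do_configure']
print(d.getVarFlag("do_install", "deps"))  # ['do_compile']
```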
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index 8c1bd60abf..2d6e331a1d 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
| @@ -159,7 +159,7 @@ def handle(fn, d, include): | |||
| 159 | return ast.multi_finalize(fn, d) | 159 | return ast.multi_finalize(fn, d) |
| 160 | 160 | ||
| 161 | if oldfile: | 161 | if oldfile: |
| 162 | bb.data.setVar("FILE", oldfile, d) | 162 | d.setVar("FILE", oldfile) |
| 163 | 163 | ||
| 164 | # we have parsed the bb class now | 164 | # we have parsed the bb class now |
| 165 | if ext == ".bbclass" or ext == ".inc": | 165 | if ext == ".bbclass" or ext == ".inc": |
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index e168d24b4c..6ae9d973e7 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
| @@ -24,7 +24,7 @@ | |||
| 24 | # with this program; if not, write to the Free Software Foundation, Inc., | 24 | # with this program; if not, write to the Free Software Foundation, Inc., |
| 25 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 25 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| 26 | 26 | ||
| 27 | import re, bb.data, os | 27 | import re, os |
| 28 | import logging | 28 | import logging |
| 29 | import bb.utils | 29 | import bb.utils |
| 30 | from bb.parse import ParseError, resolve_file, ast, logger | 30 | from bb.parse import ParseError, resolve_file, ast, logger |
| @@ -36,9 +36,9 @@ __require_regexp__ = re.compile( r"require\s+(.+)" ) | |||
| 36 | __export_regexp__ = re.compile( r"export\s+(.+)" ) | 36 | __export_regexp__ = re.compile( r"export\s+(.+)" ) |
| 37 | 37 | ||
| 38 | def init(data): | 38 | def init(data): |
| 39 | topdir = bb.data.getVar('TOPDIR', data) | 39 | topdir = data.getVar('TOPDIR') |
| 40 | if not topdir: | 40 | if not topdir: |
| 41 | bb.data.setVar('TOPDIR', os.getcwd(), data) | 41 | data.setVar('TOPDIR', os.getcwd()) |
| 42 | 42 | ||
| 43 | 43 | ||
| 44 | def supports(fn, d): | 44 | def supports(fn, d): |
| @@ -53,12 +53,12 @@ def include(oldfn, fn, data, error_out): | |||
| 53 | return None | 53 | return None |
| 54 | 54 | ||
| 55 | import bb | 55 | import bb |
| 56 | fn = bb.data.expand(fn, data) | 56 | fn = data.expand(fn) |
| 57 | oldfn = bb.data.expand(oldfn, data) | 57 | oldfn = data.expand(oldfn) |
| 58 | 58 | ||
| 59 | if not os.path.isabs(fn): | 59 | if not os.path.isabs(fn): |
| 60 | dname = os.path.dirname(oldfn) | 60 | dname = os.path.dirname(oldfn) |
| 61 | bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1)) | 61 | bbpath = "%s:%s" % (dname, data.getVar("BBPATH", 1)) |
| 62 | abs_fn = bb.utils.which(bbpath, fn) | 62 | abs_fn = bb.utils.which(bbpath, fn) |
| 63 | if abs_fn: | 63 | if abs_fn: |
| 64 | fn = abs_fn | 64 | fn = abs_fn |
| @@ -77,7 +77,7 @@ def handle(fn, data, include): | |||
| 77 | if include == 0: | 77 | if include == 0: |
| 78 | oldfile = None | 78 | oldfile = None |
| 79 | else: | 79 | else: |
| 80 | oldfile = bb.data.getVar('FILE', data) | 80 | oldfile = data.getVar('FILE') |
| 81 | 81 | ||
| 82 | abs_fn = resolve_file(fn, data) | 82 | abs_fn = resolve_file(fn, data) |
| 83 | f = open(abs_fn, 'r') | 83 | f = open(abs_fn, 'r') |
| @@ -102,10 +102,10 @@ def handle(fn, data, include): | |||
| 102 | feeder(lineno, s, fn, statements) | 102 | feeder(lineno, s, fn, statements) |
| 103 | 103 | ||
| 104 | # DONE WITH PARSING... time to evaluate | 104 | # DONE WITH PARSING... time to evaluate |
| 105 | bb.data.setVar('FILE', abs_fn, data) | 105 | data.setVar('FILE', abs_fn) |
| 106 | statements.eval(data) | 106 | statements.eval(data) |
| 107 | if oldfile: | 107 | if oldfile: |
| 108 | bb.data.setVar('FILE', oldfile, data) | 108 | data.setVar('FILE', oldfile) |
| 109 | 109 | ||
| 110 | return data | 110 | return data |
| 111 | 111 | ||
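`include()` resolves relative filenames against the including file's directory first and BBPATH second. The helper below is a simplified reimplementation of that search for illustration, not the real `bb.utils.which()`:

```python
import os

def which_like(pathstr, item):
    # First existing <dir>/<item> along a colon-separated path wins.
    for directory in pathstr.split(":"):
        candidate = os.path.join(directory, item)
        if os.path.exists(candidate):
            return candidate
    return ""

def resolve_include(oldfn, fn, bbpath):
    if os.path.isabs(fn):
        return fn
    # The including file's directory takes precedence over BBPATH,
    # matching the "%s:%s" % (dname, BBPATH) construction above.
    search = "%s:%s" % (os.path.dirname(oldfn), bbpath)
    # Like include(), fall back to the unresolved name on a miss.
    return which_like(search, fn) or fn

print(resolve_include("/build/conf/bitbake.conf", "local.conf",
                      "/etc/bb:/opt/bb"))
```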
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index 551b58a2a9..17620ef331 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py | |||
| @@ -192,9 +192,9 @@ def connect(database): | |||
| 192 | 192 | ||
| 193 | def persist(domain, d): | 193 | def persist(domain, d): |
| 194 | """Convenience factory for SQLTable objects based upon metadata""" | 194 | """Convenience factory for SQLTable objects based upon metadata""" |
| 195 | import bb.data, bb.utils | 195 | import bb.utils |
| 196 | cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or | 196 | cachedir = (d.getVar("PERSISTENT_DIR", True) or |
| 197 | bb.data.getVar("CACHE", d, True)) | 197 | d.getVar("CACHE", True)) |
| 198 | if not cachedir: | 198 | if not cachedir: |
| 199 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") | 199 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") |
| 200 | sys.exit(1) | 200 | sys.exit(1) |
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py index b2f98963d8..4543447057 100644 --- a/bitbake/lib/bb/providers.py +++ b/bitbake/lib/bb/providers.py | |||
| @@ -84,10 +84,10 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
| 84 | preferred_ver = None | 84 | preferred_ver = None |
| 85 | 85 | ||
| 86 | localdata = data.createCopy(cfgData) | 86 | localdata = data.createCopy(cfgData) |
| 87 | bb.data.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn), localdata) | 87 | localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn)) |
| 88 | bb.data.update_data(localdata) | 88 | bb.data.update_data(localdata) |
| 89 | 89 | ||
| 90 | preferred_v = bb.data.getVar('PREFERRED_VERSION', localdata, True) | 90 | preferred_v = localdata.getVar('PREFERRED_VERSION', True) |
| 91 | if preferred_v: | 91 | if preferred_v: |
| 92 | m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) | 92 | m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) |
| 93 | if m: | 93 | if m: |
| @@ -248,7 +248,7 @@ def filterProviders(providers, item, cfgData, dataCache): | |||
| 248 | 248 | ||
| 249 | eligible = _filterProviders(providers, item, cfgData, dataCache) | 249 | eligible = _filterProviders(providers, item, cfgData, dataCache) |
| 250 | 250 | ||
| 251 | prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1) | 251 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, 1) |
| 252 | if prefervar: | 252 | if prefervar: |
| 253 | dataCache.preferred[item] = prefervar | 253 | dataCache.preferred[item] = prefervar |
| 254 | 254 | ||
| @@ -286,7 +286,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): | |||
| 286 | pn = dataCache.pkg_fn[p] | 286 | pn = dataCache.pkg_fn[p] |
| 287 | provides = dataCache.pn_provides[pn] | 287 | provides = dataCache.pn_provides[pn] |
| 288 | for provide in provides: | 288 | for provide in provides: |
| 289 | prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1) | 289 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, 1) |
| 290 | logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) | 290 | logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) |
| 291 | if prefervar in pns and pns[prefervar] not in preferred: | 291 | if prefervar in pns and pns[prefervar] not in preferred: |
| 292 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) | 292 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) |
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 3fc384d5bb..5a5164b7b5 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
| @@ -188,8 +188,8 @@ class RunQueueData: | |||
| 188 | self.targets = targets | 188 | self.targets = targets |
| 189 | self.rq = rq | 189 | self.rq = rq |
| 190 | 190 | ||
| 191 | self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or "" | 191 | self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", 1) or "" |
| 192 | self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split() | 192 | self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", 1) or "").split() |
| 193 | 193 | ||
| 194 | self.reset() | 194 | self.reset() |
| 195 | 195 | ||
| @@ -765,9 +765,9 @@ class RunQueue: | |||
| 765 | self.cfgData = cfgData | 765 | self.cfgData = cfgData |
| 766 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets) | 766 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets) |
| 767 | 767 | ||
| 768 | self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, True) or "perfile" | 768 | self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile" |
| 769 | self.hashvalidate = bb.data.getVar("BB_HASHCHECK_FUNCTION", cfgData, True) or None | 769 | self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None |
| 770 | self.setsceneverify = bb.data.getVar("BB_SETSCENE_VERIFY_FUNCTION", cfgData, True) or None | 770 | self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None |
| 771 | 771 | ||
| 772 | self.state = runQueuePrepare | 772 | self.state = runQueuePrepare |
| 773 | 773 | ||
| @@ -1007,8 +1007,8 @@ class RunQueueExecute: | |||
| 1007 | self.cfgData = rq.cfgData | 1007 | self.cfgData = rq.cfgData |
| 1008 | self.rqdata = rq.rqdata | 1008 | self.rqdata = rq.rqdata |
| 1009 | 1009 | ||
| 1010 | self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", self.cfgData, 1) or 1) | 1010 | self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", 1) or 1) |
| 1011 | self.scheduler = bb.data.getVar("BB_SCHEDULER", self.cfgData, 1) or "speed" | 1011 | self.scheduler = self.cfgData.getVar("BB_SCHEDULER", 1) or "speed" |
| 1012 | 1012 | ||
| 1013 | self.runq_buildable = [] | 1013 | self.runq_buildable = [] |
| 1014 | self.runq_running = [] | 1014 | self.runq_running = [] |
| @@ -1132,9 +1132,9 @@ class RunQueueExecute: | |||
| 1132 | if umask: | 1132 | if umask: |
| 1133 | os.umask(umask) | 1133 | os.umask(umask) |
| 1134 | 1134 | ||
| 1135 | bb.data.setVar("BB_WORKERCONTEXT", "1", self.cooker.configuration.data) | 1135 | self.cooker.configuration.data.setVar("BB_WORKERCONTEXT", "1") |
| 1136 | bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data) | 1136 | self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self) |
| 1137 | bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data) | 1137 | self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn) |
| 1138 | bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps) | 1138 | bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps) |
| 1139 | ret = 0 | 1139 | ret = 0 |
| 1140 | try: | 1140 | try: |
| @@ -1255,7 +1255,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
| 1255 | if type(obj) is type and | 1255 | if type(obj) is type and |
| 1256 | issubclass(obj, RunQueueScheduler)) | 1256 | issubclass(obj, RunQueueScheduler)) |
| 1257 | 1257 | ||
| 1258 | user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True) | 1258 | user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True) |
| 1259 | if user_schedulers: | 1259 | if user_schedulers: |
| 1260 | for sched in user_schedulers.split(): | 1260 | for sched in user_schedulers.split(): |
| 1261 | if not "." in sched: | 1261 | if not "." in sched: |
| @@ -1702,8 +1702,8 @@ class runQueueTaskCompleted(runQueueEvent): | |||
| 1702 | """ | 1702 | """ |
| 1703 | 1703 | ||
| 1704 | def check_stamp_fn(fn, taskname, d): | 1704 | def check_stamp_fn(fn, taskname, d): |
| 1705 | rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) | 1705 | rqexe = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY") |
| 1706 | fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d) | 1706 | fn = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2") |
| 1707 | fnid = rqexe.rqdata.taskData.getfn_id(fn) | 1707 | fnid = rqexe.rqdata.taskData.getfn_id(fn) |
| 1708 | taskid = rqexe.rqdata.get_task_id(fnid, taskname) | 1708 | taskid = rqexe.rqdata.get_task_id(fnid, taskname) |
| 1709 | if taskid is not None: | 1709 | if taskid is not None: |
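A small idiom recurs throughout the runqueue hunks: `getVar()` returns a string or None, so defaults are supplied with `or` and type conversion happens afterwards, as in the BB_NUMBER_THREADS and BB_SCHEDULER reads above. A brief sketch with a hypothetical stub:

```python
class ConfigStub:
    """Hypothetical datastore exposing only getVar()."""
    def __init__(self, values):
        self._values = values
    def getVar(self, name, expand=False):
        return self._values.get(name)

cfg = ConfigStub({"BB_NUMBER_THREADS": "4"})
# Missing variables fall through `or` to a default, then get coerced.
number_tasks = int(cfg.getVar("BB_NUMBER_THREADS", 1) or 1)
scheduler = cfg.getVar("BB_SCHEDULER", 1) or "speed"
print(number_tasks, scheduler)  # 4 speed
```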
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 91b4160b2d..8d768cea7d 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
| @@ -16,7 +16,7 @@ def init(d): | |||
| 16 | siggens = [obj for obj in globals().itervalues() | 16 | siggens = [obj for obj in globals().itervalues() |
| 17 | if type(obj) is type and issubclass(obj, SignatureGenerator)] | 17 | if type(obj) is type and issubclass(obj, SignatureGenerator)] |
| 18 | 18 | ||
| 19 | desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop" | 19 | desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" |
| 20 | for sg in siggens: | 20 | for sg in siggens: |
| 21 | if desired == sg.name: | 21 | if desired == sg.name: |
| 22 | return sg(d) | 22 | return sg(d) |
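`init()` in siggen.py selects a handler by scanning for SignatureGenerator subclasses and matching each class's `name` against BB_SIGNATURE_HANDLER, defaulting to "noop". The sketch below uses `__subclasses__()` instead of scanning `globals()` as the hunk does, but the registry idea is the same:

```python
class SignatureGenerator:
    name = "noop"

class BasicHashGenerator(SignatureGenerator):
    name = "basichash"

def init_siggen(desired):
    # Match the requested handler name against the known generators.
    candidates = [SignatureGenerator] + SignatureGenerator.__subclasses__()
    for sg in candidates:
        if sg.name == desired:
            return sg()
    raise RuntimeError("Invalid signature generator %r" % desired)

print(type(init_siggen("basichash")).__name__)  # BasicHashGenerator
```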
diff --git a/bitbake/lib/bb/ui/crumbs/configurator.py b/bitbake/lib/bb/ui/crumbs/configurator.py index 458e05626d..837ee1ca91 100644 --- a/bitbake/lib/bb/ui/crumbs/configurator.py +++ b/bitbake/lib/bb/ui/crumbs/configurator.py | |||
| @@ -58,7 +58,7 @@ class Configurator(gobject.GObject): | |||
| 58 | 58 | ||
| 59 | def _loadConf(self, path): | 59 | def _loadConf(self, path): |
| 60 | def getString(var): | 60 | def getString(var): |
| 61 | return bb.data.getVar(var, data, True) or "" | 61 | return data.getVar(var, True) or "" |
| 62 | 62 | ||
| 63 | if self.orig_config: | 63 | if self.orig_config: |
| 64 | del self.orig_config | 64 | del self.orig_config |
| @@ -125,7 +125,7 @@ class Configurator(gobject.GObject): | |||
| 125 | self.loaded_layers = {} | 125 | self.loaded_layers = {} |
| 126 | data = bb.data.init() | 126 | data = bb.data.init() |
| 127 | data = self._parse(self.bblayers, data) | 127 | data = self._parse(self.bblayers, data) |
| 128 | layers = (bb.data.getVar('BBLAYERS', data, True) or "").split() | 128 | layers = (data.getVar('BBLAYERS', True) or "").split() |
| 129 | for layer in layers: | 129 | for layer in layers: |
| 130 | # TODO: we may be better off calling the layer by its | 130 | # TODO: we may be better off calling the layer by its |
| 131 | # BBFILE_COLLECTIONS value? | 131 | # BBFILE_COLLECTIONS value? |
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 521a0683fc..a26635a19a 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
| @@ -562,7 +562,7 @@ def filter_environment(good_vars): | |||
| 562 | 562 | ||
| 563 | def create_interactive_env(d): | 563 | def create_interactive_env(d): |
| 564 | for k in preserved_envvars_exported_interactive(): | 564 | for k in preserved_envvars_exported_interactive(): |
| 565 | os.setenv(k, bb.data.getVar(k, d, True)) | 565 | os.setenv(k, d.getVar(k, True)) |
| 566 | 566 | ||
| 567 | def approved_variables(): | 567 | def approved_variables(): |
| 568 | """ | 568 | """ |
| @@ -601,9 +601,9 @@ def build_environment(d): | |||
| 601 | """ | 601 | """ |
| 602 | import bb.data | 602 | import bb.data |
| 603 | for var in bb.data.keys(d): | 603 | for var in bb.data.keys(d): |
| 604 | export = bb.data.getVarFlag(var, "export", d) | 604 | export = d.getVarFlag(var, "export") |
| 605 | if export: | 605 | if export: |
| 606 | os.environ[var] = bb.data.getVar(var, d, True) or "" | 606 | os.environ[var] = d.getVar(var, True) or "" |
| 607 | 607 | ||
| 608 | def remove(path, recurse=False): | 608 | def remove(path, recurse=False): |
| 609 | """Equivalent to rm -f or rm -rf""" | 609 | """Equivalent to rm -f or rm -rf""" |
