42 files changed, 279 insertions(+), 279 deletions(-)
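The changes below drop the explicit "True" (expand) argument from d.getVar()
call sites throughout BitBake: getVar() now expands variable references by
default, so passing True is redundant. A minimal sketch of the before/after
calling convention, assuming a BitBake datastore object d (a
bb.data_smart.DataSmart instance) as seen by recipe or task code:

    # Old style: expansion of ${...} references is requested explicitly.
    pn = d.getVar("PN", True)

    # New style: getVar() expands by default, so the flag is dropped.
    pn = d.getVar("PN")

    # Callers that want the raw, unexpanded value still pass False, which
    # is why getVar(..., False) call sites are left untouched in this diff.
    raw_uri = d.getVar("SRC_URI", False)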
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index 946def220c..1e2cfbcac1 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -89,7 +89,7 @@ def main():
     tinfoil = tinfoil_init(False)
     try:
         for path in ([topdir] +
-                     tinfoil.config_data.getVar('BBPATH', True).split(':')):
+                     tinfoil.config_data.getVar('BBPATH').split(':')):
             pluginpath = os.path.join(path, 'lib', 'bblayers')
             bb.utils.load_plugins(logger, plugins, pluginpath)
 
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index db3c4b184f..8d043946cb 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -228,7 +228,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
         the_data = bb_cache.loadDataFull(fn, appends)
         the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
 
-        bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
+        bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
 
         # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
         # successfully. We also need to unset anything from the environment which shouldn't be there
@@ -247,7 +247,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
         if task_exports:
             for e in task_exports.split():
                 the_data.setVarFlag(e, 'export', '1')
-                v = the_data.getVar(e, True)
+                v = the_data.getVar(e)
                 if v is not None:
                     os.environ[e] = v
 
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
index 2a3340b399..6e1642c677 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
@@ -38,7 +38,7 @@
             The code to execute the first part of this process, a fetch,
             looks something like the following:
             <literallayout class='monospaced'>
-     src_uri = (d.getVar('SRC_URI', True) or "").split()
+     src_uri = (d.getVar('SRC_URI') or "").split()
      fetcher = bb.fetch2.Fetch(src_uri, d)
      fetcher.download()
             </literallayout>
@@ -52,7 +52,7 @@
         <para>
             The instantiation of the fetch class is usually followed by:
             <literallayout class='monospaced'>
-     rootdir = l.getVar('WORKDIR', True)
+     rootdir = l.getVar('WORKDIR')
      fetcher.unpack(rootdir)
             </literallayout>
             This code unpacks the downloaded files to the
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
index 71bb25bf72..6103f34f0b 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
@@ -1165,7 +1165,7 @@
                 <literallayout class='monospaced'>
     python some_python_function () {
         d.setVar("TEXT", "Hello World")
-        print d.getVar("TEXT", True)
+        print d.getVar("TEXT")
     }
                 </literallayout>
                 Because the Python "bb" and "os" modules are already
@@ -1180,7 +1180,7 @@
                     to freely set variable values to expandable expressions
                     without having them expanded prematurely.
                     If you do wish to expand a variable within a Python
-                    function, use <filename>d.getVar("X", True)</filename>.
+                    function, use <filename>d.getVar("X")</filename>.
                     Or, for more complicated expressions, use
                     <filename>d.expand()</filename>.
                 </note>
@@ -1232,7 +1232,7 @@
                 Here is an example:
                 <literallayout class='monospaced'>
     def get_depends(d):
-        if d.getVar('SOMECONDITION', True):
+        if d.getVar('SOMECONDITION'):
             return "dependencywithcond"
         else:
             return "dependency"
@@ -1367,7 +1367,7 @@
                 based on the value of another variable:
                 <literallayout class='monospaced'>
     python () {
-        if d.getVar('SOMEVAR', True) == 'value':
+        if d.getVar('SOMEVAR') == 'value':
             d.setVar('ANOTHERVAR', 'value2')
     }
                 </literallayout>
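The manual text above relies on BitBake's delayed-expansion semantics: a value
containing ${...} references is stored verbatim and only expanded on
retrieval. An illustrative sketch of that behaviour under the new getVar()
default (the variable names are made up for the example):

    d.setVar("A", "value")
    d.setVar("B", "${A}")    # stored unexpanded

    d.getVar("B")            # -> "value", expansion is now the default
    d.getVar("B", False)     # -> "${A}", the raw value is still reachable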
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index e807a0948c..bdd83cd955 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -91,13 +91,13 @@ class TaskBase(event.Event):
 
     def __init__(self, t, logfile, d):
         self._task = t
-        self._package = d.getVar("PF", True)
-        self.taskfile = d.getVar("FILE", True)
+        self._package = d.getVar("PF")
+        self.taskfile = d.getVar("FILE")
         self.taskname = self._task
         self.logfile = logfile
         self.time = time.time()
         event.Event.__init__(self)
-        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
+        self._message = "recipe %s: task %s: %s" % (d.getVar("PF"), t, self.getDisplayName())
 
     def getTask(self):
         return self._task
@@ -226,17 +226,17 @@ def exec_func(func, d, dirs = None, pythonexception=False):
     else:
         lockfiles = None
 
-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')
 
     # or func allows items to be executed outside of the normal
     # task set, such as buildhistory
-    task = d.getVar('BB_RUNTASK', True) or func
+    task = d.getVar('BB_RUNTASK') or func
     if task == func:
         taskfunc = task
     else:
         taskfunc = "%s.%s" % (task, func)
 
-    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
     runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
     runfile = os.path.join(tempdir, runfn)
     bb.utils.mkdirhier(os.path.dirname(runfile))
@@ -368,7 +368,7 @@ exit $ret
 
     cmd = runfile
     if d.getVarFlag(func, 'fakeroot', False):
-        fakerootcmd = d.getVar('FAKEROOT', True)
+        fakerootcmd = d.getVar('FAKEROOT')
         if fakerootcmd:
             cmd = [fakerootcmd, runfile]
 
@@ -429,7 +429,7 @@ exit $ret
         else:
             break
 
-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')
     fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
     if os.path.exists(fifopath):
         os.unlink(fifopath)
@@ -442,7 +442,7 @@ exit $ret
         with open(os.devnull, 'r+') as stdin:
             bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
     except bb.process.CmdError:
-        logfn = d.getVar('BB_LOGFILE', True)
+        logfn = d.getVar('BB_LOGFILE')
         raise FuncFailed(func, logfn)
     finally:
         os.unlink(fifopath)
@@ -473,18 +473,18 @@ def _exec_task(fn, task, d, quieterr):
     logger.debug(1, "Executing task %s", task)
 
     localdata = _task_data(fn, task, d)
-    tempdir = localdata.getVar('T', True)
+    tempdir = localdata.getVar('T')
     if not tempdir:
         bb.fatal("T variable not set, unable to build")
 
     # Change nice level if we're asked to
-    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
+    nice = localdata.getVar("BB_TASK_NICE_LEVEL")
     if nice:
         curnice = os.nice(0)
         nice = int(nice) - curnice
         newnice = os.nice(nice)
         logger.debug(1, "Renice to %s " % newnice)
-    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
+    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
     if ionice:
         try:
             cls, prio = ionice.split(".", 1)
@@ -495,7 +495,7 @@ def _exec_task(fn, task, d, quieterr):
     bb.utils.mkdirhier(tempdir)
 
     # Determine the logfile to generate
-    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
+    logfmt = localdata.getVar('BB_LOGFMT') or 'log.{task}.{pid}'
     logbase = logfmt.format(task=task, pid=os.getpid())
 
     # Document the order of the tasks...
@@ -627,7 +627,7 @@ def exec_task(fn, task, d, profile = False):
         quieterr = True
 
     if profile:
-        profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
+        profname = "profile-%s.log" % (d.getVar("PN") + "-" + task)
         try:
             import cProfile as profile
         except:
@@ -667,8 +667,8 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
         stamp = d.stamp[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMP', True)
-        file_name = d.getVar('BB_FILENAME', True)
+        stamp = d.getVar('STAMP')
+        file_name = d.getVar('BB_FILENAME')
         extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
 
     if baseonly:
@@ -703,8 +703,8 @@ def stamp_cleanmask_internal(taskname, d, file_name):
         stamp = d.stampclean[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMPCLEAN', True)
-        file_name = d.getVar('BB_FILENAME', True)
+        stamp = d.getVar('STAMPCLEAN')
+        file_name = d.getVar('BB_FILENAME')
         extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
 
     if not stamp:
@@ -741,7 +741,7 @@ def make_stamp(task, d, file_name = None):
     # as it completes
     if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
         stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME', True)
+        file_name = d.getVar('BB_FILENAME')
         bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
 
 def del_stamp(task, d, file_name = None):
@@ -763,7 +763,7 @@ def write_taint(task, d, file_name = None):
     if file_name:
         taintfn = d.stamp[file_name] + '.' + task + '.taint'
     else:
-        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
+        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
     bb.utils.mkdirhier(os.path.dirname(taintfn))
     # The specific content of the taint file is not really important,
     # we just need it to be random, so a random UUID is used
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index dd9cfdfacf..131abbb1f8 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -296,7 +296,7 @@ def parse_recipe(bb_data, bbfile, appends, mc=''):
         bb_data.setVar("__BBMULTICONFIG", mc)
 
     # expand tmpdir to include this topdir
-    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
+    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
     bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
     oldpath = os.path.abspath(os.getcwd())
     bb.parse.cached_mtime_noerror(bbfile_loc)
@@ -378,7 +378,7 @@ class Cache(NoCache):
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.caches_array = caches_array
-        self.cachedir = data.getVar("CACHE", True)
+        self.cachedir = data.getVar("CACHE")
         self.clean = set()
         self.checked = set()
         self.depends_cache = {}
@@ -792,8 +792,8 @@ class MultiProcessCache(object):
         self.cachedata_extras = self.create_cachedata()
 
     def init_cache(self, d, cache_file_name=None):
-        cachedir = (d.getVar("PERSISTENT_DIR", True) or
-                    d.getVar("CACHE", True))
+        cachedir = (d.getVar("PERSISTENT_DIR") or
+                    d.getVar("CACHE"))
         if cachedir in [None, '']:
             return
         bb.utils.mkdirhier(cachedir)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c08af4593f..1b820d8836 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -323,7 +323,7 @@ class BBCooker:
         # Need to preserve BB_CONSOLELOG over resets
         consolelog = None
         if hasattr(self, "data"):
-            consolelog = self.data.getVar("BB_CONSOLELOG", True)
+            consolelog = self.data.getVar("BB_CONSOLELOG")
 
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.enableDataTracking()
@@ -518,7 +518,7 @@ class BBCooker:
             bb.msg.loggerVerboseLogs = True
 
         # Change nice level if we're asked to
-        nice = self.data.getVar("BB_NICE_LEVEL", True)
+        nice = self.data.getVar("BB_NICE_LEVEL")
         if nice:
             curnice = os.nice(0)
             nice = int(nice) - curnice
@@ -531,7 +531,7 @@ class BBCooker:
         for mc in self.multiconfigs:
             self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
 
-        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
+        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
 
     def updateConfigOpts(self, options, environment):
         clean = True
@@ -611,7 +611,7 @@ class BBCooker:
             fn = self.matchFile(fn)
             fn = bb.cache.realfn2virtual(fn, cls, mc)
         elif len(pkgs_to_build) == 1:
-            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.expanded_data.getVar("ASSUME_PROVIDED") or ""
             if pkgs_to_build[0] in set(ignore.split()):
                 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
 
@@ -995,7 +995,7 @@ class BBCooker:
             bb.data.expandKeys(localdata)
 
             # Handle PREFERRED_PROVIDERS
-            for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
+            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                 try:
                     (providee, provider) = p.split(':')
                 except:
@@ -1006,7 +1006,7 @@ class BBCooker:
                 self.recipecaches[mc].preferred[providee] = provider
 
     def findCoreBaseFiles(self, subdir, configfile):
-        corebase = self.data.getVar('COREBASE', True) or ""
+        corebase = self.data.getVar('COREBASE') or ""
         paths = []
         for root, dirs, files in os.walk(corebase + '/' + subdir):
             for d in dirs:
@@ -1056,7 +1056,7 @@ class BBCooker:
         """
 
         matches = []
-        bbpaths = self.data.getVar('BBPATH', True).split(':')
+        bbpaths = self.data.getVar('BBPATH').split(':')
         for path in bbpaths:
             dirpath = os.path.join(path, directory)
             if os.path.exists(dirpath):
@@ -1078,7 +1078,7 @@ class BBCooker:
 
         data = self.data
         # iterate configs
-        bbpaths = data.getVar('BBPATH', True).split(':')
+        bbpaths = data.getVar('BBPATH').split(':')
         for path in bbpaths:
             confpath = os.path.join(path, "conf", var)
             if os.path.exists(confpath):
@@ -1147,7 +1147,7 @@ class BBCooker:
             bb.debug(1,'Processing %s in collection list' % (c))
 
             # Get collection priority if defined explicitly
-            priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
+            priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
             if priority:
                 try:
                     prio = int(priority)
@@ -1161,7 +1161,7 @@ class BBCooker:
                 collection_priorities[c] = None
 
             # Check dependencies and store information for priority calculation
-            deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
+            deps = self.data.getVar("LAYERDEPENDS_%s" % c)
             if deps:
                 try:
                     depDict = bb.utils.explode_dep_versions2(deps)
@@ -1170,7 +1170,7 @@ class BBCooker:
                 for dep, oplist in list(depDict.items()):
                     if dep in collection_list:
                         for opstr in oplist:
-                            layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
+                            layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                             (op, depver) = opstr.split()
                             if layerver:
                                 try:
@@ -1191,7 +1191,7 @@ class BBCooker:
                 collection_depends[c] = []
 
             # Check recommends and store information for priority calculation
-            recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
+            recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
             if recs:
                 try:
                     recDict = bb.utils.explode_dep_versions2(recs)
@@ -1201,7 +1201,7 @@ class BBCooker:
                     if rec in collection_list:
                         if oplist:
                             opstr = oplist[0]
-                            layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
+                            layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                             if layerver:
                                 (op, recver) = opstr.split()
                                 try:
@@ -1235,7 +1235,7 @@ class BBCooker:
         # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
         for c in collection_list:
             calc_layer_priority(c)
-            regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
+            regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
             if regex == None:
                 parselog.error("BBFILE_PATTERN_%s not defined" % c)
                 errors = True
@@ -1367,7 +1367,7 @@ class BBCooker:
             taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
             taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)
 
-        buildname = self.data.getVar("BUILDNAME", True)
+        buildname = self.data.getVar("BUILDNAME")
         bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
 
         # Execute the runqueue
@@ -1586,7 +1586,7 @@ class BBCooker:
             bb.event.fire(bb.event.SanityCheck(False), self.data)
 
         for mc in self.multiconfigs:
-            ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
             self.recipecaches[mc].ignored_dependencies = set(ignore.split())
 
         for dep in self.configuration.extra_assume_provided:
@@ -1627,7 +1627,7 @@ class BBCooker:
         if len(pkgs_to_build) == 0:
             raise NothingToBuild
 
-        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
+        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED") or "").split()
         for pkg in pkgs_to_build:
             if pkg in ignore:
                 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1797,7 +1797,7 @@ class CookerCollectFiles(object):
 
         collectlog.debug(1, "collecting .bb files")
 
-        files = (config.getVar( "BBFILES", True) or "").split()
+        files = (config.getVar( "BBFILES") or "").split()
         config.setVar("BBFILES", " ".join(files))
 
         # Sort files by priority
@@ -1827,7 +1827,7 @@ class CookerCollectFiles(object):
                 if g not in newfiles:
                     newfiles.append(g)
 
-        bbmask = config.getVar('BBMASK', True)
+        bbmask = config.getVar('BBMASK')
 
         if bbmask:
             # First validate the individual regular expressions and ignore any
@@ -1923,7 +1923,7 @@ class CookerCollectFiles(object):
 
         for collection, pattern, regex, _ in self.bbfile_config_priorities:
             if regex in unmatched:
-                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                     collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
 
         return priorities
@@ -2080,7 +2080,7 @@ class CookerParser(object):
         self.toparse = self.total - len(self.fromcache)
         self.progress_chunk = int(max(self.toparse / 100, 1))
 
-        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
+        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                  multiprocessing.cpu_count()), len(self.willparse))
 
         self.start()
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index b07c266439..680f79a3ec 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -212,7 +212,7 @@ def _inherit(bbclass, data):
 
 def findConfigFile(configfile, data):
     search = []
-    bbpath = data.getVar("BBPATH", True)
+    bbpath = data.getVar("BBPATH")
     if bbpath:
         for i in bbpath.split(":"):
             search.append(os.path.join(i, "conf", configfile))
@@ -286,7 +286,7 @@ class CookerDataBuilder(object):
         self.data_hash = self.data.get_hash()
         self.mcdata[''] = self.data
 
-        multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split()
+        multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
         for config in multiconfig:
             mcdata = self.parseConfigurationFiles(['conf/multiconfig/%s.conf' % config] + self.prefiles, self.postfiles)
             bb.event.fire(bb.event.ConfigParsed(), mcdata)
@@ -319,7 +319,7 @@ class CookerDataBuilder(object):
             data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
             data = parse_config_file(layerconf, data)
 
-            layers = (data.getVar('BBLAYERS', True) or "").split()
+            layers = (data.getVar('BBLAYERS') or "").split()
 
             data = bb.data.createCopy(data)
             approved = bb.utils.approved_variables()
@@ -342,7 +342,7 @@ class CookerDataBuilder(object):
             data.delVar('LAYERDIR_RE')
             data.delVar('LAYERDIR')
 
-        if not data.getVar("BBPATH", True):
+        if not data.getVar("BBPATH"):
             msg = "The BBPATH variable is not set"
             if not layerconf:
                 msg += (" and bitbake did not find a conf/bblayers.conf file in"
@@ -357,7 +357,7 @@ class CookerDataBuilder(object):
             data = parse_config_file(p, data)
 
         # Handle any INHERITs and inherit the base class
-        bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
+        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
         for bbclass in bbclasses:
             data = _inherit(bbclass, data)
 
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 1a2946f943..02a1140d0f 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -121,7 +121,7 @@ def inheritFromOS(d, savedenv, permitted):
     for s in savedenv.keys():
         if s in permitted:
             try:
-                d.setVar(s, savedenv.getVar(s, True), op = 'from env')
+                d.setVar(s, savedenv.getVar(s), op = 'from env')
                 if s in exportlist:
                     d.setVarFlag(s, "export", True, op = 'auto env export')
             except TypeError:
@@ -141,7 +141,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
     try:
         if all:
             oval = d.getVar(var, False)
-        val = d.getVar(var, True)
+        val = d.getVar(var)
     except (KeyboardInterrupt, bb.build.FuncFailed):
         raise
     except Exception as exc:
@@ -208,9 +208,9 @@ def exported_vars(d):
     k = list(exported_keys(d))
     for key in k:
         try:
-            value = d.getVar(key, True)
+            value = d.getVar(key)
         except Exception as err:
-            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE", True), key, err))
+            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err))
             continue
 
         if value is not None:
@@ -225,7 +225,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
 
     o.write('\n')
     emit_var(func, o, d, False) and o.write('\n')
-    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
+    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
     newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
     seen = set()
     while newdeps:
@@ -235,7 +235,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
         for dep in deps:
             if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
                 emit_var(dep, o, d, False) and o.write('\n')
-                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
+                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
                 newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
         newdeps -= seen
 
@@ -295,7 +295,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
     def handle_contains(value, contains, d):
         newvalue = ""
         for k in sorted(contains):
-            l = (d.getVar(k, True) or "").split()
+            l = (d.getVar(k) or "").split()
             for word in sorted(contains[k]):
                 if word in l:
                     newvalue += "\n%s{%s} = Set" % (k, word)
@@ -313,7 +313,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
         if varflags.get("python"):
             parser = bb.codeparser.PythonParser(key, logger)
             if value and "\t" in value:
                logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
-                logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
+                logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
             parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
             deps = deps | parser.references
             deps = deps | (keys & parser.execs)
@@ -368,7 +368,7 @@ def generate_dependencies(d):
 
     keys = set(key for key in d if not key.startswith("__"))
     shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
-    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
+    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
 
     deps = {}
    values = {}
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index b7badb6a37..482399a95d 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -146,7 +146,7 @@ class DataContext(dict):
         self['d'] = metadata
 
     def __missing__(self, key):
-        value = self.metadata.getVar(key, True)
+        value = self.metadata.getVar(key)
         if value is None or self.metadata.getVarFlag(key, 'func', False):
             raise KeyError(key)
         else:
@@ -318,7 +318,7 @@ class VariableHistory(object):
         the files in which they were added.
         """
         history = self.variable(var)
-        finalitems = (d.getVar(var, True) or '').split()
+        finalitems = (d.getVar(var) or '').split()
         filemap = {}
         isset = False
         for event in history:
@@ -426,11 +426,11 @@ class DataSmart(MutableMapping):
             # Can end up here recursively so setup dummy values
             self.overrides = []
             self.overridesset = set()
-            self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
             self.overridesset = set(self.overrides)
             self.inoverride = False
             self.expand_cache = {}
-            newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            newoverrides = (self.getVar("OVERRIDES") or "").split(":") or []
             if newoverrides == self.overrides:
                 break
             self.overrides = newoverrides
@@ -541,7 +541,7 @@ class DataSmart(MutableMapping):
             nextnew = set()
             self.overridevars.update(new)
             for i in new:
-                vardata = self.expandWithRefs(self.getVar(i, True), i)
+                vardata = self.expandWithRefs(self.getVar(i), i)
                 nextnew.update(vardata.references)
                 nextnew.update(vardata.contains.keys())
             new = nextnew
@@ -937,7 +937,7 @@ class DataSmart(MutableMapping):
         bb.data.expandKeys(d)
         bb.data.update_data(d)
 
-        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
+        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split())
         keys = set(key for key in iter(d) if not key.startswith("__"))
         for key in keys:
             if key in config_whitelist:
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5c76b22529..ced43630ea 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -491,7 +491,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -572,7 +572,7 @@ def verify_checksum(ud, d, precomputed={}):
 
     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -718,7 +718,7 @@ def subprocess_setup():
 
 def get_autorev(d):
     # only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"
 
@@ -737,7 +737,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     """
 
     scms = []
-    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -757,7 +757,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
 
@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
 
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
@@ -860,7 +860,7 @@ def check_network_access(d, info = "", url = None):
     """
     log remote network access, and error if BB_NO_NETWORK is set
     """
-    if d.getVar("BB_NO_NETWORK", True) == "1":
+    if d.getVar("BB_NO_NETWORK") == "1":
         raise NetworkAccess(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -958,7 +958,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
 
         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -1032,14 +1032,14 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
 
     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
 
     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1071,7 @@ def srcrev_internal_helper(ud, d, name):
     """
 
     srcrev = None
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
         attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1082,7 @@ def srcrev_internal_helper(ud, d, name):
     attempts.append("SRCREV")
 
     for a in attempts:
-        srcrev = d.getVar(a, True)
+        srcrev = d.getVar(a)
         if srcrev and srcrev != "INVALID":
             break
 
@@ -1115,7 +1115,7 @@ def get_checksum_file_list(d):
     """
     fetch = Fetch([], d, cache = False, localonly = True)
 
-    dl_dir = d.getVar('DL_DIR', True)
+    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
@@ -1129,9 +1129,9 @@ def get_checksum_file_list(d):
                 if f.startswith(dl_dir):
                     # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                     if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                     else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                 filelist.append(f + ":" + str(os.path.exists(f)))
 
     return " ".join(filelist)
@@ -1204,7 +1204,7 @@ class FetchData(object):
                 raise NonLocalMethod()
 
         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)
 
         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1217,7 @@ class FetchData(object):
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)
 
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
 
         if not self.needdonestamp:
             return
@@ -1257,12 +1257,12 @@ class FetchData(object):
         if "srcdate" in self.parm:
             return self.parm['srcdate']
 
-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")
 
         if pn:
-            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
 
-        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+        return d.getVar("SRCDATE") or d.getVar("DATE")
 
 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -1282,7 +1282,7 @@ class FetchMethod(object):
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(d.getVar("DL_DIR", True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
 
     def supports_checksum(self, urldata):
         """
@@ -1450,7 +1450,7 @@ class FetchMethod(object):
         if not cmd:
             return
 
-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1507,7 @@ class FetchMethod(object):
 
     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")
 
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1515,14 @@ class Fetch(object):
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
 
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache
 
-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]
 
@@ -1565,8 +1565,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls
 
-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
 
         for u in urls:
             ud = self.ud[u]
@@ -1584,7 +1584,7 @@ class Fetch(object):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)
 
                 if premirroronly:
@@ -1624,7 +1624,7 @@ class Fetch(object):
                         if not verified_stamp:
                             m.clean(ud, self.d)
                         logger.debug(1, "Trying MIRRORS")
-                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         localpath = try_mirrors(self, self.d, ud, mirrors)
 
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1657,7 +1657,7 @@ class Fetch(object):
| 1657 | m = ud.method | 1657 | m = ud.method |
| 1658 | logger.debug(1, "Testing URL %s", u) | 1658 | logger.debug(1, "Testing URL %s", u) |
| 1659 | # First try checking uri, u, from PREMIRRORS | 1659 | # First try checking uri, u, from PREMIRRORS |
| 1660 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) | 1660 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) |
| 1661 | ret = try_mirrors(self, self.d, ud, mirrors, True) | 1661 | ret = try_mirrors(self, self.d, ud, mirrors, True) |
| 1662 | if not ret: | 1662 | if not ret: |
| 1663 | # Next try checking from the original uri, u | 1663 | # Next try checking from the original uri, u |
| @@ -1665,7 +1665,7 @@ class Fetch(object): | |||
| 1665 | ret = m.checkstatus(self, ud, self.d) | 1665 | ret = m.checkstatus(self, ud, self.d) |
| 1666 | except: | 1666 | except: |
| 1667 | # Finally, try checking uri, u, from MIRRORS | 1667 | # Finally, try checking uri, u, from MIRRORS |
| 1668 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) | 1668 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) |
| 1669 | ret = try_mirrors(self, self.d, ud, mirrors, True) | 1669 | ret = try_mirrors(self, self.d, ud, mirrors, True) |
| 1670 | 1670 | ||
| 1671 | if not ret: | 1671 | if not ret: |
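
Note on the pattern in this file (and every hunk that follows): getVar()'s expand parameter now defaults to True, so the explicit second argument is simply dropped. A minimal sketch of the new calling convention, using a stand-in MiniData class rather than BitBake's real DataSmart datastore:

    import re

    # Stand-in datastore, only to illustrate the post-migration calling
    # convention; bb.data_smart.DataSmart is far richer than this.
    class MiniData:
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=True):
            # Expansion now defaults to True, so d.getVar("PN") behaves
            # like the old d.getVar("PN", True).
            value = self._vars.get(name)
            if expand and isinstance(value, str):
                # Crude ${VAR} expansion; nothing like the real parser.
                value = re.sub(r"\$\{(\w+)\}",
                               lambda m: self.getVar(m.group(1)) or "", value)
            return value

    d = MiniData()
    d.setVar("PN", "busybox")
    d.setVar("BP", "${PN}-1.24")
    assert d.getVar("BP") == "busybox-1.24"       # expanded by default
    assert d.getVar("BP", False) == "${PN}-1.24"  # raw value still reachable
    assert d.getVar("SRC_URI") is None            # unset stays None, hence "or ''"
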
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py index d35b2dabeb..8df8f53d2f 100644 --- a/bitbake/lib/bb/fetch2/clearcase.py +++ b/bitbake/lib/bb/fetch2/clearcase.py | |||
| @@ -108,13 +108,13 @@ class ClearCase(FetchMethod): | |||
| 108 | else: | 108 | else: |
| 109 | ud.module = "" | 109 | ud.module = "" |
| 110 | 110 | ||
| 111 | ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool") | 111 | ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool") |
| 112 | 112 | ||
| 113 | if d.getVar("SRCREV", True) == "INVALID": | 113 | if d.getVar("SRCREV") == "INVALID": |
| 114 | raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.") | 114 | raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.") |
| 115 | 115 | ||
| 116 | ud.label = d.getVar("SRCREV", False) | 116 | ud.label = d.getVar("SRCREV", False) |
| 117 | ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True) | 117 | ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC") |
| 118 | 118 | ||
| 119 | ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path) | 119 | ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path) |
| 120 | 120 | ||
| @@ -124,7 +124,7 @@ class ClearCase(FetchMethod): | |||
| 124 | 124 | ||
| 125 | ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) | 125 | ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) |
| 126 | ud.csname = "%s-config-spec" % (ud.identifier) | 126 | ud.csname = "%s-config-spec" % (ud.identifier) |
| 127 | ud.ccasedir = os.path.join(d.getVar("DL_DIR", True), ud.type) | 127 | ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) |
| 128 | ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) | 128 | ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) |
| 129 | ud.configspecfile = os.path.join(ud.ccasedir, ud.csname) | 129 | ud.configspecfile = os.path.join(ud.ccasedir, ud.csname) |
| 130 | ud.localfile = "%s.tar.gz" % (ud.identifier) | 130 | ud.localfile = "%s.tar.gz" % (ud.identifier) |
| @@ -144,7 +144,7 @@ class ClearCase(FetchMethod): | |||
| 144 | self.debug("configspecfile = %s" % ud.configspecfile) | 144 | self.debug("configspecfile = %s" % ud.configspecfile) |
| 145 | self.debug("localfile = %s" % ud.localfile) | 145 | self.debug("localfile = %s" % ud.localfile) |
| 146 | 146 | ||
| 147 | ud.localfile = os.path.join(d.getVar("DL_DIR", True), ud.localfile) | 147 | ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile) |
| 148 | 148 | ||
| 149 | def _build_ccase_command(self, ud, command): | 149 | def _build_ccase_command(self, ud, command): |
| 150 | """ | 150 | """ |
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py index 5ff70ba921..64c50c2165 100644 --- a/bitbake/lib/bb/fetch2/cvs.py +++ b/bitbake/lib/bb/fetch2/cvs.py | |||
| @@ -87,10 +87,10 @@ class Cvs(FetchMethod): | |||
| 87 | cvsroot = ud.path | 87 | cvsroot = ud.path |
| 88 | else: | 88 | else: |
| 89 | cvsroot = ":" + method | 89 | cvsroot = ":" + method |
| 90 | cvsproxyhost = d.getVar('CVS_PROXY_HOST', True) | 90 | cvsproxyhost = d.getVar('CVS_PROXY_HOST') |
| 91 | if cvsproxyhost: | 91 | if cvsproxyhost: |
| 92 | cvsroot += ";proxy=" + cvsproxyhost | 92 | cvsroot += ";proxy=" + cvsproxyhost |
| 93 | cvsproxyport = d.getVar('CVS_PROXY_PORT', True) | 93 | cvsproxyport = d.getVar('CVS_PROXY_PORT') |
| 94 | if cvsproxyport: | 94 | if cvsproxyport: |
| 95 | cvsroot += ";proxyport=" + cvsproxyport | 95 | cvsroot += ";proxyport=" + cvsproxyport |
| 96 | cvsroot += ":" + ud.user | 96 | cvsroot += ":" + ud.user |
| @@ -110,7 +110,7 @@ class Cvs(FetchMethod): | |||
| 110 | if ud.tag: | 110 | if ud.tag: |
| 111 | options.append("-r %s" % ud.tag) | 111 | options.append("-r %s" % ud.tag) |
| 112 | 112 | ||
| 113 | cvsbasecmd = d.getVar("FETCHCMD_cvs", True) | 113 | cvsbasecmd = d.getVar("FETCHCMD_cvs") |
| 114 | cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module | 114 | cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module |
| 115 | cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options) | 115 | cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options) |
| 116 | 116 | ||
| @@ -120,8 +120,8 @@ class Cvs(FetchMethod): | |||
| 120 | 120 | ||
| 121 | # create module directory | 121 | # create module directory |
| 122 | logger.debug(2, "Fetch: checking for module directory") | 122 | logger.debug(2, "Fetch: checking for module directory") |
| 123 | pkg = d.getVar('PN', True) | 123 | pkg = d.getVar('PN') |
| 124 | pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) | 124 | pkgdir = os.path.join(d.getVar('CVSDIR'), pkg) |
| 125 | moddir = os.path.join(pkgdir, localdir) | 125 | moddir = os.path.join(pkgdir, localdir) |
| 126 | workdir = None | 126 | workdir = None |
| 127 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): | 127 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): |
| @@ -164,8 +164,8 @@ class Cvs(FetchMethod): | |||
| 164 | def clean(self, ud, d): | 164 | def clean(self, ud, d): |
| 165 | """ Clean CVS Files and tarballs """ | 165 | """ Clean CVS Files and tarballs """ |
| 166 | 166 | ||
| 167 | pkg = d.getVar('PN', True) | 167 | pkg = d.getVar('PN') |
| 168 | pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg) | 168 | pkgdir = os.path.join(d.getVar("CVSDIR"), pkg) |
| 169 | 169 | ||
| 170 | bb.utils.remove(pkgdir, True) | 170 | bb.utils.remove(pkgdir, True) |
| 171 | bb.utils.remove(ud.localpath) | 171 | bb.utils.remove(ud.localpath) |
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py index 6b618345c3..cb9fa3fb1a 100644 --- a/bitbake/lib/bb/fetch2/git.py +++ b/bitbake/lib/bb/fetch2/git.py | |||
| @@ -182,9 +182,9 @@ class Git(FetchMethod): | |||
| 182 | if ud.usehead: | 182 | if ud.usehead: |
| 183 | ud.unresolvedrev['default'] = 'HEAD' | 183 | ud.unresolvedrev['default'] = 'HEAD' |
| 184 | 184 | ||
| 185 | ud.basecmd = d.getVar("FETCHCMD_git", True) or "git -c core.fsyncobjectfiles=0" | 185 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" |
| 186 | 186 | ||
| 187 | ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0") or ud.rebaseable | 187 | ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable |
| 188 | 188 | ||
| 189 | ud.setup_revisons(d) | 189 | ud.setup_revisons(d) |
| 190 | 190 | ||
| @@ -207,8 +207,8 @@ class Git(FetchMethod): | |||
| 207 | for name in ud.names: | 207 | for name in ud.names: |
| 208 | gitsrcname = gitsrcname + '_' + ud.revisions[name] | 208 | gitsrcname = gitsrcname + '_' + ud.revisions[name] |
| 209 | ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname) | 209 | ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname) |
| 210 | ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) | 210 | ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) |
| 211 | gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/") | 211 | gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/") |
| 212 | ud.clonedir = os.path.join(gitdir, gitsrcname) | 212 | ud.clonedir = os.path.join(gitdir, gitsrcname) |
| 213 | 213 | ||
| 214 | ud.localfile = ud.clonedir | 214 | ud.localfile = ud.clonedir |
| @@ -229,7 +229,7 @@ class Git(FetchMethod): | |||
| 229 | def try_premirror(self, ud, d): | 229 | def try_premirror(self, ud, d): |
| 230 | # If we don't do this, updating an existing checkout with only premirrors | 230 | # If we don't do this, updating an existing checkout with only premirrors |
| 231 | # is not possible | 231 | # is not possible |
| 232 | if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: | 232 | if d.getVar("BB_FETCH_PREMIRRORONLY") is not None: |
| 233 | return True | 233 | return True |
| 234 | if os.path.exists(ud.clonedir): | 234 | if os.path.exists(ud.clonedir): |
| 235 | return False | 235 | return False |
| @@ -418,7 +418,7 @@ class Git(FetchMethod): | |||
| 418 | """ | 418 | """ |
| 419 | pupver = ('', '') | 419 | pupver = ('', '') |
| 420 | 420 | ||
| 421 | tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)") | 421 | tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)") |
| 422 | try: | 422 | try: |
| 423 | output = self._lsremote(ud, d, "refs/tags/*") | 423 | output = self._lsremote(ud, d, "refs/tags/*") |
| 424 | except bb.fetch2.FetchError or bb.fetch2.NetworkAccess: | 424 | except bb.fetch2.FetchError or bb.fetch2.NetworkAccess: |
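
Independent of the getVar change, the last context line above carries a pre-existing pitfall: "except A or B:" evaluates "A or B" first, which yields just A, so the second exception type is never caught; a tuple is the correct form. Demonstrated with stand-in exception classes, not bb.fetch2's own:

    class FetchError(Exception):
        pass

    class NetworkAccess(Exception):
        pass

    def broken():
        try:
            raise NetworkAccess("network access disabled")
        except FetchError or NetworkAccess:   # same as "except FetchError:"
            return "caught"

    def fixed():
        try:
            raise NetworkAccess("network access disabled")
        except (FetchError, NetworkAccess):   # catches either type
            return "caught"

    assert fixed() == "caught"
    try:
        broken()
    except NetworkAccess:
        print("broken() let NetworkAccess escape")
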
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py index a1419aade5..ee5b2dd6f3 100644 --- a/bitbake/lib/bb/fetch2/hg.py +++ b/bitbake/lib/bb/fetch2/hg.py | |||
| @@ -78,15 +78,15 @@ class Hg(FetchMethod): | |||
| 78 | hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \ | 78 | hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \ |
| 79 | ud.host, ud.path.replace('/', '.')) | 79 | ud.host, ud.path.replace('/', '.')) |
| 80 | ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname | 80 | ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname |
| 81 | ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) | 81 | ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) |
| 82 | 82 | ||
| 83 | hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/") | 83 | hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/") |
| 84 | ud.pkgdir = os.path.join(hgdir, hgsrcname) | 84 | ud.pkgdir = os.path.join(hgdir, hgsrcname) |
| 85 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | 85 | ud.moddir = os.path.join(ud.pkgdir, ud.module) |
| 86 | ud.localfile = ud.moddir | 86 | ud.localfile = ud.moddir |
| 87 | ud.basecmd = d.getVar("FETCHCMD_hg", True) or "/usr/bin/env hg" | 87 | ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg" |
| 88 | 88 | ||
| 89 | ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) | 89 | ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") |
| 90 | 90 | ||
| 91 | def need_update(self, ud, d): | 91 | def need_update(self, ud, d): |
| 92 | revTag = ud.parm.get('rev', 'tip') | 92 | revTag = ud.parm.get('rev', 'tip') |
| @@ -99,7 +99,7 @@ class Hg(FetchMethod): | |||
| 99 | def try_premirror(self, ud, d): | 99 | def try_premirror(self, ud, d): |
| 100 | # If we don't do this, updating an existing checkout with only premirrors | 100 | # If we don't do this, updating an existing checkout with only premirrors |
| 101 | # is not possible | 101 | # is not possible |
| 102 | if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: | 102 | if d.getVar("BB_FETCH_PREMIRRORONLY") is not None: |
| 103 | return True | 103 | return True |
| 104 | if os.path.exists(ud.moddir): | 104 | if os.path.exists(ud.moddir): |
| 105 | return False | 105 | return False |
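
Worth noting while these hunks sit side by side: the git and npm fetchers normalise BB_GENERATE_MIRROR_TARBALLS with (value or "0") != "0", while the hg fetcher above stores the raw string, so a literal "0" would still be truthy later. A sketch of why the explicit comparison is the safer idiom (want_mirror_tarballs is an invented helper name):

    def want_mirror_tarballs(raw):
        # Normalisation used by the git and npm fetchers: unset or "0"
        # disables tarball generation, anything else enables it.
        return (raw or "0") != "0"

    assert want_mirror_tarballs(None) is False
    assert want_mirror_tarballs("0") is False
    assert want_mirror_tarballs("1") is True
    assert bool("0") is True   # raw truthiness, as in the hg fetcher, misleads
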
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py index 5adfd5d448..83778883e0 100644 --- a/bitbake/lib/bb/fetch2/local.py +++ b/bitbake/lib/bb/fetch2/local.py | |||
| @@ -63,13 +63,13 @@ class Local(FetchMethod): | |||
| 63 | newpath = path | 63 | newpath = path |
| 64 | if path[0] == "/": | 64 | if path[0] == "/": |
| 65 | return [path] | 65 | return [path] |
| 66 | filespath = d.getVar('FILESPATH', True) | 66 | filespath = d.getVar('FILESPATH') |
| 67 | if filespath: | 67 | if filespath: |
| 68 | logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) | 68 | logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) |
| 69 | newpath, hist = bb.utils.which(filespath, path, history=True) | 69 | newpath, hist = bb.utils.which(filespath, path, history=True) |
| 70 | searched.extend(hist) | 70 | searched.extend(hist) |
| 71 | if not newpath: | 71 | if not newpath: |
| 72 | filesdir = d.getVar('FILESDIR', True) | 72 | filesdir = d.getVar('FILESDIR') |
| 73 | if filesdir: | 73 | if filesdir: |
| 74 | logger.debug(2, "Searching for %s in path: %s" % (path, filesdir)) | 74 | logger.debug(2, "Searching for %s in path: %s" % (path, filesdir)) |
| 75 | newpath = os.path.join(filesdir, path) | 75 | newpath = os.path.join(filesdir, path) |
| @@ -81,7 +81,7 @@ class Local(FetchMethod): | |||
| 81 | logger.debug(2, "Searching for %s in path: %s" % (path, newpath)) | 81 | logger.debug(2, "Searching for %s in path: %s" % (path, newpath)) |
| 82 | return searched | 82 | return searched |
| 83 | if not os.path.exists(newpath): | 83 | if not os.path.exists(newpath): |
| 84 | dldirfile = os.path.join(d.getVar("DL_DIR", True), path) | 84 | dldirfile = os.path.join(d.getVar("DL_DIR"), path) |
| 85 | logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) | 85 | logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) |
| 86 | bb.utils.mkdirhier(os.path.dirname(dldirfile)) | 86 | bb.utils.mkdirhier(os.path.dirname(dldirfile)) |
| 87 | searched.append(dldirfile) | 87 | searched.append(dldirfile) |
| @@ -100,13 +100,13 @@ class Local(FetchMethod): | |||
| 100 | # no need to fetch local files, we'll deal with them in place. | 100 | # no need to fetch local files, we'll deal with them in place. |
| 101 | if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): | 101 | if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): |
| 102 | locations = [] | 102 | locations = [] |
| 103 | filespath = d.getVar('FILESPATH', True) | 103 | filespath = d.getVar('FILESPATH') |
| 104 | if filespath: | 104 | if filespath: |
| 105 | locations = filespath.split(":") | 105 | locations = filespath.split(":") |
| 106 | filesdir = d.getVar('FILESDIR', True) | 106 | filesdir = d.getVar('FILESDIR') |
| 107 | if filesdir: | 107 | if filesdir: |
| 108 | locations.append(filesdir) | 108 | locations.append(filesdir) |
| 109 | locations.append(d.getVar("DL_DIR", True)) | 109 | locations.append(d.getVar("DL_DIR")) |
| 110 | 110 | ||
| 111 | msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) | 111 | msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) |
| 112 | raise FetchError(msg) | 112 | raise FetchError(msg) |
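
The search order in this file is FILESPATH, then FILESDIR, then DL_DIR. A simplified sketch of the colon-separated lookup that bb.utils.which performs for the FILESPATH step (minus the history list the real helper also returns); the paths are illustrative, not BitBake defaults:

    import os

    def which(search_path, target):
        # First existing hit wins, so more specific layers should come
        # earlier in the colon-separated FILESPATH.
        for directory in search_path.split(":"):
            candidate = os.path.join(directory, target)
            if os.path.exists(candidate):
                return candidate
        return ""

    filespath = "/tmp/layer-a/files:/tmp/layer-b/files"
    print(which(filespath, "defconfig") or "not found")
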
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py index 66ab075b1c..cbeb8ff889 100644 --- a/bitbake/lib/bb/fetch2/npm.py +++ b/bitbake/lib/bb/fetch2/npm.py | |||
| @@ -87,12 +87,12 @@ class Npm(FetchMethod): | |||
| 87 | bb.utils.mkdirhier(ud.pkgdatadir) | 87 | bb.utils.mkdirhier(ud.pkgdatadir) |
| 88 | ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest) | 88 | ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest) |
| 89 | 89 | ||
| 90 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " | 90 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " |
| 91 | ud.prefixdir = prefixdir | 91 | ud.prefixdir = prefixdir |
| 92 | 92 | ||
| 93 | ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0") | 93 | ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") |
| 94 | ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version) | 94 | ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version) |
| 95 | ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) | 95 | ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) |
| 96 | 96 | ||
| 97 | def need_update(self, ud, d): | 97 | def need_update(self, ud, d): |
| 98 | if os.path.exists(ud.localpath): | 98 | if os.path.exists(ud.localpath): |
| @@ -102,7 +102,7 @@ class Npm(FetchMethod): | |||
| 102 | def _runwget(self, ud, d, command, quiet): | 102 | def _runwget(self, ud, d, command, quiet): |
| 103 | logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) | 103 | logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) |
| 104 | bb.fetch2.check_network_access(d, command) | 104 | bb.fetch2.check_network_access(d, command) |
| 105 | dldir = d.getVar("DL_DIR", True) | 105 | dldir = d.getVar("DL_DIR") |
| 106 | runfetchcmd(command, d, quiet, workdir=dldir) | 106 | runfetchcmd(command, d, quiet, workdir=dldir) |
| 107 | 107 | ||
| 108 | def _unpackdep(self, ud, pkg, data, destdir, dldir, d): | 108 | def _unpackdep(self, ud, pkg, data, destdir, dldir, d): |
| @@ -116,7 +116,7 @@ class Npm(FetchMethod): | |||
| 116 | # Change to subdir before executing command | 116 | # Change to subdir before executing command |
| 117 | if not os.path.exists(destdir): | 117 | if not os.path.exists(destdir): |
| 118 | os.makedirs(destdir) | 118 | os.makedirs(destdir) |
| 119 | path = d.getVar('PATH', True) | 119 | path = d.getVar('PATH') |
| 120 | if path: | 120 | if path: |
| 121 | cmd = "PATH=\"%s\" %s" % (path, cmd) | 121 | cmd = "PATH=\"%s\" %s" % (path, cmd) |
| 122 | bb.note("Unpacking %s to %s/" % (file, destdir)) | 122 | bb.note("Unpacking %s to %s/" % (file, destdir)) |
| @@ -132,7 +132,7 @@ class Npm(FetchMethod): | |||
| 132 | 132 | ||
| 133 | 133 | ||
| 134 | def unpack(self, ud, destdir, d): | 134 | def unpack(self, ud, destdir, d): |
| 135 | dldir = d.getVar("DL_DIR", True) | 135 | dldir = d.getVar("DL_DIR") |
| 136 | depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version) | 136 | depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version) |
| 137 | with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile: | 137 | with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile: |
| 138 | workobj = json.load(datafile) | 138 | workobj = json.load(datafile) |
| @@ -251,12 +251,12 @@ class Npm(FetchMethod): | |||
| 251 | lockdown = {} | 251 | lockdown = {} |
| 252 | 252 | ||
| 253 | if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror): | 253 | if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror): |
| 254 | dest = d.getVar("DL_DIR", True) | 254 | dest = d.getVar("DL_DIR") |
| 255 | bb.utils.mkdirhier(dest) | 255 | bb.utils.mkdirhier(dest) |
| 256 | runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest) | 256 | runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest) |
| 257 | return | 257 | return |
| 258 | 258 | ||
| 259 | shwrf = d.getVar('NPM_SHRINKWRAP', True) | 259 | shwrf = d.getVar('NPM_SHRINKWRAP') |
| 260 | logger.debug(2, "NPM shrinkwrap file is %s" % shwrf) | 260 | logger.debug(2, "NPM shrinkwrap file is %s" % shwrf) |
| 261 | if shwrf: | 261 | if shwrf: |
| 262 | try: | 262 | try: |
| @@ -266,7 +266,7 @@ class Npm(FetchMethod): | |||
| 266 | raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e))) | 266 | raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e))) |
| 267 | elif not ud.ignore_checksums: | 267 | elif not ud.ignore_checksums: |
| 268 | logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) | 268 | logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) |
| 269 | lckdf = d.getVar('NPM_LOCKDOWN', True) | 269 | lckdf = d.getVar('NPM_LOCKDOWN') |
| 270 | logger.debug(2, "NPM lockdown file is %s" % lckdf) | 270 | logger.debug(2, "NPM lockdown file is %s" % lckdf) |
| 271 | if lckdf: | 271 | if lckdf: |
| 272 | try: | 272 | try: |
| @@ -292,7 +292,7 @@ class Npm(FetchMethod): | |||
| 292 | if os.path.islink(ud.fullmirror): | 292 | if os.path.islink(ud.fullmirror): |
| 293 | os.unlink(ud.fullmirror) | 293 | os.unlink(ud.fullmirror) |
| 294 | 294 | ||
| 295 | dldir = d.getVar("DL_DIR", True) | 295 | dldir = d.getVar("DL_DIR") |
| 296 | logger.info("Creating tarball of npm data") | 296 | logger.info("Creating tarball of npm data") |
| 297 | runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, | 297 | runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, |
| 298 | workdir=dldir) | 298 | workdir=dldir) |
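
The mirror handling above shells out to "tar -cJf" / "tar -xJf" via runfetchcmd. The same round trip can be sketched with the stdlib tarfile module so it runs anywhere; file names and layout below are invented for illustration:

    import os
    import tarfile
    import tempfile

    with tempfile.TemporaryDirectory() as dldir:
        # Fake a fetched npm payload under DL_DIR/npm/.
        pkgdir = os.path.join(dldir, "npm")
        os.makedirs(pkgdir)
        open(os.path.join(pkgdir, "foo-1.0.tgz"), "wb").close()

        # build_mirror_data() equivalent: pack DL_DIR/npm into a .tar.xz.
        mirror = os.path.join(dldir, "npm_foo-1.0.tar.xz")
        with tarfile.open(mirror, "w:xz") as tar:
            tar.add(pkgdir, arcname="npm")

        # download() fast path: unpack the mirror tarball back into DL_DIR.
        with tempfile.TemporaryDirectory() as dest:
            with tarfile.open(mirror, "r:xz") as tar:
                tar.extractall(dest)
            assert os.path.exists(os.path.join(dest, "npm", "foo-1.0.tgz"))
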
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py index 295abf953b..26f88e1f46 100644 --- a/bitbake/lib/bb/fetch2/osc.py +++ b/bitbake/lib/bb/fetch2/osc.py | |||
| @@ -34,7 +34,7 @@ class Osc(FetchMethod): | |||
| 34 | 34 | ||
| 35 | # Create paths to osc checkouts | 35 | # Create paths to osc checkouts |
| 36 | relpath = self._strip_leading_slashes(ud.path) | 36 | relpath = self._strip_leading_slashes(ud.path) |
| 37 | ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host) | 37 | ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host) |
| 38 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) | 38 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) |
| 39 | 39 | ||
| 40 | if 'rev' in ud.parm: | 40 | if 'rev' in ud.parm: |
| @@ -84,7 +84,7 @@ class Osc(FetchMethod): | |||
| 84 | 84 | ||
| 85 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 85 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") |
| 86 | 86 | ||
| 87 | if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK): | 87 | if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): |
| 88 | oscupdatecmd = self._buildosccommand(ud, d, "update") | 88 | oscupdatecmd = self._buildosccommand(ud, d, "update") |
| 89 | logger.info("Update "+ ud.url) | 89 | logger.info("Update "+ ud.url) |
| 90 | # update sources there | 90 | # update sources there |
| @@ -112,7 +112,7 @@ class Osc(FetchMethod): | |||
| 112 | Generate a .oscrc to be used for this run. | 112 | Generate a .oscrc to be used for this run. |
| 113 | """ | 113 | """ |
| 114 | 114 | ||
| 115 | config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc") | 115 | config_path = os.path.join(d.getVar('OSCDIR'), "oscrc") |
| 116 | if (os.path.exists(config_path)): | 116 | if (os.path.exists(config_path)): |
| 117 | os.remove(config_path) | 117 | os.remove(config_path) |
| 118 | 118 | ||
| @@ -121,8 +121,8 @@ class Osc(FetchMethod): | |||
| 121 | f.write("apisrv = %s\n" % ud.host) | 121 | f.write("apisrv = %s\n" % ud.host) |
| 122 | f.write("scheme = http\n") | 122 | f.write("scheme = http\n") |
| 123 | f.write("su-wrapper = su -c\n") | 123 | f.write("su-wrapper = su -c\n") |
| 124 | f.write("build-root = %s\n" % d.getVar('WORKDIR', True)) | 124 | f.write("build-root = %s\n" % d.getVar('WORKDIR')) |
| 125 | f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True)) | 125 | f.write("urllist = %s\n" % d.getVar("OSCURLLIST")) |
| 126 | f.write("extra-pkgs = gzip\n") | 126 | f.write("extra-pkgs = gzip\n") |
| 127 | f.write("\n") | 127 | f.write("\n") |
| 128 | f.write("[%s]\n" % ud.host) | 128 | f.write("[%s]\n" % ud.host) |
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py index 50cb479096..be73ca0518 100644 --- a/bitbake/lib/bb/fetch2/perforce.py +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
| @@ -44,13 +44,13 @@ class Perforce(FetchMethod): | |||
| 44 | provided by the env, use it. If P4PORT is specified by the recipe, use | 44 | provided by the env, use it. If P4PORT is specified by the recipe, use |
| 45 | its values, which may override the settings in P4CONFIG. | 45 | its values, which may override the settings in P4CONFIG. |
| 46 | """ | 46 | """ |
| 47 | ud.basecmd = d.getVar('FETCHCMD_p4', True) | 47 | ud.basecmd = d.getVar('FETCHCMD_p4') |
| 48 | if not ud.basecmd: | 48 | if not ud.basecmd: |
| 49 | ud.basecmd = "/usr/bin/env p4" | 49 | ud.basecmd = "/usr/bin/env p4" |
| 50 | 50 | ||
| 51 | ud.dldir = d.getVar('P4DIR', True) | 51 | ud.dldir = d.getVar('P4DIR') |
| 52 | if not ud.dldir: | 52 | if not ud.dldir: |
| 53 | ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4') | 53 | ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4') |
| 54 | 54 | ||
| 55 | path = ud.url.split('://')[1] | 55 | path = ud.url.split('://')[1] |
| 56 | path = path.split(';')[0] | 56 | path = path.split(';')[0] |
| @@ -62,7 +62,7 @@ class Perforce(FetchMethod): | |||
| 62 | ud.path = path | 62 | ud.path = path |
| 63 | 63 | ||
| 64 | ud.usingp4config = False | 64 | ud.usingp4config = False |
| 65 | p4port = d.getVar('P4PORT', True) | 65 | p4port = d.getVar('P4PORT') |
| 66 | 66 | ||
| 67 | if p4port: | 67 | if p4port: |
| 68 | logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port) | 68 | logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port) |
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py index bfd4ae16aa..24dcebb0cb 100644 --- a/bitbake/lib/bb/fetch2/repo.py +++ b/bitbake/lib/bb/fetch2/repo.py | |||
| @@ -56,12 +56,12 @@ class Repo(FetchMethod): | |||
| 56 | def download(self, ud, d): | 56 | def download(self, ud, d): |
| 57 | """Fetch url""" | 57 | """Fetch url""" |
| 58 | 58 | ||
| 59 | if os.access(os.path.join(d.getVar("DL_DIR", True), ud.localfile), os.R_OK): | 59 | if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK): |
| 60 | logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) | 60 | logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) |
| 61 | return | 61 | return |
| 62 | 62 | ||
| 63 | gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) | 63 | gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) |
| 64 | repodir = d.getVar("REPODIR", True) or os.path.join(d.getVar("DL_DIR", True), "repo") | 64 | repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo") |
| 65 | codir = os.path.join(repodir, gitsrcname, ud.manifest) | 65 | codir = os.path.join(repodir, gitsrcname, ud.manifest) |
| 66 | 66 | ||
| 67 | if ud.user: | 67 | if ud.user: |
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py index 48ddfc176a..da857dd9ff 100644 --- a/bitbake/lib/bb/fetch2/sftp.py +++ b/bitbake/lib/bb/fetch2/sftp.py | |||
| @@ -104,7 +104,7 @@ class SFTP(FetchMethod): | |||
| 104 | port = '-P %d' % urlo.port | 104 | port = '-P %d' % urlo.port |
| 105 | urlo.port = None | 105 | urlo.port = None |
| 106 | 106 | ||
| 107 | dldir = d.getVar('DL_DIR', True) | 107 | dldir = d.getVar('DL_DIR') |
| 108 | lpath = os.path.join(dldir, ud.localfile) | 108 | lpath = os.path.join(dldir, ud.localfile) |
| 109 | 109 | ||
| 110 | user = '' | 110 | user = '' |
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py index 56f9b7eb35..e668b0d4b7 100644 --- a/bitbake/lib/bb/fetch2/ssh.py +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
| @@ -87,11 +87,11 @@ class SSH(FetchMethod): | |||
| 87 | m = __pattern__.match(urldata.url) | 87 | m = __pattern__.match(urldata.url) |
| 88 | path = m.group('path') | 88 | path = m.group('path') |
| 89 | host = m.group('host') | 89 | host = m.group('host') |
| 90 | urldata.localpath = os.path.join(d.getVar('DL_DIR', True), | 90 | urldata.localpath = os.path.join(d.getVar('DL_DIR'), |
| 91 | os.path.basename(os.path.normpath(path))) | 91 | os.path.basename(os.path.normpath(path))) |
| 92 | 92 | ||
| 93 | def download(self, urldata, d): | 93 | def download(self, urldata, d): |
| 94 | dldir = d.getVar('DL_DIR', True) | 94 | dldir = d.getVar('DL_DIR') |
| 95 | 95 | ||
| 96 | m = __pattern__.match(urldata.url) | 96 | m = __pattern__.match(urldata.url) |
| 97 | path = m.group('path') | 97 | path = m.group('path') |
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py index 6ca79d35d7..b568c72049 100644 --- a/bitbake/lib/bb/fetch2/svn.py +++ b/bitbake/lib/bb/fetch2/svn.py | |||
| @@ -50,7 +50,7 @@ class Svn(FetchMethod): | |||
| 50 | if not "module" in ud.parm: | 50 | if not "module" in ud.parm: |
| 51 | raise MissingParameterError('module', ud.url) | 51 | raise MissingParameterError('module', ud.url) |
| 52 | 52 | ||
| 53 | ud.basecmd = d.getVar('FETCHCMD_svn', True) | 53 | ud.basecmd = d.getVar('FETCHCMD_svn') |
| 54 | 54 | ||
| 55 | ud.module = ud.parm["module"] | 55 | ud.module = ud.parm["module"] |
| 56 | 56 | ||
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index ecb946aa81..4ba63df0a8 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
| @@ -88,7 +88,7 @@ class Wget(FetchMethod): | |||
| 88 | if not ud.localfile: | 88 | if not ud.localfile: |
| 89 | ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) | 89 | ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) |
| 90 | 90 | ||
| 91 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" | 91 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" |
| 92 | 92 | ||
| 93 | def _runwget(self, ud, d, command, quiet): | 93 | def _runwget(self, ud, d, command, quiet): |
| 94 | 94 | ||
| @@ -104,7 +104,7 @@ class Wget(FetchMethod): | |||
| 104 | fetchcmd = self.basecmd | 104 | fetchcmd = self.basecmd |
| 105 | 105 | ||
| 106 | if 'downloadfilename' in ud.parm: | 106 | if 'downloadfilename' in ud.parm: |
| 107 | dldir = d.getVar("DL_DIR", True) | 107 | dldir = d.getVar("DL_DIR") |
| 108 | bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile)) | 108 | bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile)) |
| 109 | fetchcmd += " -O " + dldir + os.sep + ud.localfile | 109 | fetchcmd += " -O " + dldir + os.sep + ud.localfile |
| 110 | 110 | ||
| @@ -543,7 +543,7 @@ class Wget(FetchMethod): | |||
| 543 | self.suffix_regex_comp = re.compile(psuffix_regex) | 543 | self.suffix_regex_comp = re.compile(psuffix_regex) |
| 544 | 544 | ||
| 545 | # compile regex, can be specific by package or generic regex | 545 | # compile regex, can be specific by package or generic regex |
| 546 | pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True) | 546 | pn_regex = d.getVar('UPSTREAM_CHECK_REGEX') |
| 547 | if pn_regex: | 547 | if pn_regex: |
| 548 | package_custom_regex_comp = re.compile(pn_regex) | 548 | package_custom_regex_comp = re.compile(pn_regex) |
| 549 | else: | 549 | else: |
| @@ -564,7 +564,7 @@ class Wget(FetchMethod): | |||
| 564 | sanity check to ensure same name and type. | 564 | sanity check to ensure same name and type. |
| 565 | """ | 565 | """ |
| 566 | package = ud.path.split("/")[-1] | 566 | package = ud.path.split("/")[-1] |
| 567 | current_version = ['', d.getVar('PV', True), ''] | 567 | current_version = ['', d.getVar('PV'), ''] |
| 568 | 568 | ||
| 569 | """possible to have no version in pkg name, such as spectrum-fw""" | 569 | """possible to have no version in pkg name, such as spectrum-fw""" |
| 570 | if not re.search("\d+", package): | 570 | if not re.search("\d+", package): |
| @@ -579,7 +579,7 @@ class Wget(FetchMethod): | |||
| 579 | bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern)) | 579 | bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern)) |
| 580 | 580 | ||
| 581 | uri = "" | 581 | uri = "" |
| 582 | regex_uri = d.getVar("UPSTREAM_CHECK_URI", True) | 582 | regex_uri = d.getVar("UPSTREAM_CHECK_URI") |
| 583 | if not regex_uri: | 583 | if not regex_uri: |
| 584 | path = ud.path.split(package)[0] | 584 | path = ud.path.split(package)[0] |
| 585 | 585 | ||
| @@ -588,7 +588,7 @@ class Wget(FetchMethod): | |||
| 588 | dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") | 588 | dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") |
| 589 | m = dirver_regex.search(path) | 589 | m = dirver_regex.search(path) |
| 590 | if m: | 590 | if m: |
| 591 | pn = d.getVar('PN', True) | 591 | pn = d.getVar('PN') |
| 592 | dirver = m.group('dirver') | 592 | dirver = m.group('dirver') |
| 593 | 593 | ||
| 594 | dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn))) | 594 | dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn))) |
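
The directory-version regex in the hunk above is easier to read against a concrete input; the dirver group captures the versioned path component that upstream checking later substitutes. Sample paths are invented:

    import re

    # Same pattern as in latest_versionstring above.
    dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")

    m = dirver_regex.search("/pub/releases/1.2.3/foo-1.2.3.tar.gz")
    assert m and m.group("dirver") == "1.2.3"

    m = dirver_regex.search("/downloads/v2.0_r4/")
    assert m and m.group("dirver") == "v2.0_r4"
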
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py index 203c405044..dff57ad878 100644 --- a/bitbake/lib/bb/monitordisk.py +++ b/bitbake/lib/bb/monitordisk.py | |||
| @@ -141,7 +141,7 @@ def getInterval(configuration): | |||
| 141 | spaceDefault = 50 * 1024 * 1024 | 141 | spaceDefault = 50 * 1024 * 1024 |
| 142 | inodeDefault = 5 * 1024 | 142 | inodeDefault = 5 * 1024 |
| 143 | 143 | ||
| 144 | interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True) | 144 | interval = configuration.getVar("BB_DISKMON_WARNINTERVAL") |
| 145 | if not interval: | 145 | if not interval: |
| 146 | return spaceDefault, inodeDefault | 146 | return spaceDefault, inodeDefault |
| 147 | else: | 147 | else: |
| @@ -179,7 +179,7 @@ class diskMonitor: | |||
| 179 | self.enableMonitor = False | 179 | self.enableMonitor = False |
| 180 | self.configuration = configuration | 180 | self.configuration = configuration |
| 181 | 181 | ||
| 182 | BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None | 182 | BBDirs = configuration.getVar("BB_DISKMON_DIRS") or None |
| 183 | if BBDirs: | 183 | if BBDirs: |
| 184 | self.devDict = getDiskData(BBDirs, configuration) | 184 | self.devDict = getDiskData(BBDirs, configuration) |
| 185 | if self.devDict: | 185 | if self.devDict: |
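
getInterval's defaults above are 50M of space and 5K of inodes; when BB_DISKMON_WARNINTERVAL is set, it takes a value of the form "50M,5K". A simplified parser sketch (parse_warn_interval is an invented name, and the real getInterval also validates units and aborts on malformed input):

    def parse_warn_interval(raw):
        # Defaults mirror getInterval above: 50M space, 5K inodes.
        space_default, inode_default = 50 * 1024 * 1024, 5 * 1024
        if not raw:
            return space_default, inode_default
        units = {"K": 1024, "M": 1024 ** 2, "G": 1024 ** 3}
        space_str, inode_str = raw.split(",")
        space = int(space_str[:-1]) * units[space_str[-1].upper()]
        inodes = int(inode_str[:-1]) * units[inode_str[-1].upper()]
        return space, inodes

    assert parse_warn_interval(None) == (50 * 1024 * 1024, 5 * 1024)
    assert parse_warn_interval("100M,10K") == (100 * 1024 ** 2, 10 * 1024)
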
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index 26ae7ead86..a2952ecc0f 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
| @@ -123,7 +123,7 @@ def init_parser(d): | |||
| 123 | 123 | ||
| 124 | def resolve_file(fn, d): | 124 | def resolve_file(fn, d): |
| 125 | if not os.path.isabs(fn): | 125 | if not os.path.isabs(fn): |
| 126 | bbpath = d.getVar("BBPATH", True) | 126 | bbpath = d.getVar("BBPATH") |
| 127 | newfn, attempts = bb.utils.which(bbpath, fn, history=True) | 127 | newfn, attempts = bb.utils.which(bbpath, fn, history=True) |
| 128 | for af in attempts: | 128 | for af in attempts: |
| 129 | mark_dependency(d, af) | 129 | mark_dependency(d, af) |
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index fa83b18981..a3990e5b3e 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
| @@ -407,7 +407,7 @@ def _expand_versions(versions): | |||
| 407 | versions = itertools.chain(newversions, versions) | 407 | versions = itertools.chain(newversions, versions) |
| 408 | 408 | ||
| 409 | def multi_finalize(fn, d): | 409 | def multi_finalize(fn, d): |
| 410 | appends = (d.getVar("__BBAPPEND", True) or "").split() | 410 | appends = (d.getVar("__BBAPPEND") or "").split() |
| 411 | for append in appends: | 411 | for append in appends: |
| 412 | logger.debug(1, "Appending .bbappend file %s to %s", append, fn) | 412 | logger.debug(1, "Appending .bbappend file %s to %s", append, fn) |
| 413 | bb.parse.BBHandler.handle(append, d, True) | 413 | bb.parse.BBHandler.handle(append, d, True) |
| @@ -422,16 +422,16 @@ def multi_finalize(fn, d): | |||
| 422 | d.setVar("__SKIPPED", e.args[0]) | 422 | d.setVar("__SKIPPED", e.args[0]) |
| 423 | datastores = {"": safe_d} | 423 | datastores = {"": safe_d} |
| 424 | 424 | ||
| 425 | versions = (d.getVar("BBVERSIONS", True) or "").split() | 425 | versions = (d.getVar("BBVERSIONS") or "").split() |
| 426 | if versions: | 426 | if versions: |
| 427 | pv = orig_pv = d.getVar("PV", True) | 427 | pv = orig_pv = d.getVar("PV") |
| 428 | baseversions = {} | 428 | baseversions = {} |
| 429 | 429 | ||
| 430 | def verfunc(ver, d, pv_d = None): | 430 | def verfunc(ver, d, pv_d = None): |
| 431 | if pv_d is None: | 431 | if pv_d is None: |
| 432 | pv_d = d | 432 | pv_d = d |
| 433 | 433 | ||
| 434 | overrides = d.getVar("OVERRIDES", True).split(":") | 434 | overrides = d.getVar("OVERRIDES").split(":") |
| 435 | pv_d.setVar("PV", ver) | 435 | pv_d.setVar("PV", ver) |
| 436 | overrides.append(ver) | 436 | overrides.append(ver) |
| 437 | bpv = baseversions.get(ver) or orig_pv | 437 | bpv = baseversions.get(ver) or orig_pv |
| @@ -466,7 +466,7 @@ def multi_finalize(fn, d): | |||
| 466 | 466 | ||
| 467 | _create_variants(datastores, versions, verfunc, onlyfinalise) | 467 | _create_variants(datastores, versions, verfunc, onlyfinalise) |
| 468 | 468 | ||
| 469 | extended = d.getVar("BBCLASSEXTEND", True) or "" | 469 | extended = d.getVar("BBCLASSEXTEND") or "" |
| 470 | if extended: | 470 | if extended: |
| 471 | # the following is to support bbextends with arguments, for e.g. multilib | 471 | # the following is to support bbextends with arguments, for e.g. multilib |
| 472 | # an example is as follows: | 472 | # an example is as follows: |
| @@ -484,7 +484,7 @@ def multi_finalize(fn, d): | |||
| 484 | else: | 484 | else: |
| 485 | extendedmap[ext] = ext | 485 | extendedmap[ext] = ext |
| 486 | 486 | ||
| 487 | pn = d.getVar("PN", True) | 487 | pn = d.getVar("PN") |
| 488 | def extendfunc(name, d): | 488 | def extendfunc(name, d): |
| 489 | if name != extendedmap[name]: | 489 | if name != extendedmap[name]: |
| 490 | d.setVar("BBEXTENDCURR", extendedmap[name]) | 490 | d.setVar("BBEXTENDCURR", extendedmap[name]) |
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index c54a07979d..f2a215105b 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
| @@ -66,7 +66,7 @@ def inherit(files, fn, lineno, d): | |||
| 66 | file = os.path.join('classes', '%s.bbclass' % file) | 66 | file = os.path.join('classes', '%s.bbclass' % file) |
| 67 | 67 | ||
| 68 | if not os.path.isabs(file): | 68 | if not os.path.isabs(file): |
| 69 | bbpath = d.getVar("BBPATH", True) | 69 | bbpath = d.getVar("BBPATH") |
| 70 | abs_fn, attempts = bb.utils.which(bbpath, file, history=True) | 70 | abs_fn, attempts = bb.utils.which(bbpath, file, history=True) |
| 71 | for af in attempts: | 71 | for af in attempts: |
| 72 | if af != abs_fn: | 72 | if af != abs_fn: |
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index 875250de40..5759cb20ed 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
| @@ -83,16 +83,16 @@ def include(parentfn, fn, lineno, data, error_out): | |||
| 83 | 83 | ||
| 84 | if not os.path.isabs(fn): | 84 | if not os.path.isabs(fn): |
| 85 | dname = os.path.dirname(parentfn) | 85 | dname = os.path.dirname(parentfn) |
| 86 | bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True)) | 86 | bbpath = "%s:%s" % (dname, data.getVar("BBPATH")) |
| 87 | abs_fn, attempts = bb.utils.which(bbpath, fn, history=True) | 87 | abs_fn, attempts = bb.utils.which(bbpath, fn, history=True) |
| 88 | if abs_fn and bb.parse.check_dependency(data, abs_fn): | 88 | if abs_fn and bb.parse.check_dependency(data, abs_fn): |
| 89 | logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True))) | 89 | logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE'))) |
| 90 | for af in attempts: | 90 | for af in attempts: |
| 91 | bb.parse.mark_dependency(data, af) | 91 | bb.parse.mark_dependency(data, af) |
| 92 | if abs_fn: | 92 | if abs_fn: |
| 93 | fn = abs_fn | 93 | fn = abs_fn |
| 94 | elif bb.parse.check_dependency(data, fn): | 94 | elif bb.parse.check_dependency(data, fn): |
| 95 | logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True))) | 95 | logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE'))) |
| 96 | 96 | ||
| 97 | try: | 97 | try: |
| 98 | bb.parse.handle(fn, data, True) | 98 | bb.parse.handle(fn, data, True) |
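
include() above resolves a relative path by prepending the including file's directory to BBPATH before searching. A simplified sketch, without the dependency marking and duplicate-inclusion warning the real code performs; paths in the usage line are invented:

    import os

    def resolve_include(parentfn, fn, bbpath):
        if os.path.isabs(fn):
            return fn
        # The including file's directory is searched before BBPATH.
        search = "%s:%s" % (os.path.dirname(parentfn), bbpath)
        for directory in search.split(":"):
            candidate = os.path.join(directory, fn)
            if os.path.exists(candidate):
                return candidate
        return fn  # left as-is; bb.parse.handle reports the failure later

    print(resolve_include("/work/conf/local.conf", "bitbake.conf",
                          "/work:/work/meta"))
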
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index bb6deca526..e6bbff88ea 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py | |||
| @@ -207,8 +207,8 @@ def connect(database): | |||
| 207 | def persist(domain, d): | 207 | def persist(domain, d): |
| 208 | """Convenience factory for SQLTable objects based upon metadata""" | 208 | """Convenience factory for SQLTable objects based upon metadata""" |
| 209 | import bb.utils | 209 | import bb.utils |
| 210 | cachedir = (d.getVar("PERSISTENT_DIR", True) or | 210 | cachedir = (d.getVar("PERSISTENT_DIR") or |
| 211 | d.getVar("CACHE", True)) | 211 | d.getVar("CACHE")) |
| 212 | if not cachedir: | 212 | if not cachedir: |
| 213 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") | 213 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") |
| 214 | sys.exit(1) | 214 | sys.exit(1) |
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py index db02a0b0de..0e9c8344d2 100644 --- a/bitbake/lib/bb/providers.py +++ b/bitbake/lib/bb/providers.py | |||
| @@ -123,11 +123,11 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
| 123 | 123 | ||
| 124 | # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot | 124 | # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot |
| 125 | # hence we do this manually rather than use OVERRIDES | 125 | # hence we do this manually rather than use OVERRIDES |
| 126 | preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True) | 126 | preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) |
| 127 | if not preferred_v: | 127 | if not preferred_v: |
| 128 | preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True) | 128 | preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) |
| 129 | if not preferred_v: | 129 | if not preferred_v: |
| 130 | preferred_v = cfgData.getVar("PREFERRED_VERSION", True) | 130 | preferred_v = cfgData.getVar("PREFERRED_VERSION") |
| 131 | 131 | ||
| 132 | if preferred_v: | 132 | if preferred_v: |
| 133 | m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) | 133 | m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) |
| @@ -289,7 +289,7 @@ def filterProviders(providers, item, cfgData, dataCache): | |||
| 289 | 289 | ||
| 290 | eligible = _filterProviders(providers, item, cfgData, dataCache) | 290 | eligible = _filterProviders(providers, item, cfgData, dataCache) |
| 291 | 291 | ||
| 292 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True) | 292 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item) |
| 293 | if prefervar: | 293 | if prefervar: |
| 294 | dataCache.preferred[item] = prefervar | 294 | dataCache.preferred[item] = prefervar |
| 295 | 295 | ||
| @@ -318,7 +318,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): | |||
| 318 | eligible = _filterProviders(providers, item, cfgData, dataCache) | 318 | eligible = _filterProviders(providers, item, cfgData, dataCache) |
| 319 | 319 | ||
| 320 | # First try and match any PREFERRED_RPROVIDER entry | 320 | # First try and match any PREFERRED_RPROVIDER entry |
| 321 | prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True) | 321 | prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item) |
| 322 | foundUnique = False | 322 | foundUnique = False |
| 323 | if prefervar: | 323 | if prefervar: |
| 324 | for p in eligible: | 324 | for p in eligible: |
| @@ -345,7 +345,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): | |||
| 345 | pn = dataCache.pkg_fn[p] | 345 | pn = dataCache.pkg_fn[p] |
| 346 | provides = dataCache.pn_provides[pn] | 346 | provides = dataCache.pn_provides[pn] |
| 347 | for provide in provides: | 347 | for provide in provides: |
| 348 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True) | 348 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide) |
| 349 | #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) | 349 | #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) |
| 350 | if prefervar in pns and pns[prefervar] not in preferred: | 350 | if prefervar in pns and pns[prefervar] not in preferred: |
| 351 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) | 351 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) |
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 84b268580f..51d68a5cf8 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
| @@ -262,8 +262,8 @@ class RunQueueData: | |||
| 262 | self.rq = rq | 262 | self.rq = rq |
| 263 | self.warn_multi_bb = False | 263 | self.warn_multi_bb = False |
| 264 | 264 | ||
| 265 | self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or "" | 265 | self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or "" |
| 266 | self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() | 266 | self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split() |
| 267 | self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData) | 267 | self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData) |
| 268 | self.setscenewhitelist_checked = False | 268 | self.setscenewhitelist_checked = False |
| 269 | self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() | 269 | self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() |
| @@ -976,10 +976,10 @@ class RunQueue: | |||
| 976 | self.cfgData = cfgData | 976 | self.cfgData = cfgData |
| 977 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) | 977 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) |
| 978 | 978 | ||
| 979 | self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile" | 979 | self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile" |
| 980 | self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None | 980 | self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None |
| 981 | self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2", True) or None | 981 | self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2") or None |
| 982 | self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None | 982 | self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None |
| 983 | 983 | ||
| 984 | self.state = runQueuePrepare | 984 | self.state = runQueuePrepare |
| 985 | 985 | ||
| @@ -997,8 +997,8 @@ class RunQueue: | |||
| 997 | magic = "decafbadbad" | 997 | magic = "decafbadbad" |
| 998 | if fakeroot: | 998 | if fakeroot: |
| 999 | magic = magic + "beef" | 999 | magic = magic + "beef" |
| 1000 | fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True) | 1000 | fakerootcmd = self.cfgData.getVar("FAKEROOTCMD") |
| 1001 | fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split() | 1001 | fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV") or "").split() |
| 1002 | env = os.environ.copy() | 1002 | env = os.environ.copy() |
| 1003 | for key, value in (var.split('=') for var in fakerootenv): | 1003 | for key, value in (var.split('=') for var in fakerootenv): |
| 1004 | env[key] = value | 1004 | env[key] = value |
| @@ -1024,9 +1024,9 @@ class RunQueue: | |||
| 1024 | "logdefaultverboselogs" : bb.msg.loggerVerboseLogs, | 1024 | "logdefaultverboselogs" : bb.msg.loggerVerboseLogs, |
| 1025 | "logdefaultdomain" : bb.msg.loggerDefaultDomains, | 1025 | "logdefaultdomain" : bb.msg.loggerDefaultDomains, |
| 1026 | "prhost" : self.cooker.prhost, | 1026 | "prhost" : self.cooker.prhost, |
| 1027 | "buildname" : self.cfgData.getVar("BUILDNAME", True), | 1027 | "buildname" : self.cfgData.getVar("BUILDNAME"), |
| 1028 | "date" : self.cfgData.getVar("DATE", True), | 1028 | "date" : self.cfgData.getVar("DATE"), |
| 1029 | "time" : self.cfgData.getVar("TIME", True), | 1029 | "time" : self.cfgData.getVar("TIME"), |
| 1030 | } | 1030 | } |
| 1031 | 1031 | ||
| 1032 | worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") | 1032 | worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") |
| @@ -1427,8 +1427,8 @@ class RunQueueExecute: | |||
| 1427 | self.cfgData = rq.cfgData | 1427 | self.cfgData = rq.cfgData |
| 1428 | self.rqdata = rq.rqdata | 1428 | self.rqdata = rq.rqdata |
| 1429 | 1429 | ||
| 1430 | self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1) | 1430 | self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) |
| 1431 | self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed" | 1431 | self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" |
| 1432 | 1432 | ||
| 1433 | self.runq_buildable = set() | 1433 | self.runq_buildable = set() |
| 1434 | self.runq_running = set() | 1434 | self.runq_running = set() |
| @@ -1630,7 +1630,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
| 1630 | if type(obj) is type and | 1630 | if type(obj) is type and |
| 1631 | issubclass(obj, RunQueueScheduler)) | 1631 | issubclass(obj, RunQueueScheduler)) |
| 1632 | 1632 | ||
| 1633 | user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True) | 1633 | user_schedulers = self.cfgData.getVar("BB_SCHEDULERS") |
| 1634 | if user_schedulers: | 1634 | if user_schedulers: |
| 1635 | for sched in user_schedulers.split(): | 1635 | for sched in user_schedulers.split(): |
| 1636 | if not "." in sched: | 1636 | if not "." in sched: |
| @@ -2402,9 +2402,9 @@ class runQueuePipe(): | |||
| 2402 | self.input.close() | 2402 | self.input.close() |
| 2403 | 2403 | ||
| 2404 | def get_setscene_enforce_whitelist(d): | 2404 | def get_setscene_enforce_whitelist(d): |
| 2405 | if d.getVar('BB_SETSCENE_ENFORCE', True) != '1': | 2405 | if d.getVar('BB_SETSCENE_ENFORCE') != '1': |
| 2406 | return None | 2406 | return None |
| 2407 | whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST", True) or "").split() | 2407 | whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() |
| 2408 | outlist = [] | 2408 | outlist = [] |
| 2409 | for item in whitelist[:]: | 2409 | for item in whitelist[:]: |
| 2410 | if item.startswith('%:'): | 2410 | if item.startswith('%:'): |
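
A recurring idiom in these runqueue hunks is int(d.getVar(...) or 1): both an unset variable (None) and an empty string fall back to the default, while a malformed value still raises. In isolation, with an invented helper name:

    def to_int(raw, default):
        # Mirrors int(d.getVar("BB_NUMBER_THREADS") or 1) above.
        return int(raw or default)

    assert to_int(None, 1) == 1   # variable unset
    assert to_int("", 1) == 1     # variable set but empty
    assert to_int("8", 1) == 8
    # to_int("many", 1) would raise ValueError, as in the runqueue itself.
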
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index c1685a9e46..fa8a6b1623 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
| @@ -13,7 +13,7 @@ def init(d): | |||
| 13 | siggens = [obj for obj in globals().values() | 13 | siggens = [obj for obj in globals().values() |
| 14 | if type(obj) is type and issubclass(obj, SignatureGenerator)] | 14 | if type(obj) is type and issubclass(obj, SignatureGenerator)] |
| 15 | 15 | ||
| 16 | desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" | 16 | desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop" |
| 17 | for sg in siggens: | 17 | for sg in siggens: |
| 18 | if desired == sg.name: | 18 | if desired == sg.name: |
| 19 | return sg(d) | 19 | return sg(d) |
| @@ -82,10 +82,10 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
| 82 | self.gendeps = {} | 82 | self.gendeps = {} |
| 83 | self.lookupcache = {} | 83 | self.lookupcache = {} |
| 84 | self.pkgnameextract = re.compile("(?P<fn>.*)\..*") | 84 | self.pkgnameextract = re.compile("(?P<fn>.*)\..*") |
| 85 | self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()) | 85 | self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) |
| 86 | self.taskwhitelist = None | 86 | self.taskwhitelist = None |
| 87 | self.init_rundepcheck(data) | 87 | self.init_rundepcheck(data) |
| 88 | checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True) | 88 | checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") |
| 89 | if checksum_cache_file: | 89 | if checksum_cache_file: |
| 90 | self.checksum_cache = FileChecksumCache() | 90 | self.checksum_cache = FileChecksumCache() |
| 91 | self.checksum_cache.init_cache(data, checksum_cache_file) | 91 | self.checksum_cache.init_cache(data, checksum_cache_file) |
| @@ -93,7 +93,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
| 93 | self.checksum_cache = None | 93 | self.checksum_cache = None |
| 94 | 94 | ||
| 95 | def init_rundepcheck(self, data): | 95 | def init_rundepcheck(self, data): |
| 96 | self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None | 96 | self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None |
| 97 | if self.taskwhitelist: | 97 | if self.taskwhitelist: |
| 98 | self.twl = re.compile(self.taskwhitelist) | 98 | self.twl = re.compile(self.taskwhitelist) |
| 99 | else: | 99 | else: |
| @@ -160,7 +160,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
| 160 | 160 | ||
| 161 | #Slow but can be useful for debugging mismatched basehashes | 161 | #Slow but can be useful for debugging mismatched basehashes |
| 162 | #for task in self.taskdeps[fn]: | 162 | #for task in self.taskdeps[fn]: |
| 163 | # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False) | 163 | # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) |
| 164 | 164 | ||
| 165 | for task in taskdeps: | 165 | for task in taskdeps: |
| 166 | d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task]) | 166 | d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task]) |
| @@ -345,8 +345,8 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | |||
| 345 | 345 | ||
| 346 | def dump_this_task(outfile, d): | 346 | def dump_this_task(outfile, d): |
| 347 | import bb.parse | 347 | import bb.parse |
| 348 | fn = d.getVar("BB_FILENAME", True) | 348 | fn = d.getVar("BB_FILENAME") |
| 349 | task = "do_" + d.getVar("BB_CURRENTTASK", True) | 349 | task = "do_" + d.getVar("BB_CURRENTTASK") |
| 350 | referencestamp = bb.build.stamp_internal(task, d, None, True) | 350 | referencestamp = bb.build.stamp_internal(task, d, None, True) |
| 351 | bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) | 351 | bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) |
| 352 | 352 | ||
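For context on the siggen.py hunks: init() selects a signature generator class by matching BB_SIGNATURE_HANDLER against each class's name attribute. A simplified, self-contained sketch of that selection logic (not the bitbake implementation; FakeData is a hypothetical stand-in for the datastore):

    class FakeData:
        """Hypothetical stand-in for the datastore; just enough for the sketch."""
        def __init__(self, values):
            self._values = values
        def getVar(self, var):
            return self._values.get(var)

    class SignatureGenerator:
        name = "noop"

    class SignatureGeneratorBasic(SignatureGenerator):
        name = "basic"

    class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
        name = "basichash"

    def pick_siggen(d):
        # Collect every generator class visible in this module.
        siggens = [obj for obj in globals().values()
                   if isinstance(obj, type) and issubclass(obj, SignatureGenerator)]
        desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"  # new call style
        for sg in siggens:
            if desired == sg.name:
                return sg
        raise RuntimeError("Invalid signature generator '%s'" % desired)

    print(pick_siggen(FakeData({"BB_SIGNATURE_HANDLER": "basichash"})).name)
    # -> basichash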
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index b80c315d3d..a681841df3 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
| @@ -68,7 +68,7 @@ class VariableReferenceTest(ReferenceTest): | |||
| 68 | 68 | ||
| 69 | def test_python_reference(self): | 69 | def test_python_reference(self): |
| 70 | self.setEmptyVars(["BAR"]) | 70 | self.setEmptyVars(["BAR"]) |
| 71 | self.parseExpression("${@d.getVar('BAR', True) + 'foo'}") | 71 | self.parseExpression("${@d.getVar('BAR') + 'foo'}") |
| 72 | self.assertReferences(set(["BAR"])) | 72 | self.assertReferences(set(["BAR"])) |
| 73 | 73 | ||
| 74 | class ShellReferenceTest(ReferenceTest): | 74 | class ShellReferenceTest(ReferenceTest): |
| @@ -209,17 +209,17 @@ be. These unit tests are testing snippets.""" | |||
| 209 | return " " + value | 209 | return " " + value |
| 210 | 210 | ||
| 211 | def test_getvar_reference(self): | 211 | def test_getvar_reference(self): |
| 212 | self.parseExpression("d.getVar('foo', True)") | 212 | self.parseExpression("d.getVar('foo')") |
| 213 | self.assertReferences(set(["foo"])) | 213 | self.assertReferences(set(["foo"])) |
| 214 | self.assertExecs(set()) | 214 | self.assertExecs(set()) |
| 215 | 215 | ||
| 216 | def test_getvar_computed_reference(self): | 216 | def test_getvar_computed_reference(self): |
| 217 | self.parseExpression("d.getVar('f' + 'o' + 'o', True)") | 217 | self.parseExpression("d.getVar('f' + 'o' + 'o')") |
| 218 | self.assertReferences(set()) | 218 | self.assertReferences(set()) |
| 219 | self.assertExecs(set()) | 219 | self.assertExecs(set()) |
| 220 | 220 | ||
| 221 | def test_getvar_exec_reference(self): | 221 | def test_getvar_exec_reference(self): |
| 222 | self.parseExpression("eval('d.getVar(\"foo\", True)')") | 222 | self.parseExpression("eval('d.getVar(\"foo\")')") |
| 223 | self.assertReferences(set()) | 223 | self.assertReferences(set()) |
| 224 | self.assertExecs(set(["eval"])) | 224 | self.assertExecs(set(["eval"])) |
| 225 | 225 | ||
| @@ -269,11 +269,11 @@ be. These unit tests are testing snippets.""" | |||
| 269 | class DependencyReferenceTest(ReferenceTest): | 269 | class DependencyReferenceTest(ReferenceTest): |
| 270 | 270 | ||
| 271 | pydata = """ | 271 | pydata = """ |
| 272 | d.getVar('somevar', True) | 272 | d.getVar('somevar') |
| 273 | def test(d): | 273 | def test(d): |
| 274 | foo = 'bar %s' % 'foo' | 274 | foo = 'bar %s' % 'foo' |
| 275 | def test2(d): | 275 | def test2(d): |
| 276 | d.getVar(foo, True) | 276 | d.getVar(foo) |
| 277 | d.getVar('bar', False) | 277 | d.getVar('bar', False) |
| 278 | test2(d) | 278 | test2(d) |
| 279 | 279 | ||
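These codeparser tests pin down which getVar() calls the parser can track statically: a literal first argument becomes a recorded reference, while a computed one ('f' + 'o' + 'o') cannot be resolved at parse time. An illustrative sketch of that distinction using the stdlib ast module (this is not bitbake's codeparser, just the idea behind it):

    import ast

    def getvar_references(snippet):
        """Collect names passed as string literals to *.getVar() calls."""
        refs = set()
        for node in ast.walk(ast.parse(snippet)):
            if (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Attribute)
                    and node.func.attr == "getVar"
                    and node.args
                    and isinstance(node.args[0], ast.Constant)
                    and isinstance(node.args[0].value, str)):
                refs.add(node.args[0].value)
        return refs

    print(getvar_references("d.getVar('foo')"))            # {'foo'}
    print(getvar_references("d.getVar('f' + 'o' + 'o')"))  # set() - not static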
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py index b54eb06797..1a5a28af06 100644 --- a/bitbake/lib/bb/tests/data.py +++ b/bitbake/lib/bb/tests/data.py | |||
| @@ -77,13 +77,13 @@ class DataExpansions(unittest.TestCase): | |||
| 77 | self.assertEqual(str(val), "boo value_of_foo") | 77 | self.assertEqual(str(val), "boo value_of_foo") |
| 78 | 78 | ||
| 79 | def test_python_snippet_getvar(self): | 79 | def test_python_snippet_getvar(self): |
| 80 | val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") | 80 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") |
| 81 | self.assertEqual(str(val), "value_of_foo value_of_bar") | 81 | self.assertEqual(str(val), "value_of_foo value_of_bar") |
| 82 | 82 | ||
| 83 | def test_python_unexpanded(self): | 83 | def test_python_unexpanded(self): |
| 84 | self.d.setVar("bar", "${unsetvar}") | 84 | self.d.setVar("bar", "${unsetvar}") |
| 85 | val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") | 85 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") |
| 86 | self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}") | 86 | self.assertEqual(str(val), "${@d.getVar('foo') + ' ${unsetvar}'}") |
| 87 | 87 | ||
| 88 | def test_python_snippet_syntax_error(self): | 88 | def test_python_snippet_syntax_error(self): |
| 89 | self.d.setVar("FOO", "${@foo = 5}") | 89 | self.d.setVar("FOO", "${@foo = 5}") |
| @@ -99,7 +99,7 @@ class DataExpansions(unittest.TestCase): | |||
| 99 | self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True) | 99 | self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True) |
| 100 | 100 | ||
| 101 | def test_value_containing_value(self): | 101 | def test_value_containing_value(self): |
| 102 | val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") | 102 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") |
| 103 | self.assertEqual(str(val), "value_of_foo value_of_bar") | 103 | self.assertEqual(str(val), "value_of_foo value_of_bar") |
| 104 | 104 | ||
| 105 | def test_reference_undefined_var(self): | 105 | def test_reference_undefined_var(self): |
| @@ -109,7 +109,7 @@ class DataExpansions(unittest.TestCase): | |||
| 109 | def test_double_reference(self): | 109 | def test_double_reference(self): |
| 110 | self.d.setVar("BAR", "bar value") | 110 | self.d.setVar("BAR", "bar value") |
| 111 | self.d.setVar("FOO", "${BAR} foo ${BAR}") | 111 | self.d.setVar("FOO", "${BAR} foo ${BAR}") |
| 112 | val = self.d.getVar("FOO", True) | 112 | val = self.d.getVar("FOO") |
| 113 | self.assertEqual(str(val), "bar value foo bar value") | 113 | self.assertEqual(str(val), "bar value foo bar value") |
| 114 | 114 | ||
| 115 | def test_direct_recursion(self): | 115 | def test_direct_recursion(self): |
| @@ -129,12 +129,12 @@ class DataExpansions(unittest.TestCase): | |||
| 129 | 129 | ||
| 130 | def test_incomplete_varexp_single_quotes(self): | 130 | def test_incomplete_varexp_single_quotes(self): |
| 131 | self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc") | 131 | self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc") |
| 132 | val = self.d.getVar("FOO", True) | 132 | val = self.d.getVar("FOO") |
| 133 | self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc") | 133 | self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc") |
| 134 | 134 | ||
| 135 | def test_nonstring(self): | 135 | def test_nonstring(self): |
| 136 | self.d.setVar("TEST", 5) | 136 | self.d.setVar("TEST", 5) |
| 137 | val = self.d.getVar("TEST", True) | 137 | val = self.d.getVar("TEST") |
| 138 | self.assertEqual(str(val), "5") | 138 | self.assertEqual(str(val), "5") |
| 139 | 139 | ||
| 140 | def test_rename(self): | 140 | def test_rename(self): |
| @@ -234,19 +234,19 @@ class TestConcat(unittest.TestCase): | |||
| 234 | def test_prepend(self): | 234 | def test_prepend(self): |
| 235 | self.d.setVar("TEST", "${VAL}") | 235 | self.d.setVar("TEST", "${VAL}") |
| 236 | self.d.prependVar("TEST", "${FOO}:") | 236 | self.d.prependVar("TEST", "${FOO}:") |
| 237 | self.assertEqual(self.d.getVar("TEST", True), "foo:val") | 237 | self.assertEqual(self.d.getVar("TEST"), "foo:val") |
| 238 | 238 | ||
| 239 | def test_append(self): | 239 | def test_append(self): |
| 240 | self.d.setVar("TEST", "${VAL}") | 240 | self.d.setVar("TEST", "${VAL}") |
| 241 | self.d.appendVar("TEST", ":${BAR}") | 241 | self.d.appendVar("TEST", ":${BAR}") |
| 242 | self.assertEqual(self.d.getVar("TEST", True), "val:bar") | 242 | self.assertEqual(self.d.getVar("TEST"), "val:bar") |
| 243 | 243 | ||
| 244 | def test_multiple_append(self): | 244 | def test_multiple_append(self): |
| 245 | self.d.setVar("TEST", "${VAL}") | 245 | self.d.setVar("TEST", "${VAL}") |
| 246 | self.d.prependVar("TEST", "${FOO}:") | 246 | self.d.prependVar("TEST", "${FOO}:") |
| 247 | self.d.appendVar("TEST", ":val2") | 247 | self.d.appendVar("TEST", ":val2") |
| 248 | self.d.appendVar("TEST", ":${BAR}") | 248 | self.d.appendVar("TEST", ":${BAR}") |
| 249 | self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") | 249 | self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") |
| 250 | 250 | ||
| 251 | class TestConcatOverride(unittest.TestCase): | 251 | class TestConcatOverride(unittest.TestCase): |
| 252 | def setUp(self): | 252 | def setUp(self): |
| @@ -259,13 +259,13 @@ class TestConcatOverride(unittest.TestCase): | |||
| 259 | self.d.setVar("TEST", "${VAL}") | 259 | self.d.setVar("TEST", "${VAL}") |
| 260 | self.d.setVar("TEST_prepend", "${FOO}:") | 260 | self.d.setVar("TEST_prepend", "${FOO}:") |
| 261 | bb.data.update_data(self.d) | 261 | bb.data.update_data(self.d) |
| 262 | self.assertEqual(self.d.getVar("TEST", True), "foo:val") | 262 | self.assertEqual(self.d.getVar("TEST"), "foo:val") |
| 263 | 263 | ||
| 264 | def test_append(self): | 264 | def test_append(self): |
| 265 | self.d.setVar("TEST", "${VAL}") | 265 | self.d.setVar("TEST", "${VAL}") |
| 266 | self.d.setVar("TEST_append", ":${BAR}") | 266 | self.d.setVar("TEST_append", ":${BAR}") |
| 267 | bb.data.update_data(self.d) | 267 | bb.data.update_data(self.d) |
| 268 | self.assertEqual(self.d.getVar("TEST", True), "val:bar") | 268 | self.assertEqual(self.d.getVar("TEST"), "val:bar") |
| 269 | 269 | ||
| 270 | def test_multiple_append(self): | 270 | def test_multiple_append(self): |
| 271 | self.d.setVar("TEST", "${VAL}") | 271 | self.d.setVar("TEST", "${VAL}") |
| @@ -273,47 +273,47 @@ class TestConcatOverride(unittest.TestCase): | |||
| 273 | self.d.setVar("TEST_append", ":val2") | 273 | self.d.setVar("TEST_append", ":val2") |
| 274 | self.d.setVar("TEST_append", ":${BAR}") | 274 | self.d.setVar("TEST_append", ":${BAR}") |
| 275 | bb.data.update_data(self.d) | 275 | bb.data.update_data(self.d) |
| 276 | self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") | 276 | self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") |
| 277 | 277 | ||
| 278 | def test_append_unset(self): | 278 | def test_append_unset(self): |
| 279 | self.d.setVar("TEST_prepend", "${FOO}:") | 279 | self.d.setVar("TEST_prepend", "${FOO}:") |
| 280 | self.d.setVar("TEST_append", ":val2") | 280 | self.d.setVar("TEST_append", ":val2") |
| 281 | self.d.setVar("TEST_append", ":${BAR}") | 281 | self.d.setVar("TEST_append", ":${BAR}") |
| 282 | bb.data.update_data(self.d) | 282 | bb.data.update_data(self.d) |
| 283 | self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar") | 283 | self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") |
| 284 | 284 | ||
| 285 | def test_remove(self): | 285 | def test_remove(self): |
| 286 | self.d.setVar("TEST", "${VAL} ${BAR}") | 286 | self.d.setVar("TEST", "${VAL} ${BAR}") |
| 287 | self.d.setVar("TEST_remove", "val") | 287 | self.d.setVar("TEST_remove", "val") |
| 288 | bb.data.update_data(self.d) | 288 | bb.data.update_data(self.d) |
| 289 | self.assertEqual(self.d.getVar("TEST", True), "bar") | 289 | self.assertEqual(self.d.getVar("TEST"), "bar") |
| 290 | 290 | ||
| 291 | def test_doubleref_remove(self): | 291 | def test_doubleref_remove(self): |
| 292 | self.d.setVar("TEST", "${VAL} ${BAR}") | 292 | self.d.setVar("TEST", "${VAL} ${BAR}") |
| 293 | self.d.setVar("TEST_remove", "val") | 293 | self.d.setVar("TEST_remove", "val") |
| 294 | self.d.setVar("TEST_TEST", "${TEST} ${TEST}") | 294 | self.d.setVar("TEST_TEST", "${TEST} ${TEST}") |
| 295 | bb.data.update_data(self.d) | 295 | bb.data.update_data(self.d) |
| 296 | self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar") | 296 | self.assertEqual(self.d.getVar("TEST_TEST"), "bar bar") |
| 297 | 297 | ||
| 298 | def test_empty_remove(self): | 298 | def test_empty_remove(self): |
| 299 | self.d.setVar("TEST", "") | 299 | self.d.setVar("TEST", "") |
| 300 | self.d.setVar("TEST_remove", "val") | 300 | self.d.setVar("TEST_remove", "val") |
| 301 | bb.data.update_data(self.d) | 301 | bb.data.update_data(self.d) |
| 302 | self.assertEqual(self.d.getVar("TEST", True), "") | 302 | self.assertEqual(self.d.getVar("TEST"), "") |
| 303 | 303 | ||
| 304 | def test_remove_expansion(self): | 304 | def test_remove_expansion(self): |
| 305 | self.d.setVar("BAR", "Z") | 305 | self.d.setVar("BAR", "Z") |
| 306 | self.d.setVar("TEST", "${BAR}/X Y") | 306 | self.d.setVar("TEST", "${BAR}/X Y") |
| 307 | self.d.setVar("TEST_remove", "${BAR}/X") | 307 | self.d.setVar("TEST_remove", "${BAR}/X") |
| 308 | bb.data.update_data(self.d) | 308 | bb.data.update_data(self.d) |
| 309 | self.assertEqual(self.d.getVar("TEST", True), "Y") | 309 | self.assertEqual(self.d.getVar("TEST"), "Y") |
| 310 | 310 | ||
| 311 | def test_remove_expansion_items(self): | 311 | def test_remove_expansion_items(self): |
| 312 | self.d.setVar("TEST", "A B C D") | 312 | self.d.setVar("TEST", "A B C D") |
| 313 | self.d.setVar("BAR", "B D") | 313 | self.d.setVar("BAR", "B D") |
| 314 | self.d.setVar("TEST_remove", "${BAR}") | 314 | self.d.setVar("TEST_remove", "${BAR}") |
| 315 | bb.data.update_data(self.d) | 315 | bb.data.update_data(self.d) |
| 316 | self.assertEqual(self.d.getVar("TEST", True), "A C") | 316 | self.assertEqual(self.d.getVar("TEST"), "A C") |
| 317 | 317 | ||
| 318 | class TestOverrides(unittest.TestCase): | 318 | class TestOverrides(unittest.TestCase): |
| 319 | def setUp(self): | 319 | def setUp(self): |
| @@ -323,17 +323,17 @@ class TestOverrides(unittest.TestCase): | |||
| 323 | 323 | ||
| 324 | def test_no_override(self): | 324 | def test_no_override(self): |
| 325 | bb.data.update_data(self.d) | 325 | bb.data.update_data(self.d) |
| 326 | self.assertEqual(self.d.getVar("TEST", True), "testvalue") | 326 | self.assertEqual(self.d.getVar("TEST"), "testvalue") |
| 327 | 327 | ||
| 328 | def test_one_override(self): | 328 | def test_one_override(self): |
| 329 | self.d.setVar("TEST_bar", "testvalue2") | 329 | self.d.setVar("TEST_bar", "testvalue2") |
| 330 | bb.data.update_data(self.d) | 330 | bb.data.update_data(self.d) |
| 331 | self.assertEqual(self.d.getVar("TEST", True), "testvalue2") | 331 | self.assertEqual(self.d.getVar("TEST"), "testvalue2") |
| 332 | 332 | ||
| 333 | def test_one_override_unset(self): | 333 | def test_one_override_unset(self): |
| 334 | self.d.setVar("TEST2_bar", "testvalue2") | 334 | self.d.setVar("TEST2_bar", "testvalue2") |
| 335 | bb.data.update_data(self.d) | 335 | bb.data.update_data(self.d) |
| 336 | self.assertEqual(self.d.getVar("TEST2", True), "testvalue2") | 336 | self.assertEqual(self.d.getVar("TEST2"), "testvalue2") |
| 337 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) | 337 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) |
| 338 | 338 | ||
| 339 | def test_multiple_override(self): | 339 | def test_multiple_override(self): |
| @@ -341,18 +341,18 @@ class TestOverrides(unittest.TestCase): | |||
| 341 | self.d.setVar("TEST_local", "testvalue3") | 341 | self.d.setVar("TEST_local", "testvalue3") |
| 342 | self.d.setVar("TEST_foo", "testvalue4") | 342 | self.d.setVar("TEST_foo", "testvalue4") |
| 343 | bb.data.update_data(self.d) | 343 | bb.data.update_data(self.d) |
| 344 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") | 344 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
| 345 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) | 345 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) |
| 346 | 346 | ||
| 347 | def test_multiple_combined_overrides(self): | 347 | def test_multiple_combined_overrides(self): |
| 348 | self.d.setVar("TEST_local_foo_bar", "testvalue3") | 348 | self.d.setVar("TEST_local_foo_bar", "testvalue3") |
| 349 | bb.data.update_data(self.d) | 349 | bb.data.update_data(self.d) |
| 350 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") | 350 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
| 351 | 351 | ||
| 352 | def test_multiple_overrides_unset(self): | 352 | def test_multiple_overrides_unset(self): |
| 353 | self.d.setVar("TEST2_local_foo_bar", "testvalue3") | 353 | self.d.setVar("TEST2_local_foo_bar", "testvalue3") |
| 354 | bb.data.update_data(self.d) | 354 | bb.data.update_data(self.d) |
| 355 | self.assertEqual(self.d.getVar("TEST2", True), "testvalue3") | 355 | self.assertEqual(self.d.getVar("TEST2"), "testvalue3") |
| 356 | 356 | ||
| 357 | def test_keyexpansion_override(self): | 357 | def test_keyexpansion_override(self): |
| 358 | self.d.setVar("LOCAL", "local") | 358 | self.d.setVar("LOCAL", "local") |
| @@ -361,21 +361,21 @@ class TestOverrides(unittest.TestCase): | |||
| 361 | self.d.setVar("TEST_foo", "testvalue4") | 361 | self.d.setVar("TEST_foo", "testvalue4") |
| 362 | bb.data.update_data(self.d) | 362 | bb.data.update_data(self.d) |
| 363 | bb.data.expandKeys(self.d) | 363 | bb.data.expandKeys(self.d) |
| 364 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") | 364 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
| 365 | 365 | ||
| 366 | def test_rename_override(self): | 366 | def test_rename_override(self): |
| 367 | self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") | 367 | self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") |
| 368 | self.d.setVar("OVERRIDES", "class-target") | 368 | self.d.setVar("OVERRIDES", "class-target") |
| 369 | bb.data.update_data(self.d) | 369 | bb.data.update_data(self.d) |
| 370 | self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") | 370 | self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") |
| 371 | self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a") | 371 | self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") |
| 372 | 372 | ||
| 373 | def test_underscore_override(self): | 373 | def test_underscore_override(self): |
| 374 | self.d.setVar("TEST_bar", "testvalue2") | 374 | self.d.setVar("TEST_bar", "testvalue2") |
| 375 | self.d.setVar("TEST_some_val", "testvalue3") | 375 | self.d.setVar("TEST_some_val", "testvalue3") |
| 376 | self.d.setVar("TEST_foo", "testvalue4") | 376 | self.d.setVar("TEST_foo", "testvalue4") |
| 377 | self.d.setVar("OVERRIDES", "foo:bar:some_val") | 377 | self.d.setVar("OVERRIDES", "foo:bar:some_val") |
| 378 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") | 378 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
| 379 | 379 | ||
| 380 | class TestKeyExpansion(unittest.TestCase): | 380 | class TestKeyExpansion(unittest.TestCase): |
| 381 | def setUp(self): | 381 | def setUp(self): |
| @@ -389,7 +389,7 @@ class TestKeyExpansion(unittest.TestCase): | |||
| 389 | with LogRecord() as logs: | 389 | with LogRecord() as logs: |
| 390 | bb.data.expandKeys(self.d) | 390 | bb.data.expandKeys(self.d) |
| 391 | self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs)) | 391 | self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs)) |
| 392 | self.assertEqual(self.d.getVar("VAL_foo", True), "A") | 392 | self.assertEqual(self.d.getVar("VAL_foo"), "A") |
| 393 | 393 | ||
| 394 | class TestFlags(unittest.TestCase): | 394 | class TestFlags(unittest.TestCase): |
| 395 | def setUp(self): | 395 | def setUp(self): |
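Two behaviours exercised by these data tests are worth spelling out: inline python snippets (${@...}) now see the expand-by-default getVar(), and _override suffixes are resolved against OVERRIDES at lookup time. A short sketch, assuming the lib/ directory from this series of bitbake (which still uses '_' as the override separator) is on sys.path:

    import bb.data_smart

    d = bb.data_smart.DataSmart()
    d.setVar("foo", "value_of_foo")
    d.setVar("bar", "value_of_bar")

    # Inline python snippet; getVar() inside it now expands by default.
    print(d.expand("${@d.getVar('foo') + ' ${bar}'}"))
    # -> value_of_foo value_of_bar

    # Override resolution: the last matching override in OVERRIDES wins.
    d.setVar("TEST", "testvalue")
    d.setVar("TEST_bar", "testvalue2")
    d.setVar("OVERRIDES", "foo:bar:some_val")
    print(d.getVar("TEST"))  # -> testvalue2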
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py index 0b2706af02..86e86ed711 100644 --- a/bitbake/lib/bb/tests/parse.py +++ b/bitbake/lib/bb/tests/parse.py | |||
| @@ -58,9 +58,9 @@ C = "3" | |||
| 58 | def test_parse_simple(self): | 58 | def test_parse_simple(self): |
| 59 | f = self.parsehelper(self.testfile) | 59 | f = self.parsehelper(self.testfile) |
| 60 | d = bb.parse.handle(f.name, self.d)[''] | 60 | d = bb.parse.handle(f.name, self.d)[''] |
| 61 | self.assertEqual(d.getVar("A", True), "1") | 61 | self.assertEqual(d.getVar("A"), "1") |
| 62 | self.assertEqual(d.getVar("B", True), "2") | 62 | self.assertEqual(d.getVar("B"), "2") |
| 63 | self.assertEqual(d.getVar("C", True), "3") | 63 | self.assertEqual(d.getVar("C"), "3") |
| 64 | 64 | ||
| 65 | def test_parse_incomplete_function(self): | 65 | def test_parse_incomplete_function(self): |
| 66 | testfileB = self.testfile.replace("}", "") | 66 | testfileB = self.testfile.replace("}", "") |
| @@ -80,9 +80,9 @@ unset B[flag] | |||
| 80 | def test_parse_unset(self): | 80 | def test_parse_unset(self): |
| 81 | f = self.parsehelper(self.unsettest) | 81 | f = self.parsehelper(self.unsettest) |
| 82 | d = bb.parse.handle(f.name, self.d)[''] | 82 | d = bb.parse.handle(f.name, self.d)[''] |
| 83 | self.assertEqual(d.getVar("A", True), None) | 83 | self.assertEqual(d.getVar("A"), None) |
| 84 | self.assertEqual(d.getVarFlag("A","flag", True), None) | 84 | self.assertEqual(d.getVarFlag("A","flag", True), None) |
| 85 | self.assertEqual(d.getVar("B", True), "2") | 85 | self.assertEqual(d.getVar("B"), "2") |
| 86 | 86 | ||
| 87 | 87 | ||
| 88 | overridetest = """ | 88 | overridetest = """ |
| @@ -95,11 +95,11 @@ PN = "gtk+" | |||
| 95 | def test_parse_overrides(self): | 95 | def test_parse_overrides(self): |
| 96 | f = self.parsehelper(self.overridetest) | 96 | f = self.parsehelper(self.overridetest) |
| 97 | d = bb.parse.handle(f.name, self.d)[''] | 97 | d = bb.parse.handle(f.name, self.d)[''] |
| 98 | self.assertEqual(d.getVar("RRECOMMENDS", True), "b") | 98 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") |
| 99 | bb.data.expandKeys(d) | 99 | bb.data.expandKeys(d) |
| 100 | self.assertEqual(d.getVar("RRECOMMENDS", True), "b") | 100 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") |
| 101 | d.setVar("RRECOMMENDS_gtk+", "c") | 101 | d.setVar("RRECOMMENDS_gtk+", "c") |
| 102 | self.assertEqual(d.getVar("RRECOMMENDS", True), "c") | 102 | self.assertEqual(d.getVar("RRECOMMENDS"), "c") |
| 103 | 103 | ||
| 104 | overridetest2 = """ | 104 | overridetest2 = """ |
| 105 | EXTRA_OECONF = "" | 105 | EXTRA_OECONF = "" |
| @@ -112,7 +112,7 @@ EXTRA_OECONF_append = " c" | |||
| 112 | d = bb.parse.handle(f.name, self.d)[''] | 112 | d = bb.parse.handle(f.name, self.d)[''] |
| 113 | d.appendVar("EXTRA_OECONF", " d") | 113 | d.appendVar("EXTRA_OECONF", " d") |
| 114 | d.setVar("OVERRIDES", "class-target") | 114 | d.setVar("OVERRIDES", "class-target") |
| 115 | self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d") | 115 | self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d") |
| 116 | 116 | ||
| 117 | overridetest3 = """ | 117 | overridetest3 = """ |
| 118 | DESCRIPTION = "A" | 118 | DESCRIPTION = "A" |
| @@ -124,11 +124,11 @@ PN = "bc" | |||
| 124 | f = self.parsehelper(self.overridetest3) | 124 | f = self.parsehelper(self.overridetest3) |
| 125 | d = bb.parse.handle(f.name, self.d)[''] | 125 | d = bb.parse.handle(f.name, self.d)[''] |
| 126 | bb.data.expandKeys(d) | 126 | bb.data.expandKeys(d) |
| 127 | self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B") | 127 | self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B") |
| 128 | d.setVar("DESCRIPTION", "E") | 128 | d.setVar("DESCRIPTION", "E") |
| 129 | d.setVar("DESCRIPTION_bc-dev", "C D") | 129 | d.setVar("DESCRIPTION_bc-dev", "C D") |
| 130 | d.setVar("OVERRIDES", "bc-dev") | 130 | d.setVar("OVERRIDES", "bc-dev") |
| 131 | self.assertEqual(d.getVar("DESCRIPTION", True), "C D") | 131 | self.assertEqual(d.getVar("DESCRIPTION"), "C D") |
| 132 | 132 | ||
| 133 | 133 | ||
| 134 | classextend = """ | 134 | classextend = """ |
| @@ -159,6 +159,6 @@ python () { | |||
| 159 | alldata = bb.parse.handle(f.name, self.d) | 159 | alldata = bb.parse.handle(f.name, self.d) |
| 160 | d1 = alldata[''] | 160 | d1 = alldata[''] |
| 161 | d2 = alldata[cls.name] | 161 | d2 = alldata[cls.name] |
| 162 | self.assertEqual(d1.getVar("VAR_var", True), "B") | 162 | self.assertEqual(d1.getVar("VAR_var"), "B") |
| 163 | self.assertEqual(d2.getVar("VAR_var", True), None) | 163 | self.assertEqual(d2.getVar("VAR_var"), None) |
| 164 | 164 | ||
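The parse tests above all follow one pattern: write a snippet to a temporary .bb file, feed it to bb.parse.handle(), and query the returned datastore with the new getVar() style. A hedged sketch of that pattern, assuming bitbake's lib/ directory is on sys.path (the siggen assignment mirrors the tests' setUp, since finalizing a parsed .bb consults bb.parse.siggen):

    import tempfile

    import bb.data
    import bb.parse
    import bb.siggen

    d = bb.data.init()
    bb.parse.siggen = bb.siggen.init(d)  # as in the tests' setUp()

    # Write a throwaway recipe and parse it.
    with tempfile.NamedTemporaryFile(suffix=".bb", mode="w", delete=False) as f:
        f.write('A = "1"\nB = "2"\nC = "3"\n')
        name = f.name

    parsed = bb.parse.handle(name, d)['']
    print(parsed.getVar("A"), parsed.getVar("B"), parsed.getVar("C"))  # -> 1 2 3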
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 729848a1cc..c4b25b50ac 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
| @@ -665,7 +665,7 @@ def build_environment(d): | |||
| 665 | for var in bb.data.keys(d): | 665 | for var in bb.data.keys(d): |
| 666 | export = d.getVarFlag(var, "export", False) | 666 | export = d.getVarFlag(var, "export", False) |
| 667 | if export: | 667 | if export: |
| 668 | os.environ[var] = d.getVar(var, True) or "" | 668 | os.environ[var] = d.getVar(var) or "" |
| 669 | 669 | ||
| 670 | def _check_unsafe_delete_path(path): | 670 | def _check_unsafe_delete_path(path): |
| 671 | """ | 671 | """ |
| @@ -953,7 +953,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
| 953 | Arguments: | 953 | Arguments: |
| 954 | 954 | ||
| 955 | variable -- the variable name. This will be fetched and expanded (using | 955 | variable -- the variable name. This will be fetched and expanded (using |
| 956 | d.getVar(variable, True)) and then split into a set(). | 956 | d.getVar(variable)) and then split into a set(). |
| 957 | 957 | ||
| 958 | checkvalues -- if this is a string it is split on whitespace into a set(), | 958 | checkvalues -- if this is a string it is split on whitespace into a set(), |
| 959 | otherwise coerced directly into a set(). | 959 | otherwise coerced directly into a set(). |
| @@ -966,7 +966,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
| 966 | d -- the data store. | 966 | d -- the data store. |
| 967 | """ | 967 | """ |
| 968 | 968 | ||
| 969 | val = d.getVar(variable, True) | 969 | val = d.getVar(variable) |
| 970 | if not val: | 970 | if not val: |
| 971 | return falsevalue | 971 | return falsevalue |
| 972 | val = set(val.split()) | 972 | val = set(val.split()) |
| @@ -979,7 +979,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
| 979 | return falsevalue | 979 | return falsevalue |
| 980 | 980 | ||
| 981 | def contains_any(variable, checkvalues, truevalue, falsevalue, d): | 981 | def contains_any(variable, checkvalues, truevalue, falsevalue, d): |
| 982 | val = d.getVar(variable, True) | 982 | val = d.getVar(variable) |
| 983 | if not val: | 983 | if not val: |
| 984 | return falsevalue | 984 | return falsevalue |
| 985 | val = set(val.split()) | 985 | val = set(val.split()) |
| @@ -1378,10 +1378,10 @@ def edit_bblayers_conf(bblayers_conf, add, remove): | |||
| 1378 | 1378 | ||
| 1379 | def get_file_layer(filename, d): | 1379 | def get_file_layer(filename, d): |
| 1380 | """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" | 1380 | """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" |
| 1381 | collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split() | 1381 | collections = (d.getVar('BBFILE_COLLECTIONS') or '').split() |
| 1382 | collection_res = {} | 1382 | collection_res = {} |
| 1383 | for collection in collections: | 1383 | for collection in collections: |
| 1384 | collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or '' | 1384 | collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or '' |
| 1385 | 1385 | ||
| 1386 | def path_to_layer(path): | 1386 | def path_to_layer(path): |
| 1387 | # Use longest path so we handle nested layers | 1387 | # Use longest path so we handle nested layers |
| @@ -1394,7 +1394,7 @@ def get_file_layer(filename, d): | |||
| 1394 | return match | 1394 | return match |
| 1395 | 1395 | ||
| 1396 | result = None | 1396 | result = None |
| 1397 | bbfiles = (d.getVar('BBFILES', True) or '').split() | 1397 | bbfiles = (d.getVar('BBFILES') or '').split() |
| 1398 | bbfilesmatch = False | 1398 | bbfilesmatch = False |
| 1399 | for bbfilesentry in bbfiles: | 1399 | for bbfilesentry in bbfiles: |
| 1400 | if fnmatch.fnmatch(filename, bbfilesentry): | 1400 | if fnmatch.fnmatch(filename, bbfilesentry): |
| @@ -1471,7 +1471,7 @@ def export_proxies(d): | |||
| 1471 | if v in os.environ.keys(): | 1471 | if v in os.environ.keys(): |
| 1472 | exported = True | 1472 | exported = True |
| 1473 | else: | 1473 | else: |
| 1474 | v_proxy = d.getVar(v, True) | 1474 | v_proxy = d.getVar(v) |
| 1475 | if v_proxy is not None: | 1475 | if v_proxy is not None: |
| 1476 | os.environ[v] = v_proxy | 1476 | os.environ[v] = v_proxy |
| 1477 | exported = True | 1477 | exported = True |
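The contains() docstring above doubles as the API contract, so a usage example is cheap to give. A runnable sketch (DISTRO_FEATURES is just an illustrative variable name, borrowed from typical oe-core usage):

    import bb.data_smart
    import bb.utils

    d = bb.data_smart.DataSmart()
    d.setVar("DISTRO_FEATURES", "acl systemd usbhost")

    # contains(): true branch only if *every* checkvalue is present.
    print(bb.utils.contains("DISTRO_FEATURES", "systemd", "y", "n", d))          # y
    print(bb.utils.contains("DISTRO_FEATURES", "systemd wayland", "y", "n", d))  # n

    # contains_any(): true branch if *any* checkvalue is present.
    print(bb.utils.contains_any("DISTRO_FEATURES", "systemd wayland", "y", "n", d))  # y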
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py index 739ae27b97..cf9470427a 100644 --- a/bitbake/lib/bblayers/action.py +++ b/bitbake/lib/bblayers/action.py | |||
| @@ -180,7 +180,7 @@ build results (as the layer priority order has effectively changed). | |||
| 180 | 180 | ||
| 181 | if first_regex: | 181 | if first_regex: |
| 182 | # Find the BBFILES entries that match (which will have come from this conf/layer.conf file) | 182 | # Find the BBFILES entries that match (which will have come from this conf/layer.conf file) |
| 183 | bbfiles = str(self.tinfoil.config_data.getVar('BBFILES', True)).split() | 183 | bbfiles = str(self.tinfoil.config_data.getVar('BBFILES')).split() |
| 184 | bbfiles_layer = [] | 184 | bbfiles_layer = [] |
| 185 | for item in bbfiles: | 185 | for item in bbfiles: |
| 186 | if first_regex.match(item): | 186 | if first_regex.match(item): |
diff --git a/bitbake/lib/bblayers/common.py b/bitbake/lib/bblayers/common.py index b10fb4cead..98515ced4f 100644 --- a/bitbake/lib/bblayers/common.py +++ b/bitbake/lib/bblayers/common.py | |||
| @@ -12,7 +12,7 @@ class LayerPlugin(): | |||
| 12 | 12 | ||
| 13 | def tinfoil_init(self, tinfoil): | 13 | def tinfoil_init(self, tinfoil): |
| 14 | self.tinfoil = tinfoil | 14 | self.tinfoil = tinfoil |
| 15 | self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split() | 15 | self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS') or "").split() |
| 16 | layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) | 16 | layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) |
| 17 | self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()} | 17 | self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()} |
| 18 | 18 | ||
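The bblayers plugins get their configuration through tinfoil, which brings up a config-only cooker; the hunk above then reads BBLAYERS with the new call style. A rough sketch of that usage, assuming it runs inside an initialized build directory (e.g. after sourcing the environment setup script), not a definitive recipe:

    import bb.tinfoil

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=True)  # parse configuration only, no recipes

    bblayers = (tinfoil.config_data.getVar('BBLAYERS') or "").split()
    for layer in bblayers:
        print(layer)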
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py index 10ad718eba..013e952f38 100644 --- a/bitbake/lib/bblayers/layerindex.py +++ b/bitbake/lib/bblayers/layerindex.py | |||
| @@ -151,7 +151,7 @@ class LayerIndexPlugin(ActionPlugin): | |||
| 151 | def do_layerindex_fetch(self, args): | 151 | def do_layerindex_fetch(self, args): |
| 152 | """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf. | 152 | """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf. |
| 153 | """ | 153 | """ |
| 154 | apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True) | 154 | apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL') |
| 155 | if not apiurl: | 155 | if not apiurl: |
| 156 | logger.error("Cannot get BBLAYERS_LAYERINDEX_URL") | 156 | logger.error("Cannot get BBLAYERS_LAYERINDEX_URL") |
| 157 | return 1 | 157 | return 1 |
| @@ -173,8 +173,8 @@ class LayerIndexPlugin(ActionPlugin): | |||
| 173 | return 1 | 173 | return 1 |
| 174 | 174 | ||
| 175 | ignore_layers = [] | 175 | ignore_layers = [] |
| 176 | for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS', True).split(): | 176 | for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS').split(): |
| 177 | lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True) | 177 | lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection) |
| 178 | if lname: | 178 | if lname: |
| 179 | ignore_layers.append(lname) | 179 | ignore_layers.append(lname) |
| 180 | 180 | ||
| @@ -225,7 +225,7 @@ class LayerIndexPlugin(ActionPlugin): | |||
| 225 | printedlayers.append(dependency) | 225 | printedlayers.append(dependency) |
| 226 | 226 | ||
| 227 | if repourls: | 227 | if repourls: |
| 228 | fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR', True) | 228 | fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR') |
| 229 | if not fetchdir: | 229 | if not fetchdir: |
| 230 | logger.error("Cannot get BBLAYERS_FETCH_DIR") | 230 | logger.error("Cannot get BBLAYERS_FETCH_DIR") |
| 231 | return 1 | 231 | return 1 |
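Worth noting: the loop above uses the common "variable per collection" idiom — BBFILE_COLLECTIONS lists names, and details hang off VAR_<name> variables — the same shape as BBFILE_PATTERN_<collection> in get_file_layer() earlier. A stand-alone sketch of the lookup, assuming bitbake's lib/ directory is on sys.path:

    import bb.data_smart

    d = bb.data_smart.DataSmart()
    d.setVar("BBFILE_COLLECTIONS", "core mylayer")
    d.setVar("BBLAYERS_LAYERINDEX_NAME_core", "openembedded-core")

    ignore_layers = []
    for collection in (d.getVar('BBFILE_COLLECTIONS') or "").split():
        # Per-collection detail variable; may be unset for some collections.
        lname = d.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection)
        if lname:
            ignore_layers.append(lname)
    print(ignore_layers)  # -> ['openembedded-core']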
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py index ee1e7c8a1c..29491163c2 100644 --- a/bitbake/lib/bblayers/query.py +++ b/bitbake/lib/bblayers/query.py | |||
| @@ -62,7 +62,7 @@ are overlayed will also be listed, with a " (skipped)" suffix. | |||
| 62 | # factor - however, each layer.conf is free to either prepend or append to | 62 | # factor - however, each layer.conf is free to either prepend or append to |
| 63 | # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might | 63 | # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might |
| 64 | # not be exactly the order present in bblayers.conf either. | 64 | # not be exactly the order present in bblayers.conf either. |
| 65 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) | 65 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) |
| 66 | overlayed_class_found = False | 66 | overlayed_class_found = False |
| 67 | for (classfile, classdirs) in classes.items(): | 67 | for (classfile, classdirs) in classes.items(): |
| 68 | if len(classdirs) > 1: | 68 | if len(classdirs) > 1: |
| @@ -114,7 +114,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
| 114 | 114 | ||
| 115 | def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits): | 115 | def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits): |
| 116 | if inherits: | 116 | if inherits: |
| 117 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) | 117 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) |
| 118 | for classname in inherits: | 118 | for classname in inherits: |
| 119 | classfile = 'classes/%s.bbclass' % classname | 119 | classfile = 'classes/%s.bbclass' % classname |
| 120 | if not bb.utils.which(bbpath, classfile, history=False): | 120 | if not bb.utils.which(bbpath, classfile, history=False): |
| @@ -158,7 +158,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
| 158 | logger.plain("%s:", pn) | 158 | logger.plain("%s:", pn) |
| 159 | logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) | 159 | logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) |
| 160 | 160 | ||
| 161 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() | 161 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() |
| 162 | cls_re = re.compile('classes/') | 162 | cls_re = re.compile('classes/') |
| 163 | 163 | ||
| 164 | preffiles = [] | 164 | preffiles = [] |
| @@ -319,12 +319,12 @@ NOTE: .bbappend files can impact the dependencies. | |||
| 319 | ignore_layers = (args.ignore or '').split(',') | 319 | ignore_layers = (args.ignore or '').split(',') |
| 320 | 320 | ||
| 321 | pkg_fn = self.tinfoil.cooker_data.pkg_fn | 321 | pkg_fn = self.tinfoil.cooker_data.pkg_fn |
| 322 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) | 322 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) |
| 323 | self.require_re = re.compile(r"require\s+(.+)") | 323 | self.require_re = re.compile(r"require\s+(.+)") |
| 324 | self.include_re = re.compile(r"include\s+(.+)") | 324 | self.include_re = re.compile(r"include\s+(.+)") |
| 325 | self.inherit_re = re.compile(r"inherit\s+(.+)") | 325 | self.inherit_re = re.compile(r"inherit\s+(.+)") |
| 326 | 326 | ||
| 327 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() | 327 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() |
| 328 | 328 | ||
| 329 | # The bb's DEPENDS and RDEPENDS | 329 | # The bb's DEPENDS and RDEPENDS |
| 330 | for f in pkg_fn: | 330 | for f in pkg_fn: |
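The inherits check above leans on bb.utils.which(), which searches a ':'-separated path list (here BBPATH) for a relative file. A self-contained sketch against a throwaway directory, assuming bitbake's lib/ directory is on sys.path:

    import os
    import tempfile

    import bb.utils

    # Build a fake layer layout containing one class file.
    topdir = tempfile.mkdtemp()
    os.makedirs(os.path.join(topdir, "classes"))
    open(os.path.join(topdir, "classes", "native.bbclass"), "w").close()

    bbpath = "%s:/nonexistent" % topdir
    # Returns the first match found along the path list.
    print(bb.utils.which(bbpath, "classes/native.bbclass", history=False))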
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py index cafcc820cd..350b085a51 100644 --- a/bitbake/lib/prserv/serv.py +++ b/bitbake/lib/prserv/serv.py | |||
| @@ -420,7 +420,7 @@ class PRServiceConfigError(Exception): | |||
| 420 | def auto_start(d): | 420 | def auto_start(d): |
| 421 | global singleton | 421 | global singleton |
| 422 | 422 | ||
| 423 | host_params = list(filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))) | 423 | host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':'))) |
| 424 | if not host_params: | 424 | if not host_params: |
| 425 | return None | 425 | return None |
| 426 | 426 | ||
| @@ -431,7 +431,7 @@ def auto_start(d): | |||
| 431 | 431 | ||
| 432 | if is_local_special(host_params[0], int(host_params[1])) and not singleton: | 432 | if is_local_special(host_params[0], int(host_params[1])) and not singleton: |
| 433 | import bb.utils | 433 | import bb.utils |
| 434 | cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True)) | 434 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) |
| 435 | if not cachedir: | 435 | if not cachedir: |
| 436 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") | 436 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") |
| 437 | raise PRServiceConfigError | 437 | raise PRServiceConfigError |
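For reference, PRSERV_HOST is a "host:port" pair; auto_start() splits it and, if it denotes the local special case (conventionally localhost:0), starts an in-process PR server. A sketch of just the parsing step, with a hypothetical helper name:

    def parse_prserv_host(value):
        """Hypothetical helper mirroring the split in auto_start()."""
        host_params = list(filter(None, (value or '').split(':')))
        if not host_params:
            return None  # PR service not configured
        return host_params[0], int(host_params[1])

    print(parse_prserv_host("localhost:0"))  # -> ('localhost', 0)
    print(parse_prserv_host(""))             # -> None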
