Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/__init__.py                    |  2
-rw-r--r--  bitbake/lib/bb/cache.py                       |  4
-rw-r--r--  bitbake/lib/bb/cooker.py                      | 14
-rw-r--r--  bitbake/lib/bb/data_smart.py                  | 16
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py             |  5
-rw-r--r--  bitbake/lib/bb/fetch2/git.py                  | 48
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py                 |  2
-rw-r--r--  bitbake/lib/bb/parse/ast.py                   |  2
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py    |  2
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py  |  2
-rw-r--r--  bitbake/lib/bb/providers.py                   |  4
-rw-r--r--  bitbake/lib/bb/runqueue.py                    | 11
-rw-r--r--  bitbake/lib/bb/server/process.py              |  2
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py            |  4
-rw-r--r--  bitbake/lib/bb/tests/fetch.py                 | 38
15 files changed, 116 insertions, 40 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 09e161fef1..c7bc372ec8 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -49,7 +49,7 @@ class BBLoggerMixin(object):
         if not bb.event.worker_pid:
             if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
                 return
-            if loglevel > bb.msg.loggerDefaultLogLevel:
+            if loglevel < bb.msg.loggerDefaultLogLevel:
                 return
         return self.log(loglevel, msg, *args, **kwargs)
 
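The one-character change above makes bbdebug() follow the usual logging convention: a message is dropped when its level is below the configured threshold, not above it. A rough illustration of that convention (the threshold value here is an assumption for the sketch, not BitBake's actual default):

    import logging

    loggerDefaultLogLevel = logging.INFO   # assumed threshold for this sketch

    def should_emit(loglevel):
        # Matches the corrected test: suppress only messages below the threshold.
        return not (loglevel < loggerDefaultLogLevel)

    print(should_emit(logging.DEBUG))    # False: debug output is filtered
    print(should_emit(logging.WARNING))  # True: warnings pass through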
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 9e0c931a07..029753fea0 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -26,7 +26,7 @@ import re
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "153"
+__cache_version__ = "154"
 
 def getCacheFile(path, filename, mc, data_hash):
     mcspec = ''
@@ -94,6 +94,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         if not self.packages:
             self.packages.append(self.pn)
         self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
+        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
 
         self.skipreason = self.getvar('__SKIPPED', metadata)
         if self.skipreason:
@@ -120,7 +121,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
         self.depends = self.depvar('DEPENDS', metadata)
         self.rdepends = self.depvar('RDEPENDS', metadata)
         self.rrecommends = self.depvar('RRECOMMENDS', metadata)
-        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
         self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
         self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
         self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 1f4cc1e96d..4b5ef07eaa 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -73,7 +73,9 @@ class SkippedPackage:
             self.pn = info.pn
             self.skipreason = info.skipreason
             self.provides = info.provides
-            self.rprovides = info.rprovides
+            self.rprovides = info.packages + info.rprovides
+            for package in info.packages:
+                self.rprovides += info.rprovides_pkg[package]
         elif reason:
             self.skipreason = reason
 
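With the cache now recording per-package RPROVIDES for skipped recipes too (the cache.py hunk above), a SkippedPackage aggregates the package names, the recipe-level RPROVIDES and each package's RPROVIDES into a single list. A small sketch of the resulting value, using made-up recipe data:

    # Hypothetical data a skipped recipe might have produced:
    packages = ["foo", "foo-dev"]                    # PACKAGES
    rprovides = ["virtual-foo"]                      # recipe-level RPROVIDES
    rprovides_pkg = {"foo": ["bar"], "foo-dev": []}  # per-package RPROVIDES

    # Mirrors the aggregation done in SkippedPackage.__init__():
    skipped_rprovides = packages + rprovides
    for package in packages:
        skipped_rprovides += rprovides_pkg[package]

    print(skipped_rprovides)   # ['foo', 'foo-dev', 'virtual-foo', 'bar']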
@@ -2207,18 +2209,18 @@ class CookerParser(object):
         except bb.BBHandledException as exc:
             self.error += 1
             logger.error('Failed to parse recipe: %s' % exc.recipe)
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except ParsingFailure as exc:
             self.error += 1
             logger.error('Unable to parse %s: %s' %
                        (exc.recipe, bb.exceptions.to_string(exc.realexception)))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except bb.parse.ParseError as exc:
             self.error += 1
             logger.error(str(exc))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except bb.data_smart.ExpansionError as exc:
             self.error += 1
@@ -2227,7 +2229,7 @@ class CookerParser(object):
             tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
             logger.error('ExpansionError during parsing %s', value.recipe,
                          exc_info=(etype, value, tb))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except Exception as exc:
             self.error += 1
@@ -2239,7 +2241,7 @@ class CookerParser(object):
                 # Most likely, an exception occurred during raising an exception
                 import traceback
                 logger.error('Exception during parse: %s' % traceback.format_exc())
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
 
         self.current += 1
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index c559102cf5..b4ed62a4e5 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -28,7 +28,7 @@ logger = logging.getLogger("BitBake.Data")
 
 __setvar_keyword__ = ["_append", "_prepend", "_remove"]
 __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
 __expand_python_regexp__ = re.compile(r"\${@.+?}")
 __whitespace_split__ = re.compile(r'(\s)')
 __override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -481,6 +481,7 @@ class DataSmart(MutableMapping):
 
     def setVar(self, var, value, **loginfo):
         #print("var=" + str(var) + " val=" + str(value))
+        var = var.replace(":", "_")
         self.expand_cache = {}
         parsing=False
         if 'parsing' in loginfo:
@@ -589,6 +590,8 @@ class DataSmart(MutableMapping):
         """
         Rename the variable key to newkey
         """
+        key = key.replace(":", "_")
+        newkey = newkey.replace(":", "_")
         if key == newkey:
             bb.warn("Calling renameVar with equivalent keys (%s) is invalid" % key)
             return
@@ -637,6 +640,7 @@ class DataSmart(MutableMapping):
         self.setVar(var + "_prepend", value, ignore=True, parsing=True)
 
     def delVar(self, var, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         loginfo['detail'] = ""
@@ -664,6 +668,7 @@ class DataSmart(MutableMapping):
                         override = None
 
     def setVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         if 'op' not in loginfo:
@@ -687,6 +692,7 @@ class DataSmart(MutableMapping):
             self.dict["__exportlist"]["_content"].add(var)
 
     def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False, retparser=False):
+        var = var.replace(":", "_")
         if flag == "_content":
             cachename = var
         else:
@@ -814,6 +820,7 @@ class DataSmart(MutableMapping):
         return value
 
     def delVarFlag(self, var, flag, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         local_var, _ = self._findVar(var)
@@ -831,6 +838,7 @@ class DataSmart(MutableMapping):
             del self.dict[var][flag]
 
     def appendVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
         loginfo['op'] = 'append'
         loginfo['flag'] = flag
         self.varhistory.record(**loginfo)
@@ -838,6 +846,7 @@ class DataSmart(MutableMapping):
         self.setVarFlag(var, flag, newvalue, ignore=True)
 
     def prependVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
         loginfo['op'] = 'prepend'
         loginfo['flag'] = flag
         self.varhistory.record(**loginfo)
@@ -845,6 +854,7 @@ class DataSmart(MutableMapping):
         self.setVarFlag(var, flag, newvalue, ignore=True)
 
     def setVarFlags(self, var, flags, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
         infer_caller_details(loginfo)
         if not var in self.dict:
@@ -859,6 +869,7 @@ class DataSmart(MutableMapping):
             self.dict[var][i] = flags[i]
 
     def getVarFlags(self, var, expand = False, internalflags=False):
+        var = var.replace(":", "_")
         local_var, _ = self._findVar(var)
         flags = {}
 
@@ -875,6 +886,7 @@ class DataSmart(MutableMapping):
 
 
     def delVarFlags(self, var, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
         if not var in self.dict:
             self._makeShadowCopy(var)
@@ -1005,7 +1017,7 @@ class DataSmart(MutableMapping):
             else:
                 data.update({key:value})
 
-            varflags = d.getVarFlags(key, internalflags = True)
+            varflags = d.getVarFlags(key, internalflags = True, expand=["vardepvalue"])
             if not varflags:
                 continue
             for f in varflags:
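Taken together, these hooks normalise the new ':' override separator to the '_' form the datastore uses internally, at every entry point (setVar, getVarFlag, renameVar, the flag operations, and the parser changes further down). A minimal sketch of the visible effect, assuming bitbake's lib/ directory is on sys.path:

    import bb.data

    d = bb.data.init()

    # Both spellings land on the same underlying key, because the datastore
    # replaces ':' with '_' before storing or looking anything up.
    d.setVar("FOO:bar", "1")
    print(d.getVar("FOO_bar"))   # 1
    print(d.getVar("FOO:bar"))   # 1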
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 551bfb70f2..524165bd5f 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -853,11 +853,6 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
-    # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
-    # (for example via python3native.bbclass since warrior) is not set for
-    # host Python (otherwise tools like git-make-shallow will fail)
-    cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd
-
     # Disable pseudo as it may affect ssh, potentially causing it to hang.
     cmd = 'export PSEUDO_DISABLED=1; ' + cmd
 
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index b97967b487..f2cc02258e 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -141,6 +141,10 @@ class Git(FetchMethod):
             ud.proto = 'file'
         else:
             ud.proto = "git"
+        if ud.host == "github.com" and ud.proto == "git":
+            # github stopped supporting git protocol
+            # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+            ud.proto = "https"
 
         if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
             raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -379,6 +383,35 @@ class Git(FetchMethod):
         if missing_rev:
             raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
 
+        if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+            # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
+            # of all LFS blobs needed at the srcrev.
+            #
+            # It would be nice to just do this inline here by running 'git-lfs fetch'
+            # on the bare clonedir, but that operation requires a working copy on some
+            # releases of Git LFS.
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+            try:
+                # Do the checkout. This implicitly involves a Git LFS fetch.
+                Git.unpack(self, ud, tmpdir, d)
+
+                # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
+                # the bare clonedir.
+                #
+                # As this procedure is invoked repeatedly on incremental fetches as
+                # a recipe's SRCREV is bumped throughout its lifetime, this will
+                # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
+                # corresponding to all the blobs reachable from the different revs
+                # fetched across time.
+                #
+                # Only do this if the unpack resulted in a .git/lfs directory being
+                # created; this only happens if at least one blob needed to be
+                # downloaded.
+                if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
+                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
+            finally:
+                bb.utils.remove(tmpdir, recurse=True)
+
     def build_mirror_data(self, ud, d):
         if ud.shallow and ud.write_shallow_tarballs:
             if not os.path.exists(ud.fullshallow):
@@ -474,7 +507,7 @@ class Git(FetchMethod):
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
 
-        need_lfs = ud.parm.get("lfs", "1") == "1"
+        need_lfs = self._need_lfs(ud)
 
         if not need_lfs:
             ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
@@ -563,6 +596,9 @@ class Git(FetchMethod):
             raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
         return output.split()[0] != "0"
 
+    def _need_lfs(self, ud):
+        return ud.parm.get("lfs", "1") == "1"
+
     def _contains_lfs(self, ud, d, wd):
         """
         Check if the repository has 'lfs' (large file) content
@@ -573,8 +609,14 @@ class Git(FetchMethod):
         else:
             branchname = "master"
 
-        cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
-            ud.basecmd, ud.branches[ud.names[0]])
+        # The bare clonedir doesn't use the remote names; it has the branch immediately.
+        if wd == ud.clonedir:
+            refname = ud.branches[ud.names[0]]
+        else:
+            refname = "origin/%s" % ud.branches[ud.names[0]]
+
+        cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
+            ud.basecmd, refname)
 
         try:
             output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
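The refname selection above matters because a bare mirror clone has no 'origin' remote refs of its own. A hypothetical standalone version of the same check (not the fetcher's helper, and using plain subprocess instead of runfetchcmd) to illustrate the idea:

    import subprocess

    def contains_lfs(repodir, branch, bare=True):
        # In a bare clone the branch ref is used directly; in an unpacked
        # working copy it lives under the 'origin' remote, as in the hunk above.
        refname = branch if bare else "origin/%s" % branch
        cmd = ["git", "grep", "lfs", "%s:.gitattributes" % refname]
        result = subprocess.run(cmd, cwd=repodir, capture_output=True, text=True)
        # 'git grep' exits non-zero when nothing matches or the file is absent.
        return result.returncode == 0 and bool(result.stdout.strip())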
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index e6d9f528d0..83acd85bae 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -316,7 +316,7 @@ class Wget(FetchMethod):
             except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                 pass
 
-            with opener.open(r) as response:
+            with opener.open(r, timeout=30) as response:
                 pass
         except urllib.error.URLError as e:
             if try_again:
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 0714296af2..c8802c0587 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -97,6 +97,7 @@ class DataNode(AstNode):
     def eval(self, data):
         groupd = self.groupd
         key = groupd["var"]
+        key = key.replace(":", "_")
         loginfo = {
             'variable': key,
             'file': self.filename,
@@ -207,6 +208,7 @@ class ExportFuncsNode(AstNode):
     def eval(self, data):
 
         for func in self.n:
+            func = func.replace(":", "_")
             calledfunc = self.classname + "_" + func
 
             if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 215f940b60..12a78b6502 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
 # For compatibility
 bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
 
-__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
 __inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
 __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
 __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index af64d3446e..a7e81bd6ad 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,7 +20,7 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
 __config_regexp__ = re.compile( r"""
     ^
     (?P<exp>export\s+)?
-    (?P<var>[a-zA-Z0-9\-_+.${}/~]+?)
+    (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
     (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
 
     \s* (
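The extra ':' in the var group means assignments written in the newer override syntax are at least recognised as variable settings (setVar() then maps the ':' to '_', as shown earlier). A much-reduced sketch of that effect, not the full ConfHandler pattern:

    import re

    # Simplified: only the <var> group and a quoted value, no flags or operators.
    var_re = re.compile(r'^(?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)\s*=\s*"(?P<value>.*)"$')

    m = var_re.match('IMAGE_INSTALL:append = " dropbear"')
    print(m.group("var"))     # IMAGE_INSTALL:append
    print(m.group("value"))   #  dropbear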
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 81459c36d5..484e1ea4f3 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -151,7 +151,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
         if item:
             itemstr = " (for item %s)" % item
         if preferred_file is None:
-            logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+            logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
             available_vers = []
             for file_set in pkg_pn:
                 for f in file_set:
@@ -163,7 +163,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
                         available_vers.append(ver_str)
             if available_vers:
                 available_vers.sort()
-                logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
+                logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
         else:
             logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
 
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 28bdadb45e..aa1d6b2711 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1942,6 +1942,10 @@ class RunQueueExecute:
             logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
             err = True
 
+        for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
+            # No task should end up in both covered and uncovered, that is a bug.
+            logger.error("Setscene task %s in both covered and notcovered." % tid)
+
         for tid in self.rqdata.runq_setscene_tids:
             if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
                 err = True
@@ -2430,6 +2434,9 @@ class RunQueueExecute:
 
         for dep in sorted(self.sqdata.sq_deps[task]):
             if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
+                if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
+                    # dependency could be already processed, e.g. noexec setscene task
+                    continue
                 logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
                 self.sq_task_failoutright(dep)
                 continue
@@ -2791,6 +2798,7 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
     sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
 
     sqdata.hashes = {}
+    sqrq.sq_deferred = {}
     for mc in sorted(sqdata.multiconfigs):
         for tid in sorted(sqdata.sq_revdeps):
             if mc_from_tid(tid) != mc:
@@ -2803,6 +2811,9 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
                 continue
             if tid in sqrq.scenequeue_notcovered:
                 continue
+            if tid in sqrq.scenequeue_covered:
+                continue
+
             sqdata.outrightfail.add(tid)
 
             h = pending_hash_index(tid, rqdata)
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index b27b4aefe0..3e99bcef8f 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -509,7 +509,7 @@ class BitBakeServer(object):
         os.set_inheritable(self.bitbake_lock.fileno(), True)
         os.set_inheritable(self.readypipein, True)
         serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
-        os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
+        os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
 
 def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface):
 
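The 'or 0' matters because every argument handed to os.execl() must be a string; a server_timeout of None would otherwise arrive at bitbake-server as the literal text "None", which cannot be converted back to a number (the exact conversion done on the receiving side is an assumption here):

    server_timeout = None

    print(str(server_timeout or 0))   # '0'    -> float('0') works on the other end
    print(str(server_timeout))        # 'None' -> float('None') raises ValueError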
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6d..f485204791 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -111,9 +111,9 @@ ${D}${libdir}/pkgconfig/*.pc
         self.assertExecs(set(["sed"]))
 
     def test_parameter_expansion_modifiers(self):
-        # - and + are also valid modifiers for parameter expansion, but are
+        # -,+ and : are also valid modifiers for parameter expansion, but are
         # valid characters in bitbake variable names, so are not included here
-        for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+        for i in ('=', '?', '#', '%', '##', '%%'):
             name = "foo%sbar" % i
             self.parseExpression("${%s}" % name)
             self.assertNotIn(name, self.references)
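The dropped entries follow from the data_smart.py change above: once ':' is a legal variable-name character, a shell parameter expansion such as ${foo:-bar} looks exactly like a BitBake variable reference, so it can no longer serve as a negative test case. A quick check against the updated expansion regexp:

    import re

    # Same pattern as the new __expand_var_regexp__ in data_smart.py.
    expand_var = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")

    print(bool(expand_var.match("${foo:-bar}")))  # True: now parses as a variable reference
    print(bool(expand_var.match("${foo#bar}")))   # False: '#' is still excluded, so the test keeps it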
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index da17d7f281..4671532f2b 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -939,7 +939,7 @@ class FetcherNetworkTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_git_submodule_CLI11(self):
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -954,12 +954,12 @@ class FetcherNetworkTest(FetcherTest):
     @skipIfNoNetwork()
     def test_git_submodule_update_CLI11(self):
         """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
 
         # CLI11 that pulls in a newer nlohmann-json
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -993,7 +993,7 @@ class FetcherNetworkTest(FetcherTest):
993 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ 993 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
994 994
995 # This repository also has submodules where the module (name), path and url do not align 995 # This repository also has submodules where the module (name), path and url do not align
996 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" 996 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
997 fetcher = bb.fetch.Fetch([url], self.d) 997 fetcher = bb.fetch.Fetch([url], self.d)
998 fetcher.download() 998 fetcher.download()
999 # Previous cwd has been deleted 999 # Previous cwd has been deleted
@@ -1180,7 +1180,7 @@ class FetchLatestVersionTest(FetcherTest):
1180 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") 1180 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
1181 : "1.0", 1181 : "1.0",
1182 # version pattern "pkg_name-vX.Y.Z" 1182 # version pattern "pkg_name-vX.Y.Z"
1183 ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") 1183 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
1184 : "1.4.0", 1184 : "1.4.0",
1185 # combination version pattern 1185 # combination version pattern
1186 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") 1186 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
@@ -2051,13 +2051,14 @@ class GitLfsTest(FetcherTest):
             cwd = self.gitdir
         return bb.process.run(cmd, cwd=cwd)[0]
 
-    def fetch(self, uri=None):
+    def fetch(self, uri=None, download=True):
         uris = self.d.getVar('SRC_URI').split()
         uri = uris[0]
         d = self.d
 
         fetcher = bb.fetch2.Fetch(uris, d)
-        fetcher.download()
+        if download:
+            fetcher.download()
         ud = fetcher.ud[uri]
         return fetcher, ud
 
@@ -2067,16 +2068,21 @@ class GitLfsTest(FetcherTest):
         uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        fetcher, ud = self.fetch()
+        # Careful: suppress initial attempt at downloading until
+        # we know whether git-lfs is installed.
+        fetcher, ud = self.fetch(uri=None, download=False)
         self.assertIsNotNone(ud.method._find_git_lfs)
 
-        # If git-lfs can be found, the unpack should be successful
-        ud.method._find_git_lfs = lambda d: True
-        shutil.rmtree(self.gitdir, ignore_errors=True)
-        fetcher.unpack(self.d.getVar('WORKDIR'))
+        # If git-lfs can be found, the unpack should be successful. Only
+        # attempt this with the real live copy of git-lfs installed.
+        if ud.method._find_git_lfs(self.d):
+            fetcher.download()
+            shutil.rmtree(self.gitdir, ignore_errors=True)
+            fetcher.unpack(self.d.getVar('WORKDIR'))
 
         # If git-lfs cannot be found, the unpack should throw an error
         with self.assertRaises(bb.fetch2.FetchError):
+            fetcher.download()
             ud.method._find_git_lfs = lambda d: False
             shutil.rmtree(self.gitdir, ignore_errors=True)
             fetcher.unpack(self.d.getVar('WORKDIR'))
@@ -2087,10 +2093,16 @@ class GitLfsTest(FetcherTest):
         uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
+        # In contrast to test_lfs_enabled(), allow the implicit download
+        # done by self.fetch() to occur here. The point of this test case
+        # is to verify that the fetcher can survive even if the source
+        # repository has Git LFS usage configured.
         fetcher, ud = self.fetch()
         self.assertIsNotNone(ud.method._find_git_lfs)
 
-        # If git-lfs can be found, the unpack should be successful
+        # If git-lfs can be found, the unpack should be successful. A
+        # live copy of git-lfs is not required for this case, so
+        # unconditionally forge its presence.
         ud.method._find_git_lfs = lambda d: True
         shutil.rmtree(self.gitdir, ignore_errors=True)
         fetcher.unpack(self.d.getVar('WORKDIR'))