summaryrefslogtreecommitdiffstats
path: root/bitbake
diff options
context:
space:
mode:
authorPatrick Ohly <patrick.ohly@intel.com>2016-11-18 16:23:22 +0100
committerRichard Purdie <richard.purdie@linuxfoundation.org>2017-07-21 22:36:09 +0100
commit7d5822bf4cb2089687c3c9a908cf4a4ef4e9a53a (patch)
tree9da07e0a22c6d7d1258d6c74120cd722f21b48c4 /bitbake
parent2e132efa2f985f72c3b6c5402747d0b0e1fc540a (diff)
downloadpoky-7d5822bf4cb2089687c3c9a908cf4a4ef4e9a53a.tar.gz
bitbake: codeparser.py: support deeply nested tokens
For shell constructs like echo hello & wait $! the process_tokens() method ended up with a situation where "token" in the "name, value = token" assignment was a list of tuples and not the expected tuple, causing the assignment to fail. There were already two for loops (one in _parse_shell(), one in process_tokens()) which iterated over token lists. Apparently the actual nesting can also be deeper. Now there is just one such loop in process_token_list() which calls itself recursively when it detects that a list entry is another list. As a side effect (improvement?!) of the loop removal in _parse_shell(), the local function definitions in process_tokens() get executed less often. Fixes: [YOCTO #10668] (Bitbake rev: 887ea6d25cee5114365dfbf1130603599e13ee80) Signed-off-by: Patrick Ohly <patrick.ohly@intel.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake')
-rw-r--r--bitbake/lib/bb/codeparser.py | 29
1 file changed, 17 insertions, 12 deletions
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 25938d6586..5d2d44065a 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -342,8 +342,7 @@ class ShellParser():
342 except pyshlex.NeedMore: 342 except pyshlex.NeedMore:
343 raise sherrors.ShellSyntaxError("Unexpected EOF") 343 raise sherrors.ShellSyntaxError("Unexpected EOF")
344 344
345 for token in tokens: 345 self.process_tokens(tokens)
346 self.process_tokens(token)
347 346
348 def process_tokens(self, tokens): 347 def process_tokens(self, tokens):
349 """Process a supplied portion of the syntax tree as returned by 348 """Process a supplied portion of the syntax tree as returned by
@@ -389,18 +388,24 @@ class ShellParser():
389 "case_clause": case_clause, 388 "case_clause": case_clause,
390 } 389 }
391 390
392 for token in tokens: 391 def process_token_list(tokens):
393 name, value = token 392 for token in tokens:
394 try: 393 if isinstance(token, list):
395 more_tokens, words = token_handlers[name](value) 394 process_token_list(token)
396 except KeyError: 395 continue
397 raise NotImplementedError("Unsupported token type " + name) 396 name, value = token
397 try:
398 more_tokens, words = token_handlers[name](value)
399 except KeyError:
400 raise NotImplementedError("Unsupported token type " + name)
401
402 if more_tokens:
403 self.process_tokens(more_tokens)
398 404
399 if more_tokens: 405 if words:
400 self.process_tokens(more_tokens) 406 self.process_words(words)
401 407
402 if words: 408 process_token_list(tokens)
403 self.process_words(words)
404 409
405 def process_words(self, words): 410 def process_words(self, words):
406 """Process a set of 'words' in pyshyacc parlance, which includes 411 """Process a set of 'words' in pyshyacc parlance, which includes