author	Patrick Ohly <patrick.ohly@intel.com>	2016-11-18 16:23:22 +0100
committer	Richard Purdie <richard.purdie@linuxfoundation.org>	2016-11-30 15:48:10 +0000
commit	caf1a69577b10bbb0e7914964e2ef4bb69c18def (patch)
tree	52e4cef4e7258a4322f6ad91775643dc4cb5f077 /bitbake
parent	38438b6cf42fb7ad45b9a901f57913af7e7591a3 (diff)
download	poky-caf1a69577b10bbb0e7914964e2ef4bb69c18def.tar.gz
bitbake: codeparser.py: support deeply nested tokens
For shell constructs like

    echo hello &
    wait $!

the process_tokens() method ended up with a situation where "token" in the "name, value = token" assignment was a list of tuples and not the expected tuple, causing the assignment to fail.

There were already two for loops (one in _parse_shell(), one in process_tokens()) which iterated over token lists. Apparently the actual nesting can also be deeper.

Now there is just one such loop in process_token_list() which calls itself recursively when it detects that a list entry is another list.

As a side effect (improvement?!) of the loop removal in _parse_shell(), the local function definitions in process_tokens() get executed less often.

Fixes: [YOCTO #10668]

(Bitbake rev: d18a74de9ac75ba32f84c40620ca9d47c1ef96a3)

Signed-off-by: Patrick Ohly <patrick.ohly@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
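A minimal sketch of the failure mode, using made-up token names rather than real pyshyacc output; only the shape of the data matters here:

# Hypothetical token stream for "echo hello & wait $!": one entry is itself
# a list of (name, value) tuples instead of a single tuple.
tokens = [
    ("async_command", "echo hello &"),             # made-up token name
    [("simple_command", "wait"), ("word", "$!")],  # nested token list
]

# The old loop assumed every entry was a (name, value) tuple:
#
#     for token in tokens:
#         name, value = token
#
# On the nested entry this unpacks two tuples (or raises ValueError for other
# list lengths), so the later token_handlers[name] lookup cannot work.

def process_token_list(tokens):
    """Recurse into nested lists so each (name, value) tuple is handled once."""
    for token in tokens:
        if isinstance(token, list):
            process_token_list(token)
            continue
        name, value = token
        print("handling", name, value)  # stand-in for token_handlers[name](value)

process_token_list(tokens)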
Diffstat (limited to 'bitbake')
-rw-r--r--	bitbake/lib/bb/codeparser.py | 29
1 file changed, 17 insertions(+), 12 deletions(-)
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 25938d6586..5d2d44065a 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -342,8 +342,7 @@ class ShellParser():
         except pyshlex.NeedMore:
             raise sherrors.ShellSyntaxError("Unexpected EOF")
 
-        for token in tokens:
-            self.process_tokens(token)
+        self.process_tokens(tokens)
 
     def process_tokens(self, tokens):
         """Process a supplied portion of the syntax tree as returned by
@@ -389,18 +388,24 @@ class ShellParser():
             "case_clause": case_clause,
         }
 
-        for token in tokens:
-            name, value = token
-            try:
-                more_tokens, words = token_handlers[name](value)
-            except KeyError:
-                raise NotImplementedError("Unsupported token type " + name)
+        def process_token_list(tokens):
+            for token in tokens:
+                if isinstance(token, list):
+                    process_token_list(token)
+                    continue
+                name, value = token
+                try:
+                    more_tokens, words = token_handlers[name](value)
+                except KeyError:
+                    raise NotImplementedError("Unsupported token type " + name)
 
-            if more_tokens:
-                self.process_tokens(more_tokens)
+                if more_tokens:
+                    self.process_tokens(more_tokens)
 
-            if words:
-                self.process_words(words)
+                if words:
+                    self.process_words(words)
+
+        process_token_list(tokens)
 
     def process_words(self, words):
         """Process a set of 'words' in pyshyacc parlance, which includes