author     Richard Purdie <rpurdie@linux.intel.com>    2011-01-01 23:55:54 +0000
committer  Richard Purdie <rpurdie@linux.intel.com>    2011-01-04 14:46:54 +0000
commit     0090a798eb868ebc926944eac2e6d4a5aff3e1b3 (patch)
tree       9b90e66234e6ad7b9616c8c5944029426746110f
parent     e8c48e668c7525257926ab7db9b6e44aa2705483 (diff)
download   poky-0090a798eb868ebc926944eac2e6d4a5aff3e1b3.tar.gz
bitbake: Sync a load of whitespace and other non-functional changes with bitbake upstream
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
-rw-r--r--  bitbake/lib/bb/cache.py           |  9
-rw-r--r--  bitbake/lib/bb/codeparser.py      | 40
-rw-r--r--  bitbake/lib/bb/cooker.py          | 11
-rw-r--r--  bitbake/lib/bb/data.py            |  4
-rw-r--r--  bitbake/lib/bb/data_smart.py      | 15
-rw-r--r--  bitbake/lib/bb/event.py           |  2
-rw-r--r--  bitbake/lib/bb/msg.py             |  3
-rw-r--r--  bitbake/lib/bb/parse/__init__.py  |  7
-rw-r--r--  bitbake/lib/bb/pysh/pyshyacc.py   |  1
-rw-r--r--  bitbake/lib/bb/runqueue.py        | 25
-rw-r--r--  bitbake/lib/bb/siggen.py          | 39
-rw-r--r--  bitbake/lib/bb/utils.py           | 16
12 files changed, 87 insertions(+), 85 deletions(-)
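
A recurring pattern in the hunks below is the move from bb.msg helpers and eager %-formatting to per-module loggers with lazy argument interpolation. The sketch below is illustrative only (the module name and message are made up, and note that BitBake's own logger additionally takes a numeric debug level as its first argument, as seen in the hunks):

    import logging

    # Child loggers inherit handlers and levels from the "BitBake" root logger.
    logger = logging.getLogger("BitBake.Example")

    def report(task, fn):
        # Eager form (old style): the message string is built even when
        # DEBUG output is disabled.
        logger.debug("Marking task %s (%s) as buildable" % (task, fn))

        # Lazy form (new style in this commit): interpolation is deferred
        # until a handler actually emits the record.
        logger.debug("Marking task %s (%s) as buildable", task, fn)
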
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index fb02deb8ef..2f89350763 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -43,7 +43,7 @@ except ImportError:
     logger.info("Importing cPickle failed. "
                 "Falling back to a very slow implementation.")
 
-__cache_version__ = "133"
+__cache_version__ = "134"
 
 recipe_fields = (
     'pn',
@@ -100,19 +100,20 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
     def taskvar(cls, var, tasks, metadata):
         return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                     for task in tasks)
+
     @classmethod
     def getvar(cls, var, metadata):
         return metadata.getVar(var, True) or ''
 
     @classmethod
     def from_metadata(cls, filename, metadata):
+        tasks = metadata.getVar('__BBTASKS', False)
+
         pn = cls.getvar('PN', metadata)
         packages = cls.listvar('PACKAGES', metadata)
         if not pn in packages:
             packages.append(pn)
 
-        tasks = metadata.getVar('__BBTASKS', False)
-
         return RecipeInfo(
             tasks = tasks,
             basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
@@ -463,6 +464,7 @@ class Cache(object):
463 """ 464 """
464 Save data we need into the cache 465 Save data we need into the cache
465 """ 466 """
467
466 realfn = self.virtualfn2realfn(file_name)[0] 468 realfn = self.virtualfn2realfn(file_name)[0]
467 info = RecipeInfo.from_metadata(realfn, data) 469 info = RecipeInfo.from_metadata(realfn, data)
468 self.add_info(file_name, info, cacheData, parsed) 470 self.add_info(file_name, info, cacheData, parsed)
@@ -612,7 +614,6 @@ class CacheData(object):
             self.possible_world.append(fn)
 
         self.hashfn[fn] = info.hashfilename
-
         for task, taskhash in info.basetaskhashes.iteritems():
             identifier = '%s.%s' % (fn, task)
             self.basetaskhash[identifier] = taskhash
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 8b7db934d3..1d3557cd6d 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,16 +1,20 @@
-from bb.pysh import pyshyacc, pyshlex
-from itertools import chain
-from bb import msg, utils
 import ast
 import codegen
+import logging
+import os.path
+import bb.utils, bb.data
+from itertools import chain
+from bb.pysh import pyshyacc, pyshlex
 
+logger = logging.getLogger('BitBake.CodeParser')
 PARSERCACHE_VERSION = 2
 
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
-    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
+
 
 def check_indent(codestr):
     """If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -23,7 +27,7 @@ def check_indent(codestr):
         return codestr
 
     if codestr[i-1] is "	" or codestr[i-1] is " ":
         return "if 1:\n" + codestr
 
     return codestr
 
@@ -31,15 +35,18 @@ pythonparsecache = {}
 shellparsecache = {}
 
 def parser_cachefile(d):
-    cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
+    cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
+                bb.data.getVar("CACHE", d, True))
     if cachedir in [None, '']:
         return None
     bb.utils.mkdirhier(cachedir)
     cachefile = os.path.join(cachedir, "bb_codeparser.dat")
-    bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s' for codeparser cache" % cachefile)
+    logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
     return cachefile
 
 def parser_cache_init(d):
+    global pythonparsecache
+    global shellparsecache
 
     cachefile = parser_cachefile(d)
     if not cachefile:
@@ -54,17 +61,16 @@ def parser_cache_init(d):
     if version != PARSERCACHE_VERSION:
         return
 
-    bb.codeparser.pythonparsecache = data[0]
-    bb.codeparser.shellparsecache = data[1]
+    pythonparsecache = data[0]
+    shellparsecache = data[1]
 
 def parser_cache_save(d):
-
     cachefile = parser_cachefile(d)
     if not cachefile:
         return
 
     p = pickle.Pickler(file(cachefile, "wb"), -1)
-    p.dump([[bb.codeparser.pythonparsecache, bb.codeparser.shellparsecache], PARSERCACHE_VERSION])
+    p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
 
 class PythonParser():
     class ValueVisitor():
@@ -129,10 +135,10 @@ class PythonParser():
                 funcstr = codegen.to_source(func)
                 argstr = codegen.to_source(arg)
             except TypeError:
-                msg.debug(2, None, "Failed to convert function and argument to source form")
+                logger.debug(2, 'Failed to convert function and argument to source form')
             else:
-                msg.debug(1, None, "Warning: in call to '%s', argument '%s' is not a literal" %
-                          (funcstr, argstr))
+                logger.debug(1, "Warning: in call to '%s', argument '%s' is"
+                             " not a literal", funcstr, argstr)
 
         def visit_Call(self, node):
             if self.compare_name(self.getvars, node.func):
@@ -184,7 +190,7 @@ class PythonParser():
             self.execs = pythonparsecache[h]["execs"]
             return
 
         code = compile(check_indent(str(node)), "<string>", "exec",
                        ast.PyCF_ONLY_AST)
 
         visitor = self.ValueVisitor(code)
@@ -319,11 +325,11 @@ class ShellParser():
 
                 cmd = word[1]
                 if cmd.startswith("$"):
-                    msg.debug(1, None, "Warning: execution of non-literal command '%s'" % cmd)
+                    logger.debug(1, "Warning: execution of non-literal "
+                                    "command '%s'", cmd)
                 elif cmd == "eval":
                     command = " ".join(word for _, word in words[1:])
                     self.parse_shell(command)
                 else:
                     self.allexecs.add(cmd)
                 break
-
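
The parser cache handled in parser_cache_init/parser_cache_save above follows a simple versioned-pickle pattern: the data is dumped together with PARSERCACHE_VERSION, and a version mismatch on load means the cache is discarded and everything is reparsed. A self-contained sketch of that pattern, with generic names and with-based file handling that the Python 2 original does not use:

    import os
    import pickle

    CACHE_VERSION = 2

    def load_cache(cachefile):
        # Return the cached data, or None if the file is missing, corrupt or stale.
        if not cachefile or not os.path.exists(cachefile):
            return None
        with open(cachefile, "rb") as f:
            try:
                data, version = pickle.load(f)
            except (pickle.UnpicklingError, EOFError, ValueError):
                return None
        if version != CACHE_VERSION:
            return None  # format changed: fall back to reparsing
        return data

    def save_cache(cachefile, data):
        # Persist the data alongside the format version it was written with.
        with open(cachefile, "wb") as f:
            pickle.dump([data, CACHE_VERSION], f, -1)
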
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 9c48194a61..23fd72f432 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
@@ -483,10 +484,9 @@ class BBCooker:
         except (IOError, bb.parse.ParseError) as exc:
             parselog.critical("Unable to parse %s: %s" % (f, exc))
             sys.exit(1)
-
-        data = self.configuration.data
 
-        bb.parse.init_parser(data, self.configuration.dump_signatures)
+        data = self.configuration.data
+        bb.parse.init_parser(data)
         for f in files:
             data = _parse(f, data)
 
@@ -526,9 +526,7 @@ class BBCooker:
         if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
             bb.fetch.fetcher_init(self.configuration.data)
             bb.codeparser.parser_cache_init(self.configuration.data)
-
-        bb.parse.init_parser(data, self.configuration.dump_signatures)
-
+        bb.parse.init_parser(data)
         bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
 
     def handleCollections( self, collections ):
@@ -1043,7 +1041,6 @@ class CookerParser(object):
             self.shutdown(clean=False)
             bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
 
-
         self.current += 1
         self.virtuals += len(result)
         if parsed:
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 0aa8b404cb..b3abf318af 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -259,7 +259,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
     for key in keys:
         emit_var(key, o, d, False) and o.write('\n')
 
     emit_var(func, o, d, False) and o.write('\n')
     newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
     seen = set()
     while newdeps:
@@ -299,7 +299,7 @@ def build_dependencies(key, keys, shelldeps, d):
         deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
         deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
     except:
         bb.note("Error expanding variable %s" % key)
         raise
     return deps
     #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index ca72449b75..83e6f70cd7 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -28,10 +28,10 @@ BitBake build tools.
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
-import copy, re, sys
+import copy, re
 from collections import MutableMapping
 import logging
-import bb
+import bb, bb.codeparser
 from bb import utils
 from bb.COW import COWDictBase
 
@@ -42,6 +42,7 @@ __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<
 __expand_var_regexp__ = re.compile(r"\${[^{}]+}")
 __expand_python_regexp__ = re.compile(r"\${@.+?}")
 
+
 class VariableParse:
     def __init__(self, varname, d, val = None):
         self.varname = varname
@@ -72,11 +73,11 @@ class VariableParse:
             self.references |= parser.references
             self.execs |= parser.execs
 
-            value = utils.better_eval(codeobj, DataDict(self.d))
+            value = utils.better_eval(codeobj, DataContext(self.d))
             return str(value)
 
 
-class DataDict(dict):
+class DataContext(dict):
     def __init__(self, metadata, **kwargs):
         self.metadata = metadata
         dict.__init__(self, **kwargs)
@@ -129,7 +130,7 @@ class DataSmart(MutableMapping):
 
     def expand(self, s, varname):
         return self.expandWithRefs(s, varname).value
 
 
     def finalize(self):
         """Performs final steps upon the datastore, including application of overrides"""
@@ -291,7 +292,7 @@ class DataSmart(MutableMapping):
             self._makeShadowCopy(var)
         self.dict[var][flag] = flagvalue
 
-    def getVarFlag(self, var, flag, expand = False):
+    def getVarFlag(self, var, flag, expand=False):
         local_var = self._findVar(var)
         value = None
         if local_var:
@@ -374,7 +375,7 @@ class DataSmart(MutableMapping):
         value = self.getVar(variable, False)
         for key in keys:
             referrervalue = self.getVar(key, False)
-            if ref in referrervalue:
+            if referrervalue and ref in referrervalue:
                 self.setVar(key, referrervalue.replace(ref, value))
 
     def localkeys(self):
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index ad53ba015c..1d0fb7bfd1 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -343,6 +343,7 @@ class CacheLoadCompleted(Event):
         self.total = total
         self.num_entries = num_entries
 
+
 class DepTreeGenerated(Event):
     """
     Event when a dependency tree has been generated
@@ -384,4 +385,3 @@ class LogHandler(logging.Handler):
         fire(record, None)
         if bb.event.useStdout:
             print(self.format(record))
-
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index badc371ce1..13ee569e9a 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -93,12 +93,10 @@ domain = _NamedTuple("Domain", (
93 "RunQueue", 93 "RunQueue",
94 "TaskData", 94 "TaskData",
95 "Util")) 95 "Util"))
96
97logger = logging.getLogger("BitBake") 96logger = logging.getLogger("BitBake")
98loggers = Loggers() 97loggers = Loggers()
99debug_level = DebugLevel() 98debug_level = DebugLevel()
100 99
101#
102# Message control functions 100# Message control functions
103# 101#
104 102
@@ -191,4 +189,3 @@ def fatal(msgdomain, msg):
     else:
         loggers[msgdomain].critical(msg)
     sys.exit(1)
-
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index bef3d31ea7..7f1562e66e 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -31,8 +31,7 @@ import logging
 import bb
 import bb.utils
 import bb.siggen
-import bb.utils
-
+
 logger = logging.getLogger("BitBake.Parsing")
 
 class ParseError(Exception):
@@ -85,8 +84,8 @@ def init(fn, data):
         if h['supports'](fn):
             return h['init'](data)
 
-def init_parser(d, dumpsigs):
-    bb.parse.siggen = bb.siggen.init(d, dumpsigs)
+def init_parser(d):
+    bb.parse.siggen = bb.siggen.init(d)
 
 def resolve_file(fn, d):
     if not os.path.isabs(fn):
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
index a40d72c888..e8e80aac45 100644
--- a/bitbake/lib/bb/pysh/pyshyacc.py
+++ b/bitbake/lib/bb/pysh/pyshyacc.py
@@ -649,7 +649,6 @@ def p_error(p):
 try:
     import pyshtables
 except ImportError:
-    import os
     outputdir = os.path.dirname(__file__)
     if not os.access(outputdir, os.W_OK):
         outputdir = ''
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 2022749c81..bfb16b4f6e 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -992,7 +992,7 @@ class RunQueue:
 
         if self.state is runQueueComplete:
             # All done
-            logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
+            logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
             return False
 
         if self.state is runQueueChildProcess:
@@ -1114,7 +1114,6 @@ class RunQueueExecute:
 
         sys.stdout.flush()
         sys.stderr.flush()
-
         try:
             pipeinfd, pipeoutfd = os.pipe()
             pipein = os.fdopen(pipeinfd, 'rb', 4096)
@@ -1125,6 +1124,7 @@ class RunQueueExecute:
             bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
         if pid == 0:
             pipein.close()
+
             # Save out the PID so that the event can include it the
             # events
             bb.event.worker_pid = os.getpid()
@@ -1180,9 +1180,10 @@ class RunQueueExecuteDummy(RunQueueExecute):
     def __init__(self, rq):
         self.rq = rq
         self.stats = RunQueueStats(0)
+
     def finish(self):
         self.rq.state = runQueueComplete
         return
 
 class RunQueueExecuteTasks(RunQueueExecute):
     def __init__(self, rq):
@@ -1211,7 +1212,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
                     self.rq.scenequeue_covered.add(task)
                     found = True
 
-        bb.debug(1, "Full skip list %s" % self.rq.scenequeue_covered)
+        logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
 
         for task in self.rq.scenequeue_covered:
             self.task_skip(task)
@@ -1221,7 +1222,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
         for scheduler in self.rqdata.schedulers:
             if self.scheduler == scheduler.name:
                 self.sched = scheduler(self, self.rqdata)
-                logger.debug(1, "Using runqueue scheduler '%s'" % scheduler.name)
+                logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
                 break
         else:
             bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@@ -1247,7 +1248,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
                 self.runq_buildable[revdep] = 1
                 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
                 taskname = self.rqdata.runq_task[revdep]
-                logger.debug(1, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
+                logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
 
     def task_complete(self, task):
         self.stats.taskCompleted()
@@ -1295,7 +1296,8 @@ class RunQueueExecuteTasks(RunQueueExecute):
 
             taskdep = self.rqdata.dataCache.task_deps[fn]
             if 'noexec' in taskdep and taskname in taskdep['noexec']:
-                startevent = runQueueTaskStarted(task, self.stats, self.rq, noexec=True)
+                startevent = runQueueTaskStarted(task, self.stats, self.rq,
+                                                 noexec=True)
                 bb.event.fire(startevent, self.cfgData)
                 self.runq_running[task] = 1
                 self.stats.taskActive()
@@ -1328,11 +1330,11 @@ class RunQueueExecuteTasks(RunQueueExecute):
         # Sanity Checks
         for task in xrange(self.stats.total):
             if self.runq_buildable[task] == 0:
-                logger.error("Task %s never buildable!" % task)
+                logger.error("Task %s never buildable!", task)
             if self.runq_running[task] == 0:
-                logger.error("Task %s never ran!" % task)
+                logger.error("Task %s never ran!", task)
             if self.runq_complete[task] == 0:
-                logger.error("Task %s never completed!" % task)
+                logger.error("Task %s never completed!", task)
         self.rq.state = runQueueComplete
         return True
 
@@ -1478,7 +1480,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
1478 """ 1480 """
1479 1481
1480 index = self.rqdata.runq_setscene[task] 1482 index = self.rqdata.runq_setscene[task]
1481 logger.debug(1, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index)) 1483 logger.debug(1, 'Found task %s which could be accelerated',
1484 self.rqdata.get_user_idstring(index))
1482 1485
1483 self.scenequeue_covered.add(task) 1486 self.scenequeue_covered.add(task)
1484 self.scenequeue_updatecounters(task) 1487 self.scenequeue_updatecounters(task)
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 48f600a212..94ae2b48ab 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,37 +1,47 @@
 import hashlib
+import logging
 import re
 
+logger = logging.getLogger('BitBake.SigGen')
+
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
-    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
 
-def init(d, dumpsigs):
+def init(d):
     siggens = [obj for obj in globals().itervalues()
                if type(obj) is type and issubclass(obj, SignatureGenerator)]
 
     desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
     for sg in siggens:
         if desired == sg.name:
-            return sg(d, dumpsigs)
+            return sg(d)
             break
     else:
-        bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired)
-        bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens))
-        return SignatureGenerator(d, dumpsigs)
+        logger.error("Invalid signature generator '%s', using default 'noop'\n"
+                     "Available generators: %s",
+                     desired, ', '.join(obj.name for obj in siggens))
+        return SignatureGenerator(d)
 
 class SignatureGenerator(object):
     """
     """
     name = "noop"
 
-    def __init__(self, data, dumpsigs):
+    def __init__(self, data):
         return
 
     def finalise(self, fn, d, varient):
         return
 
+    def get_taskhash(self, fn, task, deps, dataCache):
+        return 0
+
+    def set_taskdata(self, hashes, deps):
+        return
+
     def stampfile(self, stampbase, taskname, taskhash):
         return "%s.%s" % (stampbase, taskname)
 
@@ -40,7 +50,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
40 """ 50 """
41 name = "basic" 51 name = "basic"
42 52
43 def __init__(self, data, dumpsigs): 53 def __init__(self, data):
44 self.basehash = {} 54 self.basehash = {}
45 self.taskhash = {} 55 self.taskhash = {}
46 self.taskdeps = {} 56 self.taskdeps = {}
@@ -78,7 +88,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
             if data is None:
                 bb.error("Task %s from %s seems to be empty?!" % (task, fn))
             self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
-            #bb.note("Hash for %s is %s" % (task, tashhash[task]))
 
         self.taskdeps[fn] = taskdeps
         self.gendeps[fn] = gendeps
@@ -110,7 +119,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
             # then process the actual dependencies
             dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
             if self.twl.search(dataCache.pkg_fn[dep_fn]):
-                #bb.note("Skipping %s" % dep)
                 continue
             if dep not in self.taskhash:
                 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
@@ -181,10 +189,6 @@ def compare_sigfiles(a, b):
     p2 = pickle.Unpickler(file(b, "rb"))
     b_data = p2.load()
 
-    #print "Checking"
-    #print str(a_data)
-    #print str(b_data)
-
     def dict_diff(a, b):
         sa = set(a.keys())
         sb = set(b.keys())
@@ -195,7 +199,7 @@ def compare_sigfiles(a, b):
                 changed.add(i)
         added = sa - sb
         removed = sb - sa
         return changed, added, removed
 
     if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
         print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
@@ -225,11 +229,6 @@ def compare_sigfiles(a, b):
     if changed:
         for dep in changed:
             print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
-    #if added:
-    #    print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
-    #if removed:
-    #    print "Dependency on Variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
-
     if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
         print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
 
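
For context on the siggen.py changes above: generators are now constructed with just the datastore (the dumpsigs flag is gone), and init() picks the class whose name matches BB_SIGNATURE_HANDLER from the module's globals. A hypothetical generator written against the post-change interface might look like the sketch below; the class and its hashing scheme are illustrative only, and a real one would need to live in (or be injected into) the bb.siggen module for init() to find it.

    import hashlib

    import bb.siggen

    class ToySignatureGenerator(bb.siggen.SignatureGenerator):
        # Selected by setting BB_SIGNATURE_HANDLER = "toy" in the configuration.
        name = "toy"

        def __init__(self, data):
            # No dumpsigs argument any more; read any configuration from the datastore.
            self.taskhash = {}

        def get_taskhash(self, fn, task, deps, dataCache):
            # Derive a stable hash from the task identifier and its sorted dependencies.
            ident = "%s.%s:%s" % (fn, task, sorted(deps))
            self.taskhash[fn + "." + task] = hashlib.md5(ident).hexdigest()
            return self.taskhash[fn + "." + task]

        def stampfile(self, stampbase, taskname, taskhash):
            return "%s.%s.%s" % (stampbase, taskname, taskhash)
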
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 3018f3e5d5..48ed0d72e5 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -311,10 +311,9 @@ def _print_trace(body, line):
     max_line = min(line + 4, len(body))
     for i in xrange(min_line, max_line + 1):
         if line == i:
-            logger.error(" *** %.4d:%s" % (i, body[i-1]) )
+            logger.error(' *** %.4d:%s', i, body[i-1])
         else:
-            logger.error("     %.4d:%s" % (i, body[i-1]) )
+            logger.error('     %.4d:%s', i, body[i-1])
-
 
 def better_compile(text, file, realfile, mode = "exec"):
     """
@@ -326,16 +325,17 @@ def better_compile(text, file, realfile, mode = "exec"):
     except Exception as e:
         # split the text into lines again
         body = text.split('\n')
-        logger.error("Error in compiling python function in: %s" % (realfile))
+        logger.error("Error in compiling python function in %s", realfile)
         logger.error(str(e))
         if e.lineno:
             logger.error("The lines leading to this error were:")
-            logger.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
+            logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
             _print_trace(body, e.lineno)
         else:
             logger.error("The function causing this error was:")
             for line in body:
                 logger.error(line)
+
         raise
 
 def better_exec(code, context, text, realfile = "<code>"):
@@ -376,16 +376,16 @@ def better_exec(code, context, text, realfile = "<code>"):
 
         logger.error("The code that was being executed was:")
         _print_trace(textarray, linefailed)
-        logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
+        logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
 
         # See if this is a function we constructed and has calls back into other functions in
         # "text". If so, try and improve the context of the error by diving down the trace
         level = 0
         nexttb = tb.tb_next
         while nexttb is not None:
             if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                 _print_trace(textarray, tbextract[level+1][1])
-                logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
+                logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
             else:
                 break
             nexttb = tb.tb_next