summaryrefslogtreecommitdiffstats
path: root/bitbake
diff options
context:
space:
mode:
authorChris Larson <chris_larson@mentor.com>2010-06-10 10:35:31 -0700
committerRichard Purdie <rpurdie@linux.intel.com>2011-01-04 14:46:33 +0000
commitecc68fa4fbb579e97ea45156e79a293b073697a0 (patch)
tree6d08682e43476e37ccf48ee14c8d81e208d1c897 /bitbake
parentd3a45c7d41a88d79389fc40eb68816e4939fb6f9 (diff)
downloadpoky-ecc68fa4fbb579e97ea45156e79a293b073697a0.tar.gz
Switch bitbake internals to use logging directly rather than bb.msg
We use a custom Logger subclass for our loggers. This logger provides: - a 'debug' method which accepts a debug level - a 'plain' method which bypasses log formatting - a 'verbose' method which is more detailed than info, but less than debug (Bitbake rev: 3b2c1fe5ca56daebb24073a9dd45723d3efd2a8d) Signed-off-by: Chris Larson <chris_larson@mentor.com> Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Diffstat (limited to 'bitbake')
-rwxr-xr-xbitbake/bin/bitbake12
-rw-r--r--bitbake/lib/bb/__init__.py31
-rw-r--r--bitbake/lib/bb/build.py26
-rw-r--r--bitbake/lib/bb/cache.py53
-rw-r--r--bitbake/lib/bb/cooker.py126
-rw-r--r--bitbake/lib/bb/data_smart.py10
-rw-r--r--bitbake/lib/bb/fetch/__init__.py35
-rw-r--r--bitbake/lib/bb/fetch/bzr.py13
-rw-r--r--bitbake/lib/bb/fetch/cvs.py13
-rw-r--r--bitbake/lib/bb/fetch/git.py9
-rw-r--r--bitbake/lib/bb/fetch/hg.py14
-rw-r--r--bitbake/lib/bb/fetch/local.py2
-rw-r--r--bitbake/lib/bb/fetch/osc.py11
-rw-r--r--bitbake/lib/bb/fetch/perforce.py18
-rw-r--r--bitbake/lib/bb/fetch/repo.py2
-rw-r--r--bitbake/lib/bb/fetch/svk.py10
-rw-r--r--bitbake/lib/bb/fetch/svn.py14
-rw-r--r--bitbake/lib/bb/fetch/wget.py13
-rw-r--r--bitbake/lib/bb/msg.py18
-rw-r--r--bitbake/lib/bb/parse/__init__.py9
-rw-r--r--bitbake/lib/bb/parse/ast.py12
-rw-r--r--bitbake/lib/bb/parse/parse_py/BBHandler.py9
-rw-r--r--bitbake/lib/bb/parse/parse_py/ConfHandler.py5
-rw-r--r--bitbake/lib/bb/persist_data.py8
-rw-r--r--bitbake/lib/bb/providers.py25
-rw-r--r--bitbake/lib/bb/runqueue.py90
-rw-r--r--bitbake/lib/bb/taskdata.py61
-rw-r--r--bitbake/lib/bb/utils.py47
28 files changed, 374 insertions, 322 deletions
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index a80c01ca50..470e9dcc43 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -57,23 +57,23 @@ class BBConfiguration(object):
57 self.pkgs_to_build = [] 57 self.pkgs_to_build = []
58 58
59 59
60def print_exception(exc, value, tb): 60def print_exception(*exc_info):
61 """Send exception information through bb.msg""" 61 logger.error("Uncaught exception: ", exc_info=exc_info)
62 bb.fatal("".join(format_exception(exc, value, tb, limit=8))) 62 sys.exit(1)
63 63
64sys.excepthook = print_exception 64sys.excepthook = print_exception
65 65
66 66
67# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
68warnlog = logging.getLogger("BitBake.Warnings")
67_warnings_showwarning = warnings.showwarning 69_warnings_showwarning = warnings.showwarning
68def _showwarning(message, category, filename, lineno, file=None, line=None): 70def _showwarning(message, category, filename, lineno, file=None, line=None):
69 """Display python warning messages using bb.msg"""
70 if file is not None: 71 if file is not None:
71 if _warnings_showwarning is not None: 72 if _warnings_showwarning is not None:
72 _warnings_showwarning(message, category, filename, lineno, file, line) 73 _warnings_showwarning(message, category, filename, lineno, file, line)
73 else: 74 else:
74 s = warnings.formatwarning(message, category, filename, lineno) 75 s = warnings.formatwarning(message, category, filename, lineno)
75 s = s.split("\n")[0] 76 warnlog.warn(s)
76 bb.msg.warn(None, s)
77 77
78warnings.showwarning = _showwarning 78warnings.showwarning = _showwarning
79warnings.filterwarnings("ignore") 79warnings.filterwarnings("ignore")
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 8cda4255bc..11983f0e0e 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -35,7 +35,25 @@ class NullHandler(logging.Handler):
35 def emit(self, record): 35 def emit(self, record):
36 pass 36 pass
37 37
38Logger = logging.getLoggerClass()
39class BBLogger(Logger):
40 def __init__(self, name):
41 if name.split(".")[0] == "BitBake":
42 self.debug = self.bbdebug
43 Logger.__init__(self, name)
44
45 def bbdebug(self, level, msg, *args, **kwargs):
46 return self.log(logging.DEBUG - level - 1, msg, *args, **kwargs)
47
48 def plain(self, msg, *args, **kwargs):
49 return self.log(logging.INFO + 1, msg, *args, **kwargs)
50
51 def verbose(self, msg, *args, **kwargs):
52 return self.log(logging.INFO - 1, msg, *args, **kwargs)
53
38logging.raiseExceptions = False 54logging.raiseExceptions = False
55logging.setLoggerClass(BBLogger)
56
39logger = logging.getLogger("BitBake") 57logger = logging.getLogger("BitBake")
40logger.addHandler(NullHandler()) 58logger.addHandler(NullHandler())
41logger.setLevel(logging.INFO) 59logger.setLevel(logging.INFO)
@@ -48,22 +66,23 @@ if "BBDEBUG" in os.environ:
48 66
49# Messaging convenience functions 67# Messaging convenience functions
50def plain(*args): 68def plain(*args):
51 bb.msg.plain(''.join(args)) 69 logger.plain(''.join(args))
52 70
53def debug(lvl, *args): 71def debug(lvl, *args):
54 bb.msg.debug(lvl, None, ''.join(args)) 72 logger.debug(lvl, ''.join(args))
55 73
56def note(*args): 74def note(*args):
57 bb.msg.note(1, None, ''.join(args)) 75 logger.info(''.join(args))
58 76
59def warn(*args): 77def warn(*args):
60 bb.msg.warn(None, ''.join(args)) 78 logger.warn(''.join(args))
61 79
62def error(*args): 80def error(*args):
63 bb.msg.error(None, ''.join(args)) 81 logger.error(''.join(args))
64 82
65def fatal(*args): 83def fatal(*args):
66 bb.msg.fatal(None, ''.join(args)) 84 logger.critical(''.join(args))
85 sys.exit(1)
67 86
68 87
69def deprecated(func, name = None, advice = ""): 88def deprecated(func, name = None, advice = ""):
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 18a75edca3..9589313238 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -26,9 +26,14 @@
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig 26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27 27
28from bb import data, event, mkdirhier, utils 28from bb import data, event, mkdirhier, utils
29import bb, os, sys 29import os
30import sys
31import logging
32import bb
30import bb.utils 33import bb.utils
31 34
35logger = logging.getLogger("BitBake.Build")
36
32# When we execute a python function we'd like certain things 37# When we execute a python function we'd like certain things
33# in all namespaces, hence we add them to __builtins__ 38# in all namespaces, hence we add them to __builtins__
34# If we do not do this and use the exec globals, they will 39# If we do not do this and use the exec globals, they will
@@ -193,7 +198,8 @@ def exec_func_shell(func, d, runfile, logfile, flags):
193 198
194 f = open(runfile, "w") 199 f = open(runfile, "w")
195 f.write("#!/bin/sh -e\n") 200 f.write("#!/bin/sh -e\n")
196 if bb.msg.debug_level['default'] > 0: f.write("set -x\n") 201 if logger.getEffectiveLevel() <= logging.DEBUG:
202 f.write("set -x\n")
197 data.emit_func(func, f, d) 203 data.emit_func(func, f, d)
198 204
199 f.write("cd %s\n" % os.getcwd()) 205 f.write("cd %s\n" % os.getcwd())
@@ -226,7 +232,7 @@ def exec_task(fn, task, d):
226 # Check whther this is a valid task 232 # Check whther this is a valid task
227 if not data.getVarFlag(task, 'task', d): 233 if not data.getVarFlag(task, 'task', d):
228 event.fire(TaskInvalid(task, d), d) 234 event.fire(TaskInvalid(task, d), d)
229 bb.msg.error(bb.msg.domain.Build, "No such task: %s" % task) 235 logger.error("No such task: %s" % task)
230 return 1 236 return 1
231 237
232 quieterr = False 238 quieterr = False
@@ -234,7 +240,7 @@ def exec_task(fn, task, d):
234 quieterr = True 240 quieterr = True
235 241
236 try: 242 try:
237 bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task) 243 logger.debug(1, "Executing task %s", task)
238 old_overrides = data.getVar('OVERRIDES', d, 0) 244 old_overrides = data.getVar('OVERRIDES', d, 0)
239 localdata = data.createCopy(d) 245 localdata = data.createCopy(d)
240 data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata) 246 data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
@@ -269,8 +275,8 @@ def exec_task(fn, task, d):
269 si = file('/dev/null', 'r') 275 si = file('/dev/null', 'r')
270 try: 276 try:
271 so = file(logfile, 'w') 277 so = file(logfile, 'w')
272 except OSError as e: 278 except OSError:
273 bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e) 279 logger.exception("Opening log file '%s'", logfile)
274 pass 280 pass
275 se = so 281 se = so
276 282
@@ -312,7 +318,7 @@ def exec_task(fn, task, d):
312 logfile = None 318 logfile = None
313 msg = message 319 msg = message
314 if not quieterr: 320 if not quieterr:
315 bb.msg.error(bb.msg.domain.Build, "Task failed: %s" % message ) 321 logger.error("Task failed: %s" % message )
316 failedevent = TaskFailed(msg, logfile, task, d) 322 failedevent = TaskFailed(msg, logfile, task, d)
317 event.fire(failedevent, d) 323 event.fire(failedevent, d)
318 return 1 324 return 1
@@ -320,8 +326,8 @@ def exec_task(fn, task, d):
320 except Exception: 326 except Exception:
321 from traceback import format_exc 327 from traceback import format_exc
322 if not quieterr: 328 if not quieterr:
323 bb.msg.error(bb.msg.domain.Build, "Build of %s failed" % (task)) 329 logger.error("Build of %s failed" % (task))
324 bb.msg.error(bb.msg.domain.Build, format_exc()) 330 logger.error(format_exc())
325 failedevent = TaskFailed("Task Failed", None, task, d) 331 failedevent = TaskFailed("Task Failed", None, task, d)
326 event.fire(failedevent, d) 332 event.fire(failedevent, d)
327 return 1 333 return 1
@@ -342,7 +348,7 @@ def exec_task(fn, task, d):
342 se.close() 348 se.close()
343 349
344 if logfile and os.path.exists(logfile) and os.path.getsize(logfile) == 0: 350 if logfile and os.path.exists(logfile) and os.path.getsize(logfile) == 0:
345 bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile) 351 logger.debug(2, "Zero size logfile %s, removing", logfile)
346 os.remove(logfile) 352 os.remove(logfile)
347 try: 353 try:
348 os.remove(loglink) 354 os.remove(loglink)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index c6f3794d5e..255c6168dd 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -29,14 +29,17 @@
29 29
30 30
31import os 31import os
32import logging
32import bb.data 33import bb.data
33import bb.utils 34import bb.utils
34 35
36logger = logging.getLogger("BitBake.Cache")
37
35try: 38try:
36 import cPickle as pickle 39 import cPickle as pickle
37except ImportError: 40except ImportError:
38 import pickle 41 import pickle
39 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") 42 logger.info("Importing cPickle failed. Falling back to a very slow implementation.")
40 43
41__cache_version__ = "132" 44__cache_version__ = "132"
42 45
@@ -57,13 +60,13 @@ class Cache:
57 60
58 if self.cachedir in [None, '']: 61 if self.cachedir in [None, '']:
59 self.has_cache = False 62 self.has_cache = False
60 bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.") 63 logger.info("Not using a cache. Set CACHE = <directory> to enable.")
61 return 64 return
62 65
63 self.has_cache = True 66 self.has_cache = True
64 self.cachefile = os.path.join(self.cachedir, "bb_cache.dat") 67 self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
65 68
66 bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir) 69 logger.debug(1, "Using cache in '%s'", self.cachedir)
67 bb.utils.mkdirhier(self.cachedir) 70 bb.utils.mkdirhier(self.cachedir)
68 71
69 # If any of configuration.data's dependencies are newer than the 72 # If any of configuration.data's dependencies are newer than the
@@ -84,14 +87,14 @@ class Cache:
84 if version_data['BITBAKE_VER'] != bb.__version__: 87 if version_data['BITBAKE_VER'] != bb.__version__:
85 raise ValueError('Bitbake Version Mismatch') 88 raise ValueError('Bitbake Version Mismatch')
86 except EOFError: 89 except EOFError:
87 bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...") 90 logger.info("Truncated cache found, rebuilding...")
88 self.depends_cache = {} 91 self.depends_cache = {}
89 except: 92 except:
90 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...") 93 logger.info("Invalid cache found, rebuilding...")
91 self.depends_cache = {} 94 self.depends_cache = {}
92 else: 95 else:
93 if os.path.isfile(self.cachefile): 96 if os.path.isfile(self.cachefile):
94 bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...") 97 logger.info("Out of date cache found, rebuilding...")
95 98
96 def getVar(self, var, fn, exp = 0): 99 def getVar(self, var, fn, exp = 0):
97 """ 100 """
@@ -111,7 +114,7 @@ class Cache:
111 if fn != self.data_fn: 114 if fn != self.data_fn:
112 # We're trying to access data in the cache which doesn't exist 115 # We're trying to access data in the cache which doesn't exist
113 # yet setData hasn't been called to setup the right access. Very bad. 116 # yet setData hasn't been called to setup the right access. Very bad.
114 bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn)) 117 logger.error("data_fn %s and fn %s don't match", self.data_fn, fn)
115 118
116 self.cacheclean = False 119 self.cacheclean = False
117 result = bb.data.getVar(var, self.data, exp) 120 result = bb.data.getVar(var, self.data, exp)
@@ -152,7 +155,6 @@ class Cache:
152 if virtualfn.startswith('virtual:'): 155 if virtualfn.startswith('virtual:'):
153 cls = virtualfn.split(':', 2)[1] 156 cls = virtualfn.split(':', 2)[1]
154 fn = virtualfn.replace('virtual:' + cls + ':', '') 157 fn = virtualfn.replace('virtual:' + cls + ':', '')
155 #bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
156 return (fn, cls) 158 return (fn, cls)
157 159
158 def realfn2virtual(self, realfn, cls): 160 def realfn2virtual(self, realfn, cls):
@@ -160,9 +162,7 @@ class Cache:
160 Convert a real filename + the associated subclass keyword to a virtual filename 162 Convert a real filename + the associated subclass keyword to a virtual filename
161 """ 163 """
162 if cls == "": 164 if cls == "":
163 #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
164 return realfn 165 return realfn
165 #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
166 return "virtual:" + cls + ":" + realfn 166 return "virtual:" + cls + ":" + realfn
167 167
168 def loadDataFull(self, virtualfn, appends, cfgData): 168 def loadDataFull(self, virtualfn, appends, cfgData):
@@ -173,7 +173,7 @@ class Cache:
173 173
174 (fn, cls) = self.virtualfn2realfn(virtualfn) 174 (fn, cls) = self.virtualfn2realfn(virtualfn)
175 175
176 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn) 176 logger.debug(1, "Parsing %s (full)", fn)
177 177
178 bb_data = self.load_bbfile(fn, appends, cfgData) 178 bb_data = self.load_bbfile(fn, appends, cfgData)
179 return bb_data[cls] 179 return bb_data[cls]
@@ -198,13 +198,13 @@ class Cache:
198 virtualfn = self.realfn2virtual(fn, cls) 198 virtualfn = self.realfn2virtual(fn, cls)
199 if self.depends_cache[virtualfn]["__SKIPPED"]: 199 if self.depends_cache[virtualfn]["__SKIPPED"]:
200 skipped += 1 200 skipped += 1
201 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn) 201 logger.debug(1, "Skipping %s", virtualfn)
202 continue 202 continue
203 self.handle_data(virtualfn, cacheData) 203 self.handle_data(virtualfn, cacheData)
204 virtuals += 1 204 virtuals += 1
205 return True, skipped, virtuals 205 return True, skipped, virtuals
206 206
207 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn) 207 logger.debug(1, "Parsing %s", fn)
208 208
209 bb_data = self.load_bbfile(fn, appends, cfgData) 209 bb_data = self.load_bbfile(fn, appends, cfgData)
210 210
@@ -213,7 +213,7 @@ class Cache:
213 self.setData(virtualfn, fn, bb_data[data]) 213 self.setData(virtualfn, fn, bb_data[data])
214 if self.getVar("__SKIPPED", virtualfn): 214 if self.getVar("__SKIPPED", virtualfn):
215 skipped += 1 215 skipped += 1
216 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn) 216 logger.debug(1, "Skipping %s", virtualfn)
217 else: 217 else:
218 self.handle_data(virtualfn, cacheData) 218 self.handle_data(virtualfn, cacheData)
219 virtuals += 1 219 virtuals += 1
@@ -248,7 +248,7 @@ class Cache:
248 248
249 # File isn't in depends_cache 249 # File isn't in depends_cache
250 if not fn in self.depends_cache: 250 if not fn in self.depends_cache:
251 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn) 251 logger.debug(2, "Cache: %s is not cached", fn)
252 self.remove(fn) 252 self.remove(fn)
253 return False 253 return False
254 254
@@ -256,13 +256,13 @@ class Cache:
256 256
257 # Check file still exists 257 # Check file still exists
258 if mtime == 0: 258 if mtime == 0:
259 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn) 259 logger.debug(2, "Cache: %s no longer exists", fn)
260 self.remove(fn) 260 self.remove(fn)
261 return False 261 return False
262 262
263 # Check the file's timestamp 263 # Check the file's timestamp
264 if mtime != self.getVar("CACHETIMESTAMP", fn, True): 264 if mtime != self.getVar("CACHETIMESTAMP", fn, True):
265 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn) 265 logger.debug(2, "Cache: %s changed", fn)
266 self.remove(fn) 266 self.remove(fn)
267 return False 267 return False
268 268
@@ -277,11 +277,10 @@ class Cache:
277 return False 277 return False
278 278
279 if (fmtime != old_mtime): 279 if (fmtime != old_mtime):
280 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f)) 280 logger.debug(2, "Cache: %s's dependency %s changed", fn, f)
281 self.remove(fn) 281 self.remove(fn)
282 return False 282 return False
283 283
284 #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
285 if not fn in self.clean: 284 if not fn in self.clean:
286 self.clean[fn] = "" 285 self.clean[fn] = ""
287 286
@@ -292,16 +291,16 @@ class Cache:
292 virtualfn = self.realfn2virtual(fn, cls) 291 virtualfn = self.realfn2virtual(fn, cls)
293 self.clean[virtualfn] = "" 292 self.clean[virtualfn] = ""
294 if not virtualfn in self.depends_cache: 293 if not virtualfn in self.depends_cache:
295 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % virtualfn) 294 logger.debug(2, "Cache: %s is not cached", virtualfn)
296 invalid = True 295 invalid = True
297 296
298 # If any one of the varients is not present, mark cache as invalid for all 297 # If any one of the varients is not present, mark cache as invalid for all
299 if invalid: 298 if invalid:
300 for cls in (multi or "").split(): 299 for cls in (multi or "").split():
301 virtualfn = self.realfn2virtual(fn, cls) 300 virtualfn = self.realfn2virtual(fn, cls)
302 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % virtualfn) 301 logger.debug(2, "Cache: Removing %s from cache", virtualfn)
303 del self.clean[virtualfn] 302 del self.clean[virtualfn]
304 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % fn) 303 logger.debug(2, "Cache: removing %s from cache", fn)
305 del self.clean[fn] 304 del self.clean[fn]
306 return False 305 return False
307 306
@@ -312,7 +311,7 @@ class Cache:
312 Remove a fn from the cache 311 Remove a fn from the cache
313 Called from the parser in error cases 312 Called from the parser in error cases
314 """ 313 """
315 bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn) 314 logger.debug(1, "Removing %s from cache", fn)
316 if fn in self.depends_cache: 315 if fn in self.depends_cache:
317 del self.depends_cache[fn] 316 del self.depends_cache[fn]
318 if fn in self.clean: 317 if fn in self.clean:
@@ -329,7 +328,7 @@ class Cache:
329 return 328 return
330 329
331 if self.cacheclean: 330 if self.cacheclean:
332 bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.") 331 logger.info("Cache is clean, not saving.")
333 return 332 return
334 333
335 version_data = {} 334 version_data = {}
@@ -339,10 +338,10 @@ class Cache:
339 cache_data = copy.copy(self.depends_cache) 338 cache_data = copy.copy(self.depends_cache)
340 for fn in self.depends_cache: 339 for fn in self.depends_cache:
341 if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']: 340 if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
342 bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn) 341 logger.debug(2, "Not caching %s, marked as not cacheable", fn)
343 del cache_data[fn] 342 del cache_data[fn]
344 elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']: 343 elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
345 bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn) 344 logger.error("Not caching %s as it had SRCREVINACTION in PV. Please report this bug", fn)
346 del cache_data[fn] 345 del cache_data[fn]
347 346
348 p = pickle.Pickler(file(self.cachefile, "wb" ), -1 ) 347 p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
@@ -360,7 +359,7 @@ class Cache:
360 pe = self.getVar('PE', file_name, True) or "0" 359 pe = self.getVar('PE', file_name, True) or "0"
361 pv = self.getVar('PV', file_name, True) 360 pv = self.getVar('PV', file_name, True)
362 if 'SRCREVINACTION' in pv: 361 if 'SRCREVINACTION' in pv:
363 bb.msg.note(1, bb.msg.domain.Cache, "Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name)) 362 logger.info("Found SRCREVINACTION in PV (%s) or %s. Please report this bug.", pv, file_name)
364 pr = self.getVar('PR', file_name, True) 363 pr = self.getVar('PR', file_name, True)
365 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0") 364 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
366 depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "") 365 depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 95f38f6236..de213f03f4 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -23,12 +23,19 @@
23 23
24from __future__ import print_function 24from __future__ import print_function
25import sys, os, glob, os.path, re, time 25import sys, os, glob, os.path, re, time
26import logging
26import sre_constants 27import sre_constants
27from cStringIO import StringIO 28from cStringIO import StringIO
28from contextlib import closing 29from contextlib import closing
29import bb 30import bb
30from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue 31from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
31 32
33logger = logging.getLogger("BitBake")
34collectlog = logging.getLogger("BitBake.Collection")
35buildlog = logging.getLogger("BitBake.Build")
36parselog = logging.getLogger("BitBake.Parsing")
37providerlog = logging.getLogger("BitBake.Provider")
38
32class MultipleMatches(Exception): 39class MultipleMatches(Exception):
33 """ 40 """
34 Exception raised when multiple file matches are found 41 Exception raised when multiple file matches are found
@@ -102,7 +109,7 @@ class BBCooker:
102 import termios 109 import termios
103 tcattr = termios.tcgetattr(fd) 110 tcattr = termios.tcgetattr(fd)
104 if tcattr[3] & termios.TOSTOP: 111 if tcattr[3] & termios.TOSTOP:
105 bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...") 112 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
106 tcattr[3] = tcattr[3] & ~termios.TOSTOP 113 tcattr[3] = tcattr[3] & ~termios.TOSTOP
107 termios.tcsetattr(fd, termios.TCSANOW, tcattr) 114 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
108 115
@@ -118,7 +125,7 @@ class BBCooker:
118 if nice: 125 if nice:
119 curnice = os.nice(0) 126 curnice = os.nice(0)
120 nice = int(nice) - curnice 127 nice = int(nice) - curnice
121 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice)) 128 buildlog.verbose("Renice to %s " % os.nice(nice))
122 129
123 def parseCommandLine(self): 130 def parseCommandLine(self):
124 # Parse any commandline into actions 131 # Parse any commandline into actions
@@ -126,11 +133,11 @@ class BBCooker:
126 self.commandlineAction = None 133 self.commandlineAction = None
127 134
128 if 'world' in self.configuration.pkgs_to_build: 135 if 'world' in self.configuration.pkgs_to_build:
129 bb.msg.error(bb.msg.domain.Build, "'world' is not a valid target for --environment.") 136 buildlog.error("'world' is not a valid target for --environment.")
130 elif len(self.configuration.pkgs_to_build) > 1: 137 elif len(self.configuration.pkgs_to_build) > 1:
131 bb.msg.error(bb.msg.domain.Build, "Only one target can be used with the --environment option.") 138 buildlog.error("Only one target can be used with the --environment option.")
132 elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0: 139 elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
133 bb.msg.error(bb.msg.domain.Build, "No target should be used with the --environment and --buildfile options.") 140 buildlog.error("No target should be used with the --environment and --buildfile options.")
134 elif len(self.configuration.pkgs_to_build) > 0: 141 elif len(self.configuration.pkgs_to_build) > 0:
135 self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build] 142 self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
136 else: 143 else:
@@ -148,13 +155,13 @@ class BBCooker:
148 self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd] 155 self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
149 else: 156 else:
150 self.commandlineAction = None 157 self.commandlineAction = None
151 bb.msg.error(bb.msg.domain.Build, "Please specify a package name for dependency graph generation.") 158 buildlog.error("Please specify a package name for dependency graph generation.")
152 else: 159 else:
153 if self.configuration.pkgs_to_build: 160 if self.configuration.pkgs_to_build:
154 self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd] 161 self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
155 else: 162 else:
156 self.commandlineAction = None 163 self.commandlineAction = None
157 bb.msg.error(bb.msg.domain.Build, "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") 164 buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
158 165
159 def runCommands(self, server, data, abort): 166 def runCommands(self, server, data, abort):
160 """ 167 """
@@ -180,8 +187,8 @@ class BBCooker:
180 preferred_versions[pn] = (pref_ver, pref_file) 187 preferred_versions[pn] = (pref_ver, pref_file)
181 latest_versions[pn] = (last_ver, last_file) 188 latest_versions[pn] = (last_ver, last_file)
182 189
183 bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version")) 190 logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
184 bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "=================")) 191 logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
185 192
186 for p in sorted(pkg_pn): 193 for p in sorted(pkg_pn):
187 pref = preferred_versions[p] 194 pref = preferred_versions[p]
@@ -193,7 +200,7 @@ class BBCooker:
193 if pref == latest: 200 if pref == latest:
194 prefstr = "" 201 prefstr = ""
195 202
196 bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr)) 203 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
197 204
198 def compareRevisions(self): 205 def compareRevisions(self):
199 ret = bb.fetch.fetcher_compare_revisons(self.configuration.data) 206 ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
@@ -230,27 +237,21 @@ class BBCooker:
230 if fn: 237 if fn:
231 try: 238 try:
232 envdata = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data) 239 envdata = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
233 except IOError as e: 240 except Exception, e:
234 bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e)) 241 parselog.exception("Unable to read %s", fn)
235 raise
236 except Exception as e:
237 bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
238 raise 242 raise
239 243
240 # emit variables and shell functions 244 # emit variables and shell functions
241 try: 245 data.update_data(envdata)
242 data.update_data(envdata) 246 with closing(StringIO()) as env:
243 with closing(StringIO()) as env: 247 data.emit_env(env, envdata, True)
244 data.emit_env(env, envdata, True) 248 logger.plain(env.getvalue())
245 bb.msg.plain(env.getvalue())
246 except Exception as e:
247 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
248 249
249 # emit the metadata which isnt valid shell 250 # emit the metadata which isnt valid shell
250 data.expandKeys(envdata) 251 data.expandKeys(envdata)
251 for e in envdata.keys(): 252 for e in envdata.keys():
252 if data.getVarFlag( e, 'python', envdata ): 253 if data.getVarFlag( e, 'python', envdata ):
253 bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1))) 254 logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
254 255
255 def generateDepTreeData(self, pkgs_to_build, task): 256 def generateDepTreeData(self, pkgs_to_build, task):
256 """ 257 """
@@ -374,7 +375,7 @@ class BBCooker:
374 for rdepend in depgraph["rdepends-pn"][pn]: 375 for rdepend in depgraph["rdepends-pn"][pn]:
375 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file) 376 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
376 print("}", file=depends_file) 377 print("}", file=depends_file)
377 bb.msg.plain("PN dependencies saved to 'pn-depends.dot'") 378 logger.info("PN dependencies saved to 'pn-depends.dot'")
378 379
379 depends_file = file('package-depends.dot', 'w' ) 380 depends_file = file('package-depends.dot', 'w' )
380 print("digraph depends {", file=depends_file) 381 print("digraph depends {", file=depends_file)
@@ -395,7 +396,7 @@ class BBCooker:
395 for rdepend in depgraph["rrecs-pkg"][package]: 396 for rdepend in depgraph["rrecs-pkg"][package]:
396 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) 397 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
397 print("}", file=depends_file) 398 print("}", file=depends_file)
398 bb.msg.plain("Package dependencies saved to 'package-depends.dot'") 399 logger.info("Package dependencies saved to 'package-depends.dot'")
399 400
400 tdepends_file = file('task-depends.dot', 'w' ) 401 tdepends_file = file('task-depends.dot', 'w' )
401 print("digraph depends {", file=tdepends_file) 402 print("digraph depends {", file=tdepends_file)
@@ -407,7 +408,7 @@ class BBCooker:
407 for dep in depgraph["tdepends"][task]: 408 for dep in depgraph["tdepends"][task]:
408 print('"%s" -> "%s"' % (task, dep), file=tdepends_file) 409 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
409 print("}", file=tdepends_file) 410 print("}", file=tdepends_file)
410 bb.msg.plain("Task dependencies saved to 'task-depends.dot'") 411 logger.info("Task dependencies saved to 'task-depends.dot'")
411 412
412 def buildDepgraph( self ): 413 def buildDepgraph( self ):
413 all_depends = self.status.all_depends 414 all_depends = self.status.all_depends
@@ -431,10 +432,10 @@ class BBCooker:
431 try: 432 try:
432 (providee, provider) = p.split(':') 433 (providee, provider) = p.split(':')
433 except: 434 except:
434 bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p) 435 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
435 continue 436 continue
436 if providee in self.status.preferred and self.status.preferred[providee] != provider: 437 if providee in self.status.preferred and self.status.preferred[providee] != provider:
437 bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee])) 438 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
438 self.status.preferred[providee] = provider 439 self.status.preferred[providee] = provider
439 440
440 # Calculate priorities for each file 441 # Calculate priorities for each file
@@ -443,8 +444,7 @@ class BBCooker:
443 444
444 for collection, pattern, regex, _ in self.status.bbfile_config_priorities: 445 for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
445 if not regex in matched: 446 if not regex in matched:
446 bb.msg.warn(bb.msg.domain.Provider, "No bb files matched BBFILE_PATTERN_%s '%s'" % 447 collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
447 (collection, pattern))
448 448
449 def buildWorldTargetList(self): 449 def buildWorldTargetList(self):
450 """ 450 """
@@ -452,19 +452,19 @@ class BBCooker:
452 """ 452 """
453 all_depends = self.status.all_depends 453 all_depends = self.status.all_depends
454 pn_provides = self.status.pn_provides 454 pn_provides = self.status.pn_provides
455 bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"") 455 parselog.debug(1, "collating packages for \"world\"")
456 for f in self.status.possible_world: 456 for f in self.status.possible_world:
457 terminal = True 457 terminal = True
458 pn = self.status.pkg_fn[f] 458 pn = self.status.pkg_fn[f]
459 459
460 for p in pn_provides[pn]: 460 for p in pn_provides[pn]:
461 if p.startswith('virtual/'): 461 if p.startswith('virtual/'):
462 bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p)) 462 parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
463 terminal = False 463 terminal = False
464 break 464 break
465 for pf in self.status.providers[p]: 465 for pf in self.status.providers[p]:
466 if self.status.pkg_fn[pf] != pn: 466 if self.status.pkg_fn[pf] != pn:
467 bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p)) 467 parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
468 terminal = False 468 terminal = False
469 break 469 break
470 if terminal: 470 if terminal:
@@ -478,8 +478,9 @@ class BBCooker:
478 """Drop off into a shell""" 478 """Drop off into a shell"""
479 try: 479 try:
480 from bb import shell 480 from bb import shell
481 except ImportError as details: 481 except ImportError:
482 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details ) 482 parselog.exception("Interactive mode not available")
483 sys.exit(1)
483 else: 484 else:
484 shell.start( self ) 485 shell.start( self )
485 486
@@ -502,14 +503,14 @@ class BBCooker:
502 503
503 layerconf = self._findLayerConf() 504 layerconf = self._findLayerConf()
504 if layerconf: 505 if layerconf:
505 bb.msg.debug(2, bb.msg.domain.Parsing, "Found bblayers.conf (%s)" % layerconf) 506 parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
506 data = bb.parse.handle(layerconf, data) 507 data = bb.parse.handle(layerconf, data)
507 508
508 layers = (bb.data.getVar('BBLAYERS', data, True) or "").split() 509 layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
509 510
510 data = bb.data.createCopy(data) 511 data = bb.data.createCopy(data)
511 for layer in layers: 512 for layer in layers:
512 bb.msg.debug(2, bb.msg.domain.Parsing, "Adding layer %s" % layer) 513 parselog.debug(2, "Adding layer %s", layer)
513 bb.data.setVar('LAYERDIR', layer, data) 514 bb.data.setVar('LAYERDIR', layer, data)
514 data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data) 515 data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data)
515 516
@@ -529,7 +530,7 @@ class BBCooker:
529 bb.data.delVar('LAYERDIR', data) 530 bb.data.delVar('LAYERDIR', data)
530 531
531 if not data.getVar("BBPATH", True): 532 if not data.getVar("BBPATH", True):
532 bb.fatal("The BBPATH variable is not set") 533 raise SystemExit("The BBPATH variable is not set")
533 534
534 data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data) 535 data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data)
535 536
@@ -553,10 +554,9 @@ class BBCooker:
553 554
554 bb.event.fire(bb.event.ConfigParsed(), self.configuration.data) 555 bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
555 556
556 except IOError as e: 557 except (IOError, bb.parse.ParseError):
557 bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (files, str(e))) 558 parselog.exception("Error when parsing %s", files)
558 except bb.parse.ParseError as details: 559 sys.exit(1)
559 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (files, details) )
560 560
561 def handleCollections( self, collections ): 561 def handleCollections( self, collections ):
562 """Handle collections""" 562 """Handle collections"""
@@ -565,22 +565,22 @@ class BBCooker:
565 for c in collection_list: 565 for c in collection_list:
566 regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1) 566 regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
567 if regex == None: 567 if regex == None:
568 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c) 568 parselog.error("BBFILE_PATTERN_%s not defined" % c)
569 continue 569 continue
570 priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1) 570 priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
571 if priority == None: 571 if priority == None:
572 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c) 572 parselog.error("BBFILE_PRIORITY_%s not defined" % c)
573 continue 573 continue
574 try: 574 try:
575 cre = re.compile(regex) 575 cre = re.compile(regex)
576 except re.error: 576 except re.error:
577 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex)) 577 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
578 continue 578 continue
579 try: 579 try:
580 pri = int(priority) 580 pri = int(priority)
581 self.status.bbfile_config_priorities.append((c, regex, cre, pri)) 581 self.status.bbfile_config_priorities.append((c, regex, cre, pri))
582 except ValueError: 582 except ValueError:
583 bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority)) 583 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
584 584
585 def buildSetVars(self): 585 def buildSetVars(self):
586 """ 586 """
@@ -616,9 +616,9 @@ class BBCooker:
616 """ 616 """
617 matches = self.matchFiles(buildfile) 617 matches = self.matchFiles(buildfile)
618 if len(matches) != 1: 618 if len(matches) != 1:
619 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches))) 619 parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
620 for f in matches: 620 for f in matches:
621 bb.msg.error(bb.msg.domain.Parsing, " %s" % f) 621 parselog.error(" %s" % f)
622 raise MultipleMatches 622 raise MultipleMatches
623 return matches[0] 623 return matches[0]
624 624
@@ -662,7 +662,7 @@ class BBCooker:
662 662
663 # Remove stamp for target if force mode active 663 # Remove stamp for target if force mode active
664 if self.configuration.force: 664 if self.configuration.force:
665 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn)) 665 logger.verbose("Remove stamp %s, %s", task, fn)
666 bb.build.del_stamp('do_%s' % task, self.status, fn) 666 bb.build.del_stamp('do_%s' % task, self.status, fn)
667 667
668 # Setup taskdata structure 668 # Setup taskdata structure
@@ -691,7 +691,7 @@ class BBCooker:
691 retval = rq.execute_runqueue() 691 retval = rq.execute_runqueue()
692 except runqueue.TaskFailure as exc: 692 except runqueue.TaskFailure as exc:
693 for fnid in exc.args: 693 for fnid in exc.args:
694 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 694 buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
695 failures = failures + 1 695 failures = failures + 1
696 retval = False 696 retval = False
697 if not retval: 697 if not retval:
@@ -728,7 +728,7 @@ class BBCooker:
728 retval = rq.execute_runqueue() 728 retval = rq.execute_runqueue()
729 except runqueue.TaskFailure as exc: 729 except runqueue.TaskFailure as exc:
730 for fnid in exc.args: 730 for fnid in exc.args:
731 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 731 buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
732 failures = failures + 1 732 failures = failures + 1
733 retval = False 733 retval = False
734 if not retval: 734 if not retval:
@@ -779,11 +779,11 @@ class BBCooker:
779 try: 779 try:
780 import psyco 780 import psyco
781 except ImportError: 781 except ImportError:
782 bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.") 782 collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
783 else: 783 else:
784 psyco.bind( CookerParser.parse_next ) 784 psyco.bind( CookerParser.parse_next )
785 else: 785 else:
786 bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.") 786 collectlog.info("You have disabled Psyco. This decreases performance.")
787 787
788 self.status = bb.cache.CacheData() 788 self.status = bb.cache.CacheData()
789 789
@@ -802,7 +802,7 @@ class BBCooker:
802 self.cookerState = cookerParsing 802 self.cookerState = cookerParsing
803 803
804 if not self.parser.parse_next(): 804 if not self.parser.parse_next():
805 bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete") 805 collectlog.debug(1, "parsing complete")
806 self.buildDepgraph() 806 self.buildDepgraph()
807 self.cookerState = cookerParsed 807 self.cookerState = cookerParsed
808 return None 808 return None
@@ -850,7 +850,7 @@ class BBCooker:
850 parsed, cached, skipped, masked = 0, 0, 0, 0 850 parsed, cached, skipped, masked = 0, 0, 0, 0
851 self.bb_cache = bb.cache.init(self) 851 self.bb_cache = bb.cache.init(self)
852 852
853 bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files") 853 collectlog.debug(1, "collecting .bb files")
854 854
855 files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split() 855 files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
856 data.setVar("BBFILES", " ".join(files), self.configuration.data) 856 data.setVar("BBFILES", " ".join(files), self.configuration.data)
@@ -859,7 +859,7 @@ class BBCooker:
859 files = self.get_bbfiles() 859 files = self.get_bbfiles()
860 860
861 if not len(files): 861 if not len(files):
862 bb.msg.error(bb.msg.domain.Collection, "no recipe files to build, check your BBPATH and BBFILES?") 862 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
863 bb.event.fire(CookerExit(), self.configuration.event_data) 863 bb.event.fire(CookerExit(), self.configuration.event_data)
864 864
865 newfiles = set() 865 newfiles = set()
@@ -879,13 +879,14 @@ class BBCooker:
879 try: 879 try:
880 bbmask_compiled = re.compile(bbmask) 880 bbmask_compiled = re.compile(bbmask)
881 except sre_constants.error: 881 except sre_constants.error:
882 bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.") 882 collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
883 return list(newfiles), 0
883 884
884 bbfiles = [] 885 bbfiles = []
885 bbappend = [] 886 bbappend = []
886 for f in newfiles: 887 for f in newfiles:
887 if bbmask and bbmask_compiled.search(f): 888 if bbmask and bbmask_compiled.search(f):
888 bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f) 889 collectlog.debug(1, "skipping masked file %s", f)
889 masked += 1 890 masked += 1
890 continue 891 continue
891 if f.endswith('.bb'): 892 if f.endswith('.bb'):
@@ -893,7 +894,7 @@ class BBCooker:
893 elif f.endswith('.bbappend'): 894 elif f.endswith('.bbappend'):
894 bbappend.append(f) 895 bbappend.append(f)
895 else: 896 else:
896 bb.msg.note(1, bb.msg.domain.Collection, "File %s of unknown filetype in BBFILES? Ignorning..." % f) 897 collectlog.debug(1, "skipping %s: unknown file extension", f)
897 898
898 # Build a list of .bbappend files for each .bb file 899 # Build a list of .bbappend files for each .bb file
899 self.appendlist = {} 900 self.appendlist = {}
@@ -1008,11 +1009,6 @@ class CookerParser:
1008 self.skipped += skipped 1009 self.skipped += skipped
1009 self.virtuals += virtuals 1010 self.virtuals += virtuals
1010 1011
1011 except IOError as e:
1012 self.error += 1
1013 cooker.bb_cache.remove(f)
1014 bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
1015 pass
1016 except KeyboardInterrupt: 1012 except KeyboardInterrupt:
1017 cooker.bb_cache.remove(f) 1013 cooker.bb_cache.remove(f)
1018 cooker.bb_cache.sync() 1014 cooker.bb_cache.sync()
@@ -1020,7 +1016,7 @@ class CookerParser:
1020 except Exception as e: 1016 except Exception as e:
1021 self.error += 1 1017 self.error += 1
1022 cooker.bb_cache.remove(f) 1018 cooker.bb_cache.remove(f)
1023 bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f)) 1019 parselog.exception("Unable to open %s", f)
1024 except: 1020 except:
1025 cooker.bb_cache.remove(f) 1021 cooker.bb_cache.remove(f)
1026 raise 1022 raise
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 22dadec5de..7de90056d5 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -30,10 +30,12 @@ BitBake build tools.
30 30
31import copy, re, sys 31import copy, re, sys
32from collections import MutableMapping 32from collections import MutableMapping
33import logging
33import bb 34import bb
34from bb import utils 35from bb import utils
35from bb.COW import COWDictBase 36from bb.COW import COWDictBase
36 37
38logger = logging.getLogger("BitBake.Data")
37 39
38__setvar_keyword__ = ["_append", "_prepend"] 40__setvar_keyword__ = ["_append", "_prepend"]
39__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') 41__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
@@ -101,10 +103,8 @@ class DataSmart(MutableMapping):
101 s = __expand_python_regexp__.sub(varparse.python_sub, s) 103 s = __expand_python_regexp__.sub(varparse.python_sub, s)
102 if s == olds: 104 if s == olds:
103 break 105 break
104 except KeyboardInterrupt: 106 except Exception:
105 raise 107 logger.exception("Error evaluating '%s'", s)
106 except:
107 bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
108 raise 108 raise
109 109
110 varparse.value = s 110 varparse.value = s
@@ -152,7 +152,7 @@ class DataSmart(MutableMapping):
152 try: 152 try:
153 self[name] = self[var] 153 self[name] = self[var]
154 except Exception: 154 except Exception:
155 bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar") 155 logger.info("Untracked delVar")
156 156
157 # now on to the appends and prepends 157 # now on to the appends and prepends
158 for op in __setvar_keyword__: 158 for op in __setvar_keyword__:
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 364bdffff1..40b0c699ab 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -27,10 +27,13 @@ BitBake build tools.
27from __future__ import absolute_import 27from __future__ import absolute_import
28from __future__ import print_function 28from __future__ import print_function
29import os, re 29import os, re
30import logging
30import bb 31import bb
31from bb import data 32from bb import data
32from bb import persist_data 33from bb import persist_data
33 34
35logger = logging.getLogger("BitBake.Fetch")
36
34class MalformedUrl(Exception): 37class MalformedUrl(Exception):
35 """Exception raised when encountering an invalid url""" 38 """Exception raised when encountering an invalid url"""
36 39
@@ -117,9 +120,8 @@ def encodeurl(decoded):
117 return url 120 return url
118 121
119def uri_replace(uri, uri_find, uri_replace, d): 122def uri_replace(uri, uri_find, uri_replace, d):
120# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
121 if not uri or not uri_find or not uri_replace: 123 if not uri or not uri_find or not uri_replace:
122 bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing") 124 logger.debug(1, "uri_replace: passed an undefined value, not replacing")
123 uri_decoded = list(decodeurl(uri)) 125 uri_decoded = list(decodeurl(uri))
124 uri_find_decoded = list(decodeurl(uri_find)) 126 uri_find_decoded = list(decodeurl(uri_find))
125 uri_replace_decoded = list(decodeurl(uri_replace)) 127 uri_replace_decoded = list(decodeurl(uri_replace))
@@ -135,13 +137,8 @@ def uri_replace(uri, uri_find, uri_replace, d):
135 localfn = bb.fetch.localpath(uri, d) 137 localfn = bb.fetch.localpath(uri, d)
136 if localfn: 138 if localfn:
137 result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) 139 result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
138# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
139 else: 140 else:
140# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
141 return uri 141 return uri
142# else:
143# for j in i:
144# FIXME: apply replacements against options
145 return encodeurl(result_decoded) 142 return encodeurl(result_decoded)
146 143
147methods = [] 144methods = []
@@ -158,9 +155,9 @@ def fetcher_init(d):
158 # When to drop SCM head revisions controlled by user policy 155 # When to drop SCM head revisions controlled by user policy
159 srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" 156 srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
160 if srcrev_policy == "cache": 157 if srcrev_policy == "cache":
161 bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy) 158 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
162 elif srcrev_policy == "clear": 159 elif srcrev_policy == "clear":
163 bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy) 160 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
164 try: 161 try:
165 bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS") 162 bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
166 except: 163 except:
@@ -190,11 +187,11 @@ def fetcher_compare_revisons(d):
190 changed = False 187 changed = False
191 for key in data: 188 for key in data:
192 if key not in data2 or data2[key] != data[key]: 189 if key not in data2 or data2[key] != data[key]:
193 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key) 190 logger.debug(1, "%s changed", key)
194 changed = True 191 changed = True
195 return True 192 return True
196 else: 193 else:
197 bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key) 194 logger.debug(2, "%s did not change", key)
198 return False 195 return False
199 196
200# Function call order is usually: 197# Function call order is usually:
@@ -334,7 +331,7 @@ def checkstatus(d, urls = None):
334 for u in urls: 331 for u in urls:
335 ud = urldata[u] 332 ud = urldata[u]
336 m = ud.method 333 m = ud.method
337 bb.msg.debug(1, bb.msg.domain.Fetcher, "Testing URL %s" % u) 334 logger.debug(1, "Testing URL %s" % u)
338 # First try checking uri, u, from PREMIRRORS 335 # First try checking uri, u, from PREMIRRORS
339 mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) 336 mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
340 ret = try_mirrors(d, u, mirrors, True) 337 ret = try_mirrors(d, u, mirrors, True)
@@ -398,7 +395,7 @@ def get_srcrev(d):
398 scms.append(u) 395 scms.append(u)
399 396
400 if len(scms) == 0: 397 if len(scms) == 0:
401 bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI") 398 logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
402 raise ParameterError 399 raise ParameterError
403 400
404 if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": 401 if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
@@ -412,7 +409,7 @@ def get_srcrev(d):
412 # 409 #
413 format = bb.data.getVar('SRCREV_FORMAT', d, 1) 410 format = bb.data.getVar('SRCREV_FORMAT', d, 1)
414 if not format: 411 if not format:
415 bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.") 412 logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
416 raise ParameterError 413 raise ParameterError
417 414
418 for scm in scms: 415 for scm in scms:
@@ -454,7 +451,7 @@ def runfetchcmd(cmd, d, quiet = False):
454 if val: 451 if val:
455 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) 452 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
456 453
457 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd) 454 logger.debug(1, "Running %s", cmd)
458 455
459 # redirect stderr to stdout 456 # redirect stderr to stdout
460 stdout_handle = os.popen(cmd + " 2>&1", "r") 457 stdout_handle = os.popen(cmd + " 2>&1", "r")
@@ -490,7 +487,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
490 """ 487 """
491 fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) 488 fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
492 if not check and os.access(fpath, os.R_OK) and not force: 489 if not check and os.access(fpath, os.R_OK) and not force:
493 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % fpath) 490 logger.debug(1, "%s already exists, skipping checkout." % fpath)
494 return fpath 491 return fpath
495 492
496 ld = d.createCopy() 493 ld = d.createCopy()
@@ -500,7 +497,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
500 try: 497 try:
501 ud = FetchData(newuri, ld) 498 ud = FetchData(newuri, ld)
502 except bb.fetch.NoMethodError: 499 except bb.fetch.NoMethodError:
503 bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % uri) 500 logger.debug(1, "No method for %s", uri)
504 continue 501 continue
505 502
506 ud.setup_localpath(ld) 503 ud.setup_localpath(ld)
@@ -518,7 +515,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
518 bb.fetch.MD5SumError): 515 bb.fetch.MD5SumError):
519 import sys 516 import sys
520 (type, value, traceback) = sys.exc_info() 517 (type, value, traceback) = sys.exc_info()
521 bb.msg.debug(2, bb.msg.domain.Fetcher, "Mirror fetch failure: %s" % value) 518 logger.debug(2, "Mirror fetch failure: %s" % value)
522 removefile(ud.localpath) 519 removefile(ud.localpath)
523 continue 520 continue
524 return None 521 return None
@@ -654,7 +651,7 @@ class Fetch(object):
654 Check the status of a URL 651 Check the status of a URL
655 Assumes localpath was called first 652 Assumes localpath was called first
656 """ 653 """
657 bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url) 654 logger.info("URL %s could not be checked for status since no method exists.", url)
658 return True 655 return True
659 656
660 def getSRCDate(urldata, d): 657 def getSRCDate(urldata, d):
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
index 813d7d8c80..8b0bd9ff3a 100644
--- a/bitbake/lib/bb/fetch/bzr.py
+++ b/bitbake/lib/bb/fetch/bzr.py
@@ -25,11 +25,10 @@ BitBake 'Fetch' implementation for bzr.
25 25
26import os 26import os
27import sys 27import sys
28import logging
28import bb 29import bb
29from bb import data 30from bb import data
30from bb.fetch import Fetch 31from bb.fetch import Fetch, FetchError, runfetchcmd, logger
31from bb.fetch import FetchError
32from bb.fetch import runfetchcmd
33 32
34class Bzr(Fetch): 33class Bzr(Fetch):
35 def supports(self, url, ud, d): 34 def supports(self, url, ud, d):
@@ -93,16 +92,16 @@ class Bzr(Fetch):
93 92
94 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): 93 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
95 bzrcmd = self._buildbzrcommand(ud, d, "update") 94 bzrcmd = self._buildbzrcommand(ud, d, "update")
96 bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc) 95 logger.debug(1, "BZR Update %s", loc)
97 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) 96 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
98 runfetchcmd(bzrcmd, d) 97 runfetchcmd(bzrcmd, d)
99 else: 98 else:
100 os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir))) 99 os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
101 bzrcmd = self._buildbzrcommand(ud, d, "fetch") 100 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
102 bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc) 101 logger.debug(1, "BZR Checkout %s", loc)
103 bb.mkdirhier(ud.pkgdir) 102 bb.mkdirhier(ud.pkgdir)
104 os.chdir(ud.pkgdir) 103 os.chdir(ud.pkgdir)
105 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd) 104 logger.debug(1, "Running %s", bzrcmd)
106 runfetchcmd(bzrcmd, d) 105 runfetchcmd(bzrcmd, d)
107 106
108 os.chdir(ud.pkgdir) 107 os.chdir(ud.pkgdir)
@@ -130,7 +129,7 @@ class Bzr(Fetch):
130 """ 129 """
131 Return the latest upstream revision number 130 Return the latest upstream revision number
132 """ 131 """
133 bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url) 132 logger.debug(2, "BZR fetcher hitting network for %s", url)
134 133
135 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) 134 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
136 135
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 61976f7ef4..1064b09e11 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -27,11 +27,10 @@ BitBake build tools.
27# 27#
28 28
29import os 29import os
30import logging
30import bb 31import bb
31from bb import data 32from bb import data
32from bb.fetch import Fetch 33from bb.fetch import Fetch, FetchError, MissingParameterError, logger
33from bb.fetch import FetchError
34from bb.fetch import MissingParameterError
35 34
36class Cvs(Fetch): 35class Cvs(Fetch):
37 """ 36 """
@@ -136,21 +135,21 @@ class Cvs(Fetch):
136 cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) 135 cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
137 136
138 # create module directory 137 # create module directory
139 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") 138 logger.debug(2, "Fetch: checking for module directory")
140 pkg = data.expand('${PN}', d) 139 pkg = data.expand('${PN}', d)
141 pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) 140 pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
142 moddir = os.path.join(pkgdir, localdir) 141 moddir = os.path.join(pkgdir, localdir)
143 if os.access(os.path.join(moddir, 'CVS'), os.R_OK): 142 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
144 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) 143 logger.info("Update " + loc)
145 # update sources there 144 # update sources there
146 os.chdir(moddir) 145 os.chdir(moddir)
147 myret = os.system(cvsupdatecmd) 146 myret = os.system(cvsupdatecmd)
148 else: 147 else:
149 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 148 logger.info("Fetch " + loc)
150 # check out sources there 149 # check out sources there
151 bb.mkdirhier(pkgdir) 150 bb.mkdirhier(pkgdir)
152 os.chdir(pkgdir) 151 os.chdir(pkgdir)
153 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) 152 logger.debug(1, "Running %s", cvscmd)
154 myret = os.system(cvscmd) 153 myret = os.system(cvscmd)
155 154
156 if myret != 0 or not os.access(moddir, os.R_OK): 155 if myret != 0 or not os.access(moddir, os.R_OK):
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 9bd447ff8b..57d758dcda 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -25,6 +25,7 @@ import bb
25from bb import data 25from bb import data
26from bb.fetch import Fetch 26from bb.fetch import Fetch
27from bb.fetch import runfetchcmd 27from bb.fetch import runfetchcmd
28from bb.fetch import logger
28 29
29class Git(Fetch): 30class Git(Fetch):
30 """Class to fetch a module or modules from git repositories""" 31 """Class to fetch a module or modules from git repositories"""
@@ -153,7 +154,7 @@ class Git(Fetch):
153 os.chdir(ud.clonedir) 154 os.chdir(ud.clonedir)
154 mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) 155 mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
155 if mirror_tarballs != "0" or 'fullclone' in ud.parm: 156 if mirror_tarballs != "0" or 'fullclone' in ud.parm:
156 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") 157 logger.info("Creating tarball of git repository")
157 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) 158 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
158 159
159 if 'fullclone' in ud.parm: 160 if 'fullclone' in ud.parm:
@@ -185,7 +186,7 @@ class Git(Fetch):
185 runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d) 186 runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
186 187
187 os.chdir(codir) 188 os.chdir(codir)
188 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") 189 logger.info("Creating tarball of git checkout")
189 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d) 190 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
190 191
191 os.chdir(ud.clonedir) 192 os.chdir(ud.clonedir)
@@ -238,7 +239,7 @@ class Git(Fetch):
238 print("no repo") 239 print("no repo")
239 self.go(None, ud, d) 240 self.go(None, ud, d)
240 if not os.path.exists(ud.clonedir): 241 if not os.path.exists(ud.clonedir):
241 bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir)) 242 logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
242 return None 243 return None
243 244
244 245
@@ -250,5 +251,5 @@ class Git(Fetch):
250 os.chdir(cwd) 251 os.chdir(cwd)
251 252
252 buildindex = "%s" % output.split()[0] 253 buildindex = "%s" % output.split()[0]
253 bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev)) 254 logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
254 return buildindex 255 return buildindex
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
index efb3b5c76d..ab00d43033 100644
--- a/bitbake/lib/bb/fetch/hg.py
+++ b/bitbake/lib/bb/fetch/hg.py
@@ -26,12 +26,14 @@ BitBake 'Fetch' implementation for mercurial DRCS (hg).
26 26
27import os 27import os
28import sys 28import sys
29import logging
29import bb 30import bb
30from bb import data 31from bb import data
31from bb.fetch import Fetch 32from bb.fetch import Fetch
32from bb.fetch import FetchError 33from bb.fetch import FetchError
33from bb.fetch import MissingParameterError 34from bb.fetch import MissingParameterError
34from bb.fetch import runfetchcmd 35from bb.fetch import runfetchcmd
36from bb.fetch import logger
35 37
36class Hg(Fetch): 38class Hg(Fetch):
37 """Class to fetch a from mercurial repositories""" 39 """Class to fetch a from mercurial repositories"""
@@ -116,29 +118,29 @@ class Hg(Fetch):
116 def go(self, loc, ud, d): 118 def go(self, loc, ud, d):
117 """Fetch url""" 119 """Fetch url"""
118 120
119 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") 121 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
120 122
121 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): 123 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
122 updatecmd = self._buildhgcommand(ud, d, "pull") 124 updatecmd = self._buildhgcommand(ud, d, "pull")
123 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) 125 logger.info("Update " + loc)
124 # update sources there 126 # update sources there
125 os.chdir(ud.moddir) 127 os.chdir(ud.moddir)
126 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) 128 logger.debug(1, "Running %s", updatecmd)
127 runfetchcmd(updatecmd, d) 129 runfetchcmd(updatecmd, d)
128 130
129 else: 131 else:
130 fetchcmd = self._buildhgcommand(ud, d, "fetch") 132 fetchcmd = self._buildhgcommand(ud, d, "fetch")
131 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 133 logger.info("Fetch " + loc)
132 # check out sources there 134 # check out sources there
133 bb.mkdirhier(ud.pkgdir) 135 bb.mkdirhier(ud.pkgdir)
134 os.chdir(ud.pkgdir) 136 os.chdir(ud.pkgdir)
135 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) 137 logger.debug(1, "Running %s", fetchcmd)
136 runfetchcmd(fetchcmd, d) 138 runfetchcmd(fetchcmd, d)
137 139
138 # Even when we clone (fetch), we still need to update as hg's clone 140 # Even when we clone (fetch), we still need to update as hg's clone
139 # won't checkout the specified revision if its on a branch 141 # won't checkout the specified revision if its on a branch
140 updatecmd = self._buildhgcommand(ud, d, "update") 142 updatecmd = self._buildhgcommand(ud, d, "update")
141 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) 143 logger.debug(1, "Running %s", updatecmd)
142 runfetchcmd(updatecmd, d) 144 runfetchcmd(updatecmd, d)
143 145
144 os.chdir(ud.pkgdir) 146 os.chdir(ud.pkgdir)
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 882a2c4602..6aa9e45768 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -66,7 +66,7 @@ class Local(Fetch):
66 Check the status of the url 66 Check the status of the url
67 """ 67 """
68 if urldata.localpath.find("*") != -1: 68 if urldata.localpath.find("*") != -1:
69 bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url) 69 logger.info("URL %s looks like a glob and was therefore not checked.", url)
70 return True 70 return True
71 if os.path.exists(urldata.localpath): 71 if os.path.exists(urldata.localpath):
72 return True 72 return True
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
index ed773939b0..6fcb344ce0 100644
--- a/bitbake/lib/bb/fetch/osc.py
+++ b/bitbake/lib/bb/fetch/osc.py
@@ -8,6 +8,7 @@ Based on the svn "Fetch" implementation.
8 8
9import os 9import os
10import sys 10import sys
11import logging
11import bb 12import bb
12from bb import data 13from bb import data
13from bb.fetch import Fetch 14from bb.fetch import Fetch
@@ -91,22 +92,22 @@ class Osc(Fetch):
91 Fetch url 92 Fetch url
92 """ 93 """
93 94
94 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") 95 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
95 96
96 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): 97 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
97 oscupdatecmd = self._buildosccommand(ud, d, "update") 98 oscupdatecmd = self._buildosccommand(ud, d, "update")
98 bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc) 99 logger.info("Update "+ loc)
99 # update sources there 100 # update sources there
100 os.chdir(ud.moddir) 101 os.chdir(ud.moddir)
101 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd) 102 logger.debug(1, "Running %s", oscupdatecmd)
102 runfetchcmd(oscupdatecmd, d) 103 runfetchcmd(oscupdatecmd, d)
103 else: 104 else:
104 oscfetchcmd = self._buildosccommand(ud, d, "fetch") 105 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
105 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 106 logger.info("Fetch " + loc)
106 # check out sources there 107 # check out sources there
107 bb.mkdirhier(ud.pkgdir) 108 bb.mkdirhier(ud.pkgdir)
108 os.chdir(ud.pkgdir) 109 os.chdir(ud.pkgdir)
109 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd) 110 logger.debug(1, "Running %s", oscfetchcmd)
110 runfetchcmd(oscfetchcmd, d) 111 runfetchcmd(oscfetchcmd, d)
111 112
112 os.chdir(os.path.join(ud.pkgdir + ud.path)) 113 os.chdir(os.path.join(ud.pkgdir + ud.path))
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
index 1c74cff349..6f68d85614 100644
--- a/bitbake/lib/bb/fetch/perforce.py
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -27,10 +27,12 @@ BitBake build tools.
27 27
28from future_builtins import zip 28from future_builtins import zip
29import os 29import os
30import logging
30import bb 31import bb
31from bb import data 32from bb import data
32from bb.fetch import Fetch 33from bb.fetch import Fetch
33from bb.fetch import FetchError 34from bb.fetch import FetchError
35from bb.fetch import logger
34 36
35class Perforce(Fetch): 37class Perforce(Fetch):
36 def supports(self, url, ud, d): 38 def supports(self, url, ud, d):
@@ -86,10 +88,10 @@ class Perforce(Fetch):
86 depot += "@%s" % (p4date) 88 depot += "@%s" % (p4date)
87 89
88 p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) 90 p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
89 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) 91 logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
90 p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) 92 p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
91 cset = p4file.readline().strip() 93 cset = p4file.readline().strip()
92 bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset)) 94 logger.debug(1, "READ %s", cset)
93 if not cset: 95 if not cset:
94 return -1 96 return -1
95 97
@@ -155,13 +157,13 @@ class Perforce(Fetch):
155 p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) 157 p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
156 158
157 # create temp directory 159 # create temp directory
158 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") 160 logger.debug(2, "Fetch: creating temporary directory")
159 bb.mkdirhier(data.expand('${WORKDIR}', localdata)) 161 bb.mkdirhier(data.expand('${WORKDIR}', localdata))
160 data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) 162 data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
161 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") 163 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
162 tmpfile = tmppipe.readline().strip() 164 tmpfile = tmppipe.readline().strip()
163 if not tmpfile: 165 if not tmpfile:
164 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") 166 logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
165 raise FetchError(module) 167 raise FetchError(module)
166 168
167 if "label" in parm: 169 if "label" in parm:
@@ -171,12 +173,12 @@ class Perforce(Fetch):
171 depot = "%s@%s" % (depot, cset) 173 depot = "%s@%s" % (depot, cset)
172 174
173 os.chdir(tmpfile) 175 os.chdir(tmpfile)
174 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 176 logger.info("Fetch " + loc)
175 bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot)) 177 logger.info("%s%s files %s", p4cmd, p4opt, depot)
176 p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) 178 p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
177 179
178 if not p4file: 180 if not p4file:
179 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to get the P4 files from %s" % (depot)) 181 logger.error("Fetch: unable to get the P4 files from %s", depot)
180 raise FetchError(module) 182 raise FetchError(module)
181 183
182 count = 0 184 count = 0
@@ -194,7 +196,7 @@ class Perforce(Fetch):
194 count = count + 1 196 count = count + 1
195 197
196 if count == 0: 198 if count == 0:
197 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: No files gathered from the P4 fetch") 199 logger.error("Fetch: No files gathered from the P4 fetch")
198 raise FetchError(module) 200 raise FetchError(module)
199 201
200 myret = os.system("tar -czf %s %s" % (ud.localpath, module)) 202 myret = os.system("tar -czf %s %s" % (ud.localpath, module))
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py
index 883310b019..4794796814 100644
--- a/bitbake/lib/bb/fetch/repo.py
+++ b/bitbake/lib/bb/fetch/repo.py
@@ -72,7 +72,7 @@ class Repo(Fetch):
72 """Fetch url""" 72 """Fetch url"""
73 73
74 if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): 74 if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
75 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping repo init / sync." % ud.localpath) 75 logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
76 return 76 return
77 77
78 gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) 78 gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index a17ac04d21..2754971eba 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -26,11 +26,13 @@ This implementation is for svk. It is based on the svn implementation
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig 26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27 27
28import os 28import os
29import logging
29import bb 30import bb
30from bb import data 31from bb import data
31from bb.fetch import Fetch 32from bb.fetch import Fetch
32from bb.fetch import FetchError 33from bb.fetch import FetchError
33from bb.fetch import MissingParameterError 34from bb.fetch import MissingParameterError
35from bb.fetch import logger
34 36
35class Svk(Fetch): 37class Svk(Fetch):
36 """Class to fetch a module or modules from svk repositories""" 38 """Class to fetch a module or modules from svk repositories"""
@@ -72,19 +74,19 @@ class Svk(Fetch):
72 # create temp directory 74 # create temp directory
73 localdata = data.createCopy(d) 75 localdata = data.createCopy(d)
74 data.update_data(localdata) 76 data.update_data(localdata)
75 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") 77 logger.debug(2, "Fetch: creating temporary directory")
76 bb.mkdirhier(data.expand('${WORKDIR}', localdata)) 78 bb.mkdirhier(data.expand('${WORKDIR}', localdata))
77 data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) 79 data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
78 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") 80 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
79 tmpfile = tmppipe.readline().strip() 81 tmpfile = tmppipe.readline().strip()
80 if not tmpfile: 82 if not tmpfile:
81 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") 83 logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
82 raise FetchError(ud.module) 84 raise FetchError(ud.module)
83 85
84 # check out sources there 86 # check out sources there
85 os.chdir(tmpfile) 87 os.chdir(tmpfile)
86 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 88 logger.info("Fetch " + loc)
87 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) 89 logger.debug(1, "Running %s", svkcmd)
88 myret = os.system(svkcmd) 90 myret = os.system(svkcmd)
89 if myret != 0: 91 if myret != 0:
90 try: 92 try:
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index 375e8df055..34f8132257 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -25,12 +25,14 @@ BitBake 'Fetch' implementation for svn.
25 25
26import os 26import os
27import sys 27import sys
28import logging
28import bb 29import bb
29from bb import data 30from bb import data
30from bb.fetch import Fetch 31from bb.fetch import Fetch
31from bb.fetch import FetchError 32from bb.fetch import FetchError
32from bb.fetch import MissingParameterError 33from bb.fetch import MissingParameterError
33from bb.fetch import runfetchcmd 34from bb.fetch import runfetchcmd
35from bb.fetch import logger
34 36
35class Svn(Fetch): 37class Svn(Fetch):
36 """Class to fetch a module or modules from svn repositories""" 38 """Class to fetch a module or modules from svn repositories"""
@@ -136,22 +138,22 @@ class Svn(Fetch):
136 def go(self, loc, ud, d): 138 def go(self, loc, ud, d):
137 """Fetch url""" 139 """Fetch url"""
138 140
139 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") 141 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
140 142
141 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): 143 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
142 svnupdatecmd = self._buildsvncommand(ud, d, "update") 144 svnupdatecmd = self._buildsvncommand(ud, d, "update")
143 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) 145 logger.info("Update " + loc)
144 # update sources there 146 # update sources there
145 os.chdir(ud.moddir) 147 os.chdir(ud.moddir)
146 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd) 148 logger.debug(1, "Running %s", svnupdatecmd)
147 runfetchcmd(svnupdatecmd, d) 149 runfetchcmd(svnupdatecmd, d)
148 else: 150 else:
149 svnfetchcmd = self._buildsvncommand(ud, d, "fetch") 151 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
150 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 152 logger.info("Fetch " + loc)
151 # check out sources there 153 # check out sources there
152 bb.mkdirhier(ud.pkgdir) 154 bb.mkdirhier(ud.pkgdir)
153 os.chdir(ud.pkgdir) 155 os.chdir(ud.pkgdir)
154 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd) 156 logger.debug(1, "Running %s", svnfetchcmd)
155 runfetchcmd(svnfetchcmd, d) 157 runfetchcmd(svnfetchcmd, d)
156 158
157 os.chdir(ud.pkgdir) 159 os.chdir(ud.pkgdir)
@@ -179,7 +181,7 @@ class Svn(Fetch):
179 """ 181 """
180 Return the latest upstream revision number 182 Return the latest upstream revision number
181 """ 183 """
182 bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url) 184 logger.debug(2, "SVN fetcher hitting network for %s", url)
183 185
184 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) 186 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
185 187
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index 18503a03f7..4d4bdfd493 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -26,13 +26,11 @@ BitBake build tools.
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig 26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27 27
28import os 28import os
29import logging
29import bb 30import bb
30import urllib 31import urllib
31from bb import data 32from bb import data
32from bb.fetch import Fetch 33from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
33from bb.fetch import FetchError
34from bb.fetch import encodeurl, decodeurl
35from bb.fetch import runfetchcmd
36 34
37class Wget(Fetch): 35class Wget(Fetch):
38 """Class to fetch urls via 'wget'""" 36 """Class to fetch urls via 'wget'"""
@@ -69,15 +67,14 @@ class Wget(Fetch):
69 67
70 fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) 68 fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
71 fetchcmd = fetchcmd.replace("${FILE}", ud.basename) 69 fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
72 70 logger.info("fetch " + uri)
73 bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) 71 logger.debug(2, "executing " + fetchcmd)
74 bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
75 runfetchcmd(fetchcmd, d) 72 runfetchcmd(fetchcmd, d)
76 73
77 # Sanity check since wget can pretend it succeed when it didn't 74 # Sanity check since wget can pretend it succeed when it didn't
78 # Also, this used to happen if sourceforge sent us to the mirror page 75 # Also, this used to happen if sourceforge sent us to the mirror page
79 if not os.path.exists(ud.localpath) and not checkonly: 76 if not os.path.exists(ud.localpath) and not checkonly:
80 bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath)) 77 logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
81 return False 78 return False
82 79
83 return True 80 return True
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 254ba07a04..bd01d79028 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -111,19 +111,25 @@ def set_debug_domains(domainargs):
111def debug(level, msgdomain, msg, fn = None): 111def debug(level, msgdomain, msg, fn = None):
112 level = logging.DEBUG - (level - 1) 112 level = logging.DEBUG - (level - 1)
113 if not msgdomain: 113 if not msgdomain:
114 logger.log(level, msg) 114 logger.debug(level, msg)
115 else: 115 else:
116 loggers[msgdomain].log(level, msg) 116 loggers[msgdomain].debug(level, msg)
117 117
118def plain(msg, fn = None): 118def plain(msg, fn = None):
119 logger.log(logging.INFO + 1, msg) 119 logger.log(logging.INFO + 1, msg)
120 logger.plain(msg)
120 121
121def note(level, msgdomain, msg, fn = None): 122def note(level, msgdomain, msg, fn = None):
122 level = logging.INFO - (level - 1) 123 if level > 1:
123 if not msgdomain: 124 if not msgdomain:
124 logger.log(level, msg) 125 logger.verbose(msg)
126 else:
127 loggers[msgdomain].verbose(msg)
125 else: 128 else:
126 loggers[msgdomain].log(level, msg) 129 if not msgdomain:
130 logger.info(msg)
131 else:
132 loggers[msgdomain].info(msg)
127 133
128def warn(msgdomain, msg, fn = None): 134def warn(msgdomain, msg, fn = None):
129 if not msgdomain: 135 if not msgdomain:
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index da160ceb27..f9517f03f2 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -26,9 +26,14 @@ File parsers for the BitBake build tools.
26 26
27handlers = [] 27handlers = []
28 28
29import bb, os 29import os
30import logging
31import bb
30import bb.utils 32import bb.utils
31import bb.siggen 33import bb.siggen
34import bb.utils
35
36logger = logging.getLogger("BitBake.Parsing")
32 37
33class ParseError(Exception): 38class ParseError(Exception):
34 """Exception raised when parsing fails""" 39 """Exception raised when parsing fails"""
@@ -91,7 +96,7 @@ def resolve_file(fn, d):
91 raise IOError("file %s not found in %s" % (fn, bbpath)) 96 raise IOError("file %s not found in %s" % (fn, bbpath))
92 fn = newfn 97 fn = newfn
93 98
94 bb.msg.debug(2, bb.msg.domain.Parsing, "LOAD %s" % fn) 99 logger.debug(2, "LOAD %s", fn)
95 return fn 100 return fn
96 101
97# Used by OpenEmbedded metadata 102# Used by OpenEmbedded metadata
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 870ae65b0e..55ab485990 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -23,9 +23,13 @@
23 23
24from __future__ import absolute_import 24from __future__ import absolute_import
25from future_builtins import filter 25from future_builtins import filter
26import bb, re, string 26import re
27from bb import methodpool 27import string
28import logging
29import bb
28import itertools 30import itertools
31from bb import methodpool
32from bb.parse import logger
29 33
30__word__ = re.compile(r"\S+") 34__word__ = re.compile(r"\S+")
31__parsed_methods__ = bb.methodpool.get_parsed_dict() 35__parsed_methods__ = bb.methodpool.get_parsed_dict()
@@ -51,7 +55,7 @@ class IncludeNode(AstNode):
51 Include the file and evaluate the statements 55 Include the file and evaluate the statements
52 """ 56 """
53 s = bb.data.expand(self.what_file, data) 57 s = bb.data.expand(self.what_file, data)
54 bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (self.from_fn, self.from_lineno, s)) 58 logger.debug(2, "CONF %s:%s: including %s", self.from_fn, self.from_lineno, s)
55 59
56 # TODO: Cache those includes... maybe not here though 60 # TODO: Cache those includes... maybe not here though
57 if self.force: 61 if self.force:
@@ -365,7 +369,7 @@ def _expand_versions(versions):
365def multi_finalize(fn, d): 369def multi_finalize(fn, d):
366 appends = (d.getVar("__BBAPPEND", True) or "").split() 370 appends = (d.getVar("__BBAPPEND", True) or "").split()
367 for append in appends: 371 for append in appends:
368 bb.msg.debug(2, bb.msg.domain.Parsing, "Appending .bbappend file " + append + " to " + fn) 372 logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
369 bb.parse.BBHandler.handle(append, d, True) 373 bb.parse.BBHandler.handle(append, d, True)
370 374
371 safe_d = d 375 safe_d = d
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 51ad10fb92..31f44f7df3 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -27,11 +27,12 @@
27 27
28from __future__ import absolute_import 28from __future__ import absolute_import
29import re, bb, os 29import re, bb, os
30import logging
30import bb.fetch, bb.build, bb.utils 31import bb.fetch, bb.build, bb.utils
31from bb import data 32from bb import data
32 33
33from . import ConfHandler 34from . import ConfHandler
34from .. import resolve_file, ast 35from .. import resolve_file, ast, logger
35from .ConfHandler import include, init 36from .ConfHandler import include, init
36 37
37# For compatibility 38# For compatibility
@@ -76,7 +77,7 @@ def inherit(files, d):
76 file = os.path.join('classes', '%s.bbclass' % file) 77 file = os.path.join('classes', '%s.bbclass' % file)
77 78
78 if not file in __inherit_cache: 79 if not file in __inherit_cache:
79 bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file)) 80 logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
80 __inherit_cache.append( file ) 81 __inherit_cache.append( file )
81 data.setVar('__inherit_cache', __inherit_cache, d) 82 data.setVar('__inherit_cache', __inherit_cache, d)
82 include(fn, file, d, "inherit") 83 include(fn, file, d, "inherit")
@@ -115,9 +116,9 @@ def handle(fn, d, include):
115 116
116 117
117 if include == 0: 118 if include == 0:
118 bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)") 119 logger.debug(2, "BB %s: handle(data)", fn)
119 else: 120 else:
120 bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)") 121 logger.debug(2, "BB %s: handle(data, include)", fn)
121 122
122 (root, ext) = os.path.splitext(os.path.basename(fn)) 123 (root, ext) = os.path.splitext(os.path.basename(fn))
123 base_name = "%s%s" % (root, ext) 124 base_name = "%s%s" % (root, ext)
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 9128a2ef8f..2abd2fdc25 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -25,8 +25,9 @@
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26 26
27import re, bb.data, os 27import re, bb.data, os
28import logging
28import bb.utils 29import bb.utils
29from bb.parse import ParseError, resolve_file, ast 30from bb.parse import ParseError, resolve_file, ast, logger
30 31
31#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") 32#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
32__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") 33__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
@@ -68,7 +69,7 @@ def include(oldfn, fn, data, error_out):
68 except IOError: 69 except IOError:
69 if error_out: 70 if error_out:
70 raise ParseError("Could not %(error_out)s file %(fn)s" % vars() ) 71 raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
71 bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn) 72 logger.debug(2, "CONF file '%s' not found", fn)
72 73
73def handle(fn, data, include): 74def handle(fn, data, include):
74 init(data) 75 init(data)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 00f4929945..17975b64a5 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -15,9 +15,13 @@
15# with this program; if not, write to the Free Software Foundation, Inc., 15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 17
18import bb, os 18import os
19import logging
20import bb
19import bb.utils 21import bb.utils
20 22
23logger = logging.getLogger("BitBake.PersistData")
24
21try: 25try:
22 import sqlite3 26 import sqlite3
23except ImportError: 27except ImportError:
@@ -56,7 +60,7 @@ class PersistData:
56 bb.utils.mkdirhier(self.cachedir) 60 bb.utils.mkdirhier(self.cachedir)
57 61
58 self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3") 62 self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
59 bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile) 63 logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
60 64
61 connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None) 65 connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
62 persistent_database_connection["connection"] = connection 66 persistent_database_connection["connection"] = connection
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 58326f0398..d017d6006f 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -22,9 +22,12 @@
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24import re 24import re
25import logging
25from bb import data, utils 26from bb import data, utils
26import bb 27import bb
27 28
29logger = logging.getLogger("BitBake.Provider")
30
28class NoProvider(Exception): 31class NoProvider(Exception):
29 """Exception raised when no provider of a build dependency can be found""" 32 """Exception raised when no provider of a build dependency can be found"""
30 33
@@ -120,9 +123,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
120 if item: 123 if item:
121 itemstr = " (for item %s)" % item 124 itemstr = " (for item %s)" % item
122 if preferred_file is None: 125 if preferred_file is None:
123 bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr)) 126 logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
124 else: 127 else:
125 bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr)) 128 logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
126 129
127 return (preferred_ver, preferred_file) 130 return (preferred_ver, preferred_file)
128 131
@@ -189,7 +192,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
189 pkg_pn[pn] = [] 192 pkg_pn[pn] = []
190 pkg_pn[pn].append(p) 193 pkg_pn[pn].append(p)
191 194
192 bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys())) 195 logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
193 196
194 # First add PREFERRED_VERSIONS 197 # First add PREFERRED_VERSIONS
195 for pn in pkg_pn: 198 for pn in pkg_pn:
@@ -206,7 +209,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
206 eligible.append(preferred_versions[pn][1]) 209 eligible.append(preferred_versions[pn][1])
207 210
208 if len(eligible) == 0: 211 if len(eligible) == 0:
209 bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item) 212 logger.error("no eligible providers for %s", item)
210 return 0 213 return 0
211 214
212 # If pn == item, give it a slight default preference 215 # If pn == item, give it a slight default preference
@@ -242,13 +245,13 @@ def filterProviders(providers, item, cfgData, dataCache):
242 for p in eligible: 245 for p in eligible:
243 pn = dataCache.pkg_fn[p] 246 pn = dataCache.pkg_fn[p]
244 if dataCache.preferred[item] == pn: 247 if dataCache.preferred[item] == pn:
245 bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (pn, item)) 248 logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
246 eligible.remove(p) 249 eligible.remove(p)
247 eligible = [p] + eligible 250 eligible = [p] + eligible
248 foundUnique = True 251 foundUnique = True
249 break 252 break
250 253
251 bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible)) 254 logger.debug(1, "sorted providers for %s are: %s", item, eligible)
252 255
253 return eligible, foundUnique 256 return eligible, foundUnique
254 257
@@ -268,11 +271,11 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
268 pn = dataCache.pkg_fn[p] 271 pn = dataCache.pkg_fn[p]
269 provides = dataCache.pn_provides[pn] 272 provides = dataCache.pn_provides[pn]
270 for provide in provides: 273 for provide in provides:
271 bb.msg.note(2, bb.msg.domain.Provider, "checking PREFERRED_PROVIDER_%s" % (provide)) 274 logger.verbose("checking PREFERRED_PROVIDER_%s", provide)
272 prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1) 275 prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
273 if prefervar == pn: 276 if prefervar == pn:
274 var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) 277 var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
275 bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to %s" % (pn, item, var)) 278 logger.verbose("selecting %s to satisfy runtime %s due to %s", pn, item, var)
276 preferred_vars.append(var) 279 preferred_vars.append(var)
277 eligible.remove(p) 280 eligible.remove(p)
278 eligible = [p] + eligible 281 eligible = [p] + eligible
@@ -282,9 +285,9 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
282 numberPreferred = len(preferred) 285 numberPreferred = len(preferred)
283 286
284 if numberPreferred > 1: 287 if numberPreferred > 1:
285 bb.msg.error(bb.msg.domain.Provider, "Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s" % (preferred, item, preferred_vars)) 288 logger.error("Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s", preferred, item, preferred_vars)
286 289
287 bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible)) 290 logger.debug(1, "sorted providers for %s are: %s", item, eligible)
288 291
289 return eligible, numberPreferred 292 return eligible, numberPreferred
290 293
@@ -314,7 +317,7 @@ def getRuntimeProviders(dataCache, rdepend):
314 try: 317 try:
315 regexp = re.compile(pattern) 318 regexp = re.compile(pattern)
316 except: 319 except:
317 bb.msg.error(bb.msg.domain.Provider, "Error parsing re expression: %s" % pattern) 320 logger.error("Error parsing regular expression '%s'", pattern)
318 raise 321 raise
319 regexp_cache[pattern] = regexp 322 regexp_cache[pattern] = regexp
320 if regexp.match(rdepend): 323 if regexp.match(rdepend):
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 94b456a988..94b9cab5ed 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -22,22 +22,25 @@ Handles preparation and execution of a queue of tasks
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import bb, os, sys 25import os
26import sys
26import subprocess 27import subprocess
27import signal 28import signal
28import stat 29import stat
29import fcntl 30import fcntl
30import copy 31import copy
31import logging 32import logging
33import bb
32from bb import msg, data, event 34from bb import msg, data, event
33 35
34bblogger = logging.getLogger("BitBake") 36bblogger = logging.getLogger("BitBake")
37logger = logging.getLogger("BitBake.RunQueue")
35 38
36try: 39try:
37 import cPickle as pickle 40 import cPickle as pickle
38except ImportError: 41except ImportError:
39 import pickle 42 import pickle
40 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") 43 logger.info("Importing cPickle failed. Falling back to a very slow implementation.")
41 44
42class RunQueueStats: 45class RunQueueStats:
43 """ 46 """
@@ -363,14 +366,14 @@ class RunQueueData:
363 for task in range(numTasks): 366 for task in range(numTasks):
364 if task_done[task] is False or deps_left[task] != 0: 367 if task_done[task] is False or deps_left[task] != 0:
365 problem_tasks.append(task) 368 problem_tasks.append(task)
366 bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s) is not buildable\n" % (task, self.get_user_idstring(task))) 369 logger.debug(2, "Task %s (%s) is not buildable\n", task, self.get_user_idstring(task))
367 bb.msg.debug(2, bb.msg.domain.RunQueue, "(Complete marker was %s and the remaining dependency count was %s)\n\n" % (task_done[task], deps_left[task])) 370 logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n\n", task_done[task], deps_left[task])
368 371
369 if problem_tasks: 372 if problem_tasks:
370 message = "Unbuildable tasks were found.\n" 373 message = "Unbuildable tasks were found.\n"
371 message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n" 374 message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
372 message = message + "Identifying dependency loops (this may take a short while)...\n" 375 message = message + "Identifying dependency loops (this may take a short while)...\n"
373 bb.msg.error(bb.msg.domain.RunQueue, message) 376 logger.error(message)
374 377
375 msgs = self.circular_depchains_handler(problem_tasks) 378 msgs = self.circular_depchains_handler(problem_tasks)
376 379
@@ -398,7 +401,7 @@ class RunQueueData:
398 # Nothing to do 401 # Nothing to do
399 return 0 402 return 0
400 403
401 bb.msg.note(1, bb.msg.domain.RunQueue, "Preparing runqueue") 404 logger.info("Preparing runqueue")
402 405
403 # Step A - Work out a list of tasks to run 406 # Step A - Work out a list of tasks to run
404 # 407 #
@@ -445,7 +448,7 @@ class RunQueueData:
445 fn = taskData.fn_index[fnid] 448 fn = taskData.fn_index[fnid]
446 task_deps = self.dataCache.task_deps[fn] 449 task_deps = self.dataCache.task_deps[fn]
447 450
448 bb.msg.debug(2, bb.msg.domain.RunQueue, "Processing %s:%s" %(fn, taskData.tasks_name[task])) 451 logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
449 452
450 if fnid not in taskData.failed_fnids: 453 if fnid not in taskData.failed_fnids:
451 454
@@ -505,7 +508,7 @@ class RunQueueData:
505 # Rmove all self references 508 # Rmove all self references
506 if task in depends: 509 if task in depends:
507 newdep = [] 510 newdep = []
508 bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)) 511 logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
509 for dep in depends: 512 for dep in depends:
510 if task != dep: 513 if task != dep:
511 newdep.append(dep) 514 newdep.append(dep)
@@ -567,7 +570,7 @@ class RunQueueData:
567 # as active too. If the task is to be 'forced', clear its stamp. Once 570 # as active too. If the task is to be 'forced', clear its stamp. Once
568 # all active tasks are marked, prune the ones we don't need. 571 # all active tasks are marked, prune the ones we don't need.
569 572
570 bb.msg.note(2, bb.msg.domain.RunQueue, "Marking Active Tasks") 573 logger.verbose("Marking Active Tasks")
571 574
572 def mark_active(listid, depth): 575 def mark_active(listid, depth):
573 """ 576 """
@@ -600,7 +603,7 @@ class RunQueueData:
600 603
601 # Remove stamps for targets if force mode active 604 # Remove stamps for targets if force mode active
602 if self.cooker.configuration.force: 605 if self.cooker.configuration.force:
603 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn)) 606 logger.verbose("Remove stamp %s, %s", target[1], fn)
604 bb.build.del_stamp(target[1], self.dataCache, fn) 607 bb.build.del_stamp(target[1], self.dataCache, fn)
605 608
606 if fnid in taskData.failed_fnids: 609 if fnid in taskData.failed_fnids:
@@ -643,7 +646,7 @@ class RunQueueData:
643 else: 646 else:
644 bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.") 647 bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
645 648
646 bb.msg.note(2, bb.msg.domain.RunQueue, "Pruned %s inactive tasks, %s left" % (delcount, len(self.runq_fnid))) 649 logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
647 650
648 # Remap the dependencies to account for the deleted tasks 651 # Remap the dependencies to account for the deleted tasks
649 # Check we didn't delete a task we depend on 652 # Check we didn't delete a task we depend on
@@ -656,7 +659,7 @@ class RunQueueData:
656 newdeps.append(maps[origdep]) 659 newdeps.append(maps[origdep])
657 self.runq_depends[listid] = set(newdeps) 660 self.runq_depends[listid] = set(newdeps)
658 661
659 bb.msg.note(2, bb.msg.domain.RunQueue, "Assign Weightings") 662 logger.verbose("Assign Weightings")
660 663
661 # Generate a list of reverse dependencies to ease future calculations 664 # Generate a list of reverse dependencies to ease future calculations
662 for listid in range(len(self.runq_fnid)): 665 for listid in range(len(self.runq_fnid)):
@@ -675,7 +678,7 @@ class RunQueueData:
675 #self.dump_data(taskData) 678 #self.dump_data(taskData)
676 bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid])) 679 bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
677 680
678 bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints)) 681 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
679 682
680 # Calculate task weights 683 # Calculate task weights
681 # Check of higher length circular dependencies 684 # Check of higher length circular dependencies
@@ -698,9 +701,7 @@ class RunQueueData:
698 for prov in prov_list: 701 for prov in prov_list:
699 if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist: 702 if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
700 error = True 703 error = True
701 bb.msg.error(bb.msg.domain.RunQueue, "Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should." % (prov, " ".join(prov_list[prov]))) 704 logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))
702 #if error:
703 # bb.msg.fatal(bb.msg.domain.RunQueue, "Corrupted metadata configuration detected, aborting...")
704 705
705 706
706 # Create a whitelist usable by the stamp checks 707 # Create a whitelist usable by the stamp checks
@@ -762,20 +763,20 @@ class RunQueueData:
762 """ 763 """
763 Dump some debug information on the internal data structures 764 Dump some debug information on the internal data structures
764 """ 765 """
765 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:") 766 logger.debug(3, "run_tasks:")
766 for task in range(len(self.rqdata.runq_task)): 767 for task in range(len(self.rqdata.runq_task)):
767 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 768 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
768 taskQueue.fn_index[self.rqdata.runq_fnid[task]], 769 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
769 self.rqdata.runq_task[task], 770 self.rqdata.runq_task[task],
770 self.rqdata.runq_weight[task], 771 self.rqdata.runq_weight[task],
771 self.rqdata.runq_depends[task], 772 self.rqdata.runq_depends[task],
772 self.rqdata.runq_revdeps[task])) 773 self.rqdata.runq_revdeps[task]))
773 774
774 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:") 775 logger.debug(3, "sorted_tasks:")
775 for task1 in range(len(self.rqdata.runq_task)): 776 for task1 in range(len(self.rqdata.runq_task)):
776 if task1 in self.prio_map: 777 if task1 in self.prio_map:
777 task = self.prio_map[task1] 778 task = self.prio_map[task1]
778 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 779 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
779 taskQueue.fn_index[self.rqdata.runq_fnid[task]], 780 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
780 self.rqdata.runq_task[task], 781 self.rqdata.runq_task[task],
781 self.rqdata.runq_weight[task], 782 self.rqdata.runq_weight[task],
@@ -913,12 +914,12 @@ class RunQueue:
913 914
914 # If the stamp is missing its not current 915 # If the stamp is missing its not current
915 if not os.access(stampfile, os.F_OK): 916 if not os.access(stampfile, os.F_OK):
916 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s not available\n" % stampfile) 917 logger.debug(2, "Stampfile %s not available\n" % stampfile)
917 return False 918 return False
918 # If its a 'nostamp' task, it's not current 919 # If its a 'nostamp' task, it's not current
919 taskdep = self.rqdata.dataCache.task_deps[fn] 920 taskdep = self.rqdata.dataCache.task_deps[fn]
920 if 'nostamp' in taskdep and taskname in taskdep['nostamp']: 921 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
921 bb.msg.debug(2, bb.msg.domain.RunQueue, "%s.%s is nostamp\n" % (fn, taskname)) 922 logger.debug(2, "%s.%s is nostamp\n" % (fn, taskname))
922 return False 923 return False
923 924
924 if taskname.endswith("_setscene"): 925 if taskname.endswith("_setscene"):
@@ -938,10 +939,10 @@ class RunQueue:
938 continue 939 continue
939 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): 940 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
940 if not t2: 941 if not t2:
941 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s does not exist" % (stampfile2)) 942 logger.debug(2, "Stampfile %s does not exist" % (stampfile2))
942 iscurrent = False 943 iscurrent = False
943 if t1 < t2: 944 if t1 < t2:
944 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile, stampfile2)) 945 logger.debug(2, "Stampfile %s < %s" % (stampfile, stampfile2))
945 iscurrent = False 946 iscurrent = False
946 947
947 return iscurrent 948 return iscurrent
@@ -972,7 +973,7 @@ class RunQueue:
972 retval = self.rqexe.execute() 973 retval = self.rqexe.execute()
973 974
974 if self.state is runQueueRunInit: 975 if self.state is runQueueRunInit:
975 bb.msg.note(1, bb.msg.domain.RunQueue, "Executing RunQueue Tasks") 976 logger.info("Executing RunQueue Tasks")
976 self.rqexe = RunQueueExecuteTasks(self) 977 self.rqexe = RunQueueExecuteTasks(self)
977 self.state = runQueueRunning 978 self.state = runQueueRunning
978 979
@@ -991,7 +992,7 @@ class RunQueue:
991 992
992 if self.state is runQueueComplete: 993 if self.state is runQueueComplete:
993 # All done 994 # All done
994 bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)) 995 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
995 return False 996 return False
996 997
997 if self.state is runQueueChildProcess: 998 if self.state is runQueueChildProcess:
@@ -1059,7 +1060,7 @@ class RunQueueExecute:
1059 1060
1060 def finish_now(self): 1061 def finish_now(self):
1061 if self.stats.active: 1062 if self.stats.active:
1062 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGTERM to remaining %s tasks" % self.stats.active) 1063 logger.info("Sending SIGTERM to remaining %s tasks", self.stats.active)
1063 for k, v in self.build_pids.iteritems(): 1064 for k, v in self.build_pids.iteritems():
1064 try: 1065 try:
1065 os.kill(-k, signal.SIGTERM) 1066 os.kill(-k, signal.SIGTERM)
@@ -1101,7 +1102,7 @@ class RunQueueExecute:
1101 fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split() 1102 fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
1102 for p in fakedirs: 1103 for p in fakedirs:
1103 bb.mkdirhier(p) 1104 bb.mkdirhier(p)
1104 bb.msg.debug(2, bb.msg.domain.RunQueue, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs)) 1105 logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
1105 1106
1106 env['PATH'] = self.cooker.configuration.initial_path 1107 env['PATH'] = self.cooker.configuration.initial_path
1107 1108
@@ -1220,7 +1221,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1220 for scheduler in self.rqdata.schedulers: 1221 for scheduler in self.rqdata.schedulers:
1221 if self.scheduler == scheduler.name: 1222 if self.scheduler == scheduler.name:
1222 self.sched = scheduler(self, self.rqdata) 1223 self.sched = scheduler(self, self.rqdata)
1223 bb.msg.debug(1, bb.msg.domain.RunQueue, "Using runqueue scheduler '%s'" % scheduler.name) 1224 logger.debug(1, "Using runqueue scheduler '%s'" % scheduler.name)
1224 break 1225 break
1225 else: 1226 else:
1226 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % 1227 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@@ -1246,7 +1247,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1246 self.runq_buildable[revdep] = 1 1247 self.runq_buildable[revdep] = 1
1247 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]] 1248 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
1248 taskname = self.rqdata.runq_task[revdep] 1249 taskname = self.rqdata.runq_task[revdep]
1249 bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname)) 1250 logger.debug(1, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
1250 1251
1251 def task_complete(self, task): 1252 def task_complete(self, task):
1252 self.stats.taskCompleted() 1253 self.stats.taskCompleted()
@@ -1258,7 +1259,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1258 Called when a task has failed 1259 Called when a task has failed
1259 Updates the state engine with the failure 1260 Updates the state engine with the failure
1260 """ 1261 """
1261 bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed with %s" % (task, self.rqdata.get_user_idstring(task), exitcode)) 1262 logger.error("Task %s (%s) failed with %s" % (task, self.rqdata.get_user_idstring(task), exitcode))
1262 self.stats.taskFailed() 1263 self.stats.taskFailed()
1263 fnid = self.rqdata.runq_fnid[task] 1264 fnid = self.rqdata.runq_fnid[task]
1264 self.failed_fnids.append(fnid) 1265 self.failed_fnids.append(fnid)
@@ -1287,7 +1288,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1287 1288
1288 taskname = self.rqdata.runq_task[task] 1289 taskname = self.rqdata.runq_task[task]
1289 if self.rq.check_stamp_task(task, taskname): 1290 if self.rq.check_stamp_task(task, taskname):
1290 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task))) 1291 logger.debug(2, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task)))
1291 self.task_skip(task) 1292 self.task_skip(task)
1292 return True 1293 return True
1293 1294
@@ -1295,8 +1296,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1295 1296
1296 taskdep = self.rqdata.dataCache.task_deps[fn] 1297 taskdep = self.rqdata.dataCache.task_deps[fn]
1297 if 'noexec' in taskdep and taskname in taskdep['noexec']: 1298 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1298 bb.msg.note(1, bb.msg.domain.RunQueue, 1299 logger.info("Noexec task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1299 "Noexec task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1300 self.stats.total, 1300 self.stats.total,
1301 task, 1301 task,
1302 self.rqdata.get_user_idstring(task))) 1302 self.rqdata.get_user_idstring(task)))
@@ -1306,8 +1306,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
1306 self.task_complete(task) 1306 self.task_complete(task)
1307 return True 1307 return True
1308 1308
1309 bb.msg.note(1, bb.msg.domain.RunQueue, 1309 logger.info("Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1310 "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1311 self.stats.total, 1310 self.stats.total,
1312 task, 1311 task,
1313 self.rqdata.get_user_idstring(task))) 1312 self.rqdata.get_user_idstring(task)))
@@ -1334,11 +1333,11 @@ class RunQueueExecuteTasks(RunQueueExecute):
1334 # Sanity Checks 1333 # Sanity Checks
1335 for task in range(self.stats.total): 1334 for task in range(self.stats.total):
1336 if self.runq_buildable[task] == 0: 1335 if self.runq_buildable[task] == 0:
1337 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task) 1336 logger.error("Task %s never buildable!" % task)
1338 if self.runq_running[task] == 0: 1337 if self.runq_running[task] == 0:
1339 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task) 1338 logger.error("Task %s never ran!" % task)
1340 if self.runq_complete[task] == 0: 1339 if self.runq_complete[task] == 0:
1341 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task) 1340 logger.error("Task %s never completed!" % task)
1342 self.rq.state = runQueueComplete 1341 self.rq.state = runQueueComplete
1343 return True 1342 return True
1344 1343
@@ -1461,12 +1460,12 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
1461 1460
1462 for task in range(len(self.sq_revdeps)): 1461 for task in range(len(self.sq_revdeps)):
1463 if task not in valid_new and task not in noexec: 1462 if task not in valid_new and task not in noexec:
1464 bb.msg.debug(2, bb.msg.domain.RunQueue, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task]))) 1463 logger.debug(2, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
1465 self.task_failoutright(task) 1464 self.task_failoutright(task)
1466 1465
1467 #print(str(valid)) 1466 #print(str(valid))
1468 1467
1469 bb.msg.note(1, bb.msg.domain.RunQueue, "Executing SetScene Tasks") 1468 logger.info("Executing SetScene Tasks")
1470 1469
1471 self.rq.state = runQueueSceneRun 1470 self.rq.state = runQueueSceneRun
1472 1471
@@ -1484,7 +1483,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
1484 """ 1483 """
1485 1484
1486 index = self.rqdata.runq_setscene[task] 1485 index = self.rqdata.runq_setscene[task]
1487 bb.msg.debug(1, bb.msg.domain.RunQueue, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index)) 1486 logger.debug(1, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index))
1488 1487
1489 self.scenequeue_covered.add(task) 1488 self.scenequeue_covered.add(task)
1490 self.scenequeue_updatecounters(task) 1489 self.scenequeue_updatecounters(task)
@@ -1539,7 +1538,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
1539 1538
1540 taskname = self.rqdata.runq_task[realtask] + "_setscene" 1539 taskname = self.rqdata.runq_task[realtask] + "_setscene"
1541 if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]): 1540 if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
1542 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task))) 1541 logger.debug(2, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task)))
1543 self.task_failoutright(task) 1542 self.task_failoutright(task)
1544 return True 1543 return True
1545 1544
@@ -1550,12 +1549,11 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
1550 return True 1549 return True
1551 1550
1552 if self.rq.check_stamp_task(realtask, taskname): 1551 if self.rq.check_stamp_task(realtask, taskname):
1553 bb.msg.debug(2, bb.msg.domain.RunQueue, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask))) 1552 logger.debug(2, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask)))
1554 self.task_skip(task) 1553 self.task_skip(task)
1555 return True 1554 return True
1556 1555
1557 bb.msg.note(1, bb.msg.domain.RunQueue, 1556 logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1558 "Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
1559 self.stats.total, fn, taskname)) 1557 self.stats.total, fn, taskname))
1560 1558
1561 pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname) 1559 pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index e31f967850..7dc1c0c82f 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -23,8 +23,11 @@ Task data collection and handling
23# with this program; if not, write to the Free Software Foundation, Inc., 23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 25
26import logging
26import bb 27import bb
27 28
29logger = logging.getLogger("BitBake.TaskData")
30
28def re_match_strings(target, strings): 31def re_match_strings(target, strings):
29 """ 32 """
30 Whether or not the string 'target' matches 33 Whether or not the string 'target' matches
@@ -182,7 +185,7 @@ class TaskData:
182 if not fnid in self.depids: 185 if not fnid in self.depids:
183 dependids = {} 186 dependids = {}
184 for depend in dataCache.deps[fn]: 187 for depend in dataCache.deps[fn]:
185 bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn)) 188 logger.debug(2, "Added dependency %s for %s", depend, fn)
186 dependids[self.getbuild_id(depend)] = None 189 dependids[self.getbuild_id(depend)] = None
187 self.depids[fnid] = dependids.keys() 190 self.depids[fnid] = dependids.keys()
188 191
@@ -193,11 +196,11 @@ class TaskData:
193 rrecs = dataCache.runrecs[fn] 196 rrecs = dataCache.runrecs[fn]
194 for package in rdepends: 197 for package in rdepends:
195 for rdepend in bb.utils.explode_deps(rdepends[package]): 198 for rdepend in bb.utils.explode_deps(rdepends[package]):
196 bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn)) 199 logger.debug(2, "Added runtime dependency %s for %s", rdepend, fn)
197 rdependids[self.getrun_id(rdepend)] = None 200 rdependids[self.getrun_id(rdepend)] = None
198 for package in rrecs: 201 for package in rrecs:
199 for rdepend in bb.utils.explode_deps(rrecs[package]): 202 for rdepend in bb.utils.explode_deps(rrecs[package]):
200 bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn)) 203 logger.debug(2, "Added runtime recommendation %s for %s", rdepend, fn)
201 rdependids[self.getrun_id(rdepend)] = None 204 rdependids[self.getrun_id(rdepend)] = None
202 self.rdepids[fnid] = rdependids.keys() 205 self.rdepids[fnid] = rdependids.keys()
203 206
@@ -397,7 +400,7 @@ class TaskData:
397 fnid = self.getfn_id(fn) 400 fnid = self.getfn_id(fn)
398 if fnid in self.failed_fnids: 401 if fnid in self.failed_fnids:
399 continue 402 continue
400 bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item)) 403 logger.debug(2, "adding %s to satisfy %s", fn, item)
401 self.add_build_target(fn, item) 404 self.add_build_target(fn, item)
402 self.add_tasks(fn, dataCache) 405 self.add_tasks(fn, dataCache)
403 406
@@ -450,7 +453,7 @@ class TaskData:
450 fnid = self.getfn_id(fn) 453 fnid = self.getfn_id(fn)
451 if fnid in self.failed_fnids: 454 if fnid in self.failed_fnids:
452 continue 455 continue
453 bb.msg.debug(2, bb.msg.domain.Provider, "adding '%s' to satisfy runtime '%s'" % (fn, item)) 456 logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
454 self.add_runtime_target(fn, item) 457 self.add_runtime_target(fn, item)
455 self.add_tasks(fn, dataCache) 458 self.add_tasks(fn, dataCache)
456 459
@@ -463,7 +466,7 @@ class TaskData:
463 """ 466 """
464 if fnid in self.failed_fnids: 467 if fnid in self.failed_fnids:
465 return 468 return
466 bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid]) 469 logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
467 self.failed_fnids.append(fnid) 470 self.failed_fnids.append(fnid)
468 for target in self.build_targets: 471 for target in self.build_targets:
469 if fnid in self.build_targets[target]: 472 if fnid in self.build_targets[target]:
@@ -485,7 +488,7 @@ class TaskData:
485 missing_list = [self.build_names_index[targetid]] 488 missing_list = [self.build_names_index[targetid]]
486 else: 489 else:
487 missing_list = [self.build_names_index[targetid]] + missing_list 490 missing_list = [self.build_names_index[targetid]] + missing_list
488 bb.msg.note(2, bb.msg.domain.Provider, "Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list)) 491 providerlog.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
489 self.failed_deps.append(targetid) 492 self.failed_deps.append(targetid)
490 dependees = self.get_dependees(targetid) 493 dependees = self.get_dependees(targetid)
491 for fnid in dependees: 494 for fnid in dependees:
@@ -498,7 +501,7 @@ class TaskData:
498 501
499 if self.abort and targetid in self.external_targets: 502 if self.abort and targetid in self.external_targets:
500 target = self.build_names_index[targetid] 503 target = self.build_names_index[targetid]
501 bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (target, missing_list)) 504 providerlog.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
502 raise bb.providers.NoProvider(target) 505 raise bb.providers.NoProvider(target)
503 506
504 def remove_runtarget(self, targetid, missing_list = []): 507 def remove_runtarget(self, targetid, missing_list = []):
@@ -511,7 +514,7 @@ class TaskData:
511 else: 514 else:
512 missing_list = [self.run_names_index[targetid]] + missing_list 515 missing_list = [self.run_names_index[targetid]] + missing_list
513 516
514 bb.msg.note(1, bb.msg.domain.Provider, "Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.run_names_index[targetid], missing_list)) 517 providerlog.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
515 self.failed_rdeps.append(targetid) 518 self.failed_rdeps.append(targetid)
516 dependees = self.get_rdependees(targetid) 519 dependees = self.get_rdependees(targetid)
517 for fnid in dependees: 520 for fnid in dependees:
@@ -521,7 +524,7 @@ class TaskData:
521 """ 524 """
522 Resolve all unresolved build and runtime targets 525 Resolve all unresolved build and runtime targets
523 """ 526 """
524 bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies") 527 logger.info("Resolving any missing task queue dependencies")
525 while True: 528 while True:
526 added = 0 529 added = 0
527 for target in self.get_unresolved_build_targets(dataCache): 530 for target in self.get_unresolved_build_targets(dataCache):
@@ -539,7 +542,7 @@ class TaskData:
539 added = added + 1 542 added = added + 1
540 except bb.providers.NoRProvider: 543 except bb.providers.NoRProvider:
541 self.remove_runtarget(self.getrun_id(target)) 544 self.remove_runtarget(self.getrun_id(target))
542 bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependencies") 545 logger.debug(1, "Resolved " + str(added) + " extra dependencies")
543 if added == 0: 546 if added == 0:
544 break 547 break
545 # self.dump_data() 548 # self.dump_data()
@@ -548,40 +551,40 @@ class TaskData:
548 """ 551 """
549 Dump some debug information on the internal data structures 552 Dump some debug information on the internal data structures
550 """ 553 """
551 bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:") 554 logger.debug(3, "build_names:")
552 bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index)) 555 logger.debug(3, ", ".join(self.build_names_index))
553 556
554 bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:") 557 logger.debug(3, "run_names:")
555 bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index)) 558 logger.debug(3, ", ".join(self.run_names_index))
556 559
557 bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:") 560 logger.debug(3, "build_targets:")
558 for buildid in range(len(self.build_names_index)): 561 for buildid in range(len(self.build_names_index)):
559 target = self.build_names_index[buildid] 562 target = self.build_names_index[buildid]
560 targets = "None" 563 targets = "None"
561 if buildid in self.build_targets: 564 if buildid in self.build_targets:
562 targets = self.build_targets[buildid] 565 targets = self.build_targets[buildid]
563 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets)) 566 logger.debug(3, " (%s)%s: %s", buildid, target, targets)
564 567
565 bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:") 568 logger.debug(3, "run_targets:")
566 for runid in range(len(self.run_names_index)): 569 for runid in range(len(self.run_names_index)):
567 target = self.run_names_index[runid] 570 target = self.run_names_index[runid]
568 targets = "None" 571 targets = "None"
569 if runid in self.run_targets: 572 if runid in self.run_targets:
570 targets = self.run_targets[runid] 573 targets = self.run_targets[runid]
571 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets)) 574 logger.debug(3, " (%s)%s: %s", runid, target, targets)
572 575
573 bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:") 576 logger.debug(3, "tasks:")
574 for task in range(len(self.tasks_name)): 577 for task in range(len(self.tasks_name)):
575 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % ( 578 logger.debug(3, " (%s)%s - %s: %s",
576 task, 579 task,
577 self.fn_index[self.tasks_fnid[task]], 580 self.fn_index[self.tasks_fnid[task]],
578 self.tasks_name[task], 581 self.tasks_name[task],
579 self.tasks_tdepends[task])) 582 self.tasks_tdepends[task])
580 583
581 bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):") 584 logger.debug(3, "dependency ids (per fn):")
582 for fnid in self.depids: 585 for fnid in self.depids:
583 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.depids[fnid])) 586 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
584 587
585 bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):") 588 logger.debug(3, "runtime dependency ids (per fn):")
586 for fnid in self.rdepids: 589 for fnid in self.rdepids:
587 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid])) 590 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 8330c80b72..47fef8c1a9 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -21,11 +21,14 @@ BitBake Utility Functions
21 21
22import re, fcntl, os, string, stat, shutil, time 22import re, fcntl, os, string, stat, shutil, time
23import sys 23import sys
24import bb
25import errno 24import errno
25import logging
26import bb
26import bb.msg 27import bb.msg
27from commands import getstatusoutput 28from commands import getstatusoutput
28 29
30logger = logging.getLogger("BitBake.Util")
31
29# Version comparison 32# Version comparison
30separators = ".-" 33separators = ".-"
31 34
@@ -307,9 +310,9 @@ def _print_trace(body, line):
307 max_line = min(line + 4, len(body)) 310 max_line = min(line + 4, len(body))
308 for i in range(min_line, max_line + 1): 311 for i in range(min_line, max_line + 1):
309 if line == i: 312 if line == i:
310 bb.msg.error(bb.msg.domain.Util, " *** %.4d:%s" % (i, body[i-1]) ) 313 logger.error(" *** %.4d:%s" % (i, body[i-1]) )
311 else: 314 else:
312 bb.msg.error(bb.msg.domain.Util, " %.4d:%s" % (i, body[i-1]) ) 315 logger.error(" %.4d:%s" % (i, body[i-1]) )
313 316
314 317
315def better_compile(text, file, realfile, mode = "exec"): 318def better_compile(text, file, realfile, mode = "exec"):
@@ -322,16 +325,16 @@ def better_compile(text, file, realfile, mode = "exec"):
322 except Exception as e: 325 except Exception as e:
323 # split the text into lines again 326 # split the text into lines again
324 body = text.split('\n') 327 body = text.split('\n')
325 bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: %s" % (realfile)) 328 logger.error("Error in compiling python function in: %s" % (realfile))
326 bb.msg.error(bb.msg.domain.Util, str(e)) 329 logger.error(str(e))
327 if e.lineno: 330 if e.lineno:
328 bb.msg.error(bb.msg.domain.Util, "The lines leading to this error were:") 331 logger.error("The lines leading to this error were:")
329 bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1])) 332 logger.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
330 _print_trace(body, e.lineno) 333 _print_trace(body, e.lineno)
331 else: 334 else:
332 bb.msg.error(bb.msg.domain.Util, "The function causing this error was:") 335 logger.error("The function causing this error was:")
333 for line in body: 336 for line in body:
334 bb.msg.error(bb.msg.domain.Util, line) 337 logger.error(line)
335 raise 338 raise
336 339
337def better_exec(code, context, text, realfile = "<code>"): 340def better_exec(code, context, text, realfile = "<code>"):
@@ -351,9 +354,11 @@ def better_exec(code, context, text, realfile = "<code>"):
351 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: 354 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
352 raise 355 raise
353 356
357 logger.exception("Error executing python function in '%s'", code.co_filename)
358
354 # print the Header of the Error Message 359 # print the Header of the Error Message
355 bb.msg.error(bb.msg.domain.Util, "There was an error when executing a python function in: %s" % code.co_filename) 360 logger.error("There was an error when executing a python function in: %s" % code.co_filename)
356 bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t, value)) 361 logger.error("Exception:%s Message:%s" % (t, value))
357 362
358 # Strip 'us' from the stack (better_exec call) 363 # Strip 'us' from the stack (better_exec call)
359 tb = tb.tb_next 364 tb = tb.tb_next
@@ -364,13 +369,13 @@ def better_exec(code, context, text, realfile = "<code>"):
364 369
365 tbextract = traceback.extract_tb(tb) 370 tbextract = traceback.extract_tb(tb)
366 tbformat = "\n".join(traceback.format_list(tbextract)) 371 tbformat = "\n".join(traceback.format_list(tbextract))
367 bb.msg.error(bb.msg.domain.Util, "The stack trace of python calls that resulted in the exception/failure was:") 372 logger.error("The stack trace of python calls that resulted in the exception/failure was:")
368 for line in tbformat.split('\n'): 373 for line in tbformat.split('\n'):
369 bb.msg.error(bb.msg.domain.Util, line) 374 logger.error(line)
370 375
371 bb.msg.error(bb.msg.domain.Util, "The code that was being executed was:") 376 logger.error("The code that was being executed was:")
372 _print_trace(textarray, linefailed) 377 _print_trace(textarray, linefailed)
373 bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2])) 378 logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
374 379
375 # See if this is a function we constructed and has calls back into other functions in 380 # See if this is a function we constructed and has calls back into other functions in
376 # "text". If so, try and improve the context of the error by diving down the trace 381 # "text". If so, try and improve the context of the error by diving down the trace
@@ -379,7 +384,7 @@ def better_exec(code, context, text, realfile = "<code>"):
379 while nexttb is not None: 384 while nexttb is not None:
380 if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]: 385 if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
381 _print_trace(textarray, tbextract[level+1][1]) 386 _print_trace(textarray, tbextract[level+1][1])
382 bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])) 387 logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
383 else: 388 else:
384 break 389 break
385 nexttb = tb.tb_next 390 nexttb = tb.tb_next
@@ -400,11 +405,11 @@ def lockfile(name):
400 """ 405 """
401 path = os.path.dirname(name) 406 path = os.path.dirname(name)
402 if not os.path.isdir(path): 407 if not os.path.isdir(path):
403 bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path) 408 logger.error("Lockfile path '%s' does not exist", path)
404 sys.exit(1) 409 sys.exit(1)
405 410
406 if not os.access(path, os.W_OK): 411 if not os.access(path, os.W_OK):
407 bb.msg.error(bb.msg.domain.Util, "Error, lockfile path is not writable!: %s" % path) 412 logger.error("Error, lockfile path is not writable!: %s" % path)
408 sys.exit(1) 413 sys.exit(1)
409 414
410 while True: 415 while True:
@@ -534,7 +539,7 @@ def filter_environment(good_vars):
534 del os.environ[key] 539 del os.environ[key]
535 540
536 if len(removed_vars): 541 if len(removed_vars):
537 bb.msg.debug(1, bb.msg.domain.Util, "Removed the following variables from the environment: %s" % (", ".join(removed_vars))) 542 logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars))
538 543
539 return removed_vars 544 return removed_vars
540 545
@@ -604,10 +609,10 @@ def mkdirhier(dir):
604 directory already exists like os.makedirs 609 directory already exists like os.makedirs
605 """ 610 """
606 611
607 bb.msg.debug(3, bb.msg.domain.Util, "mkdirhier(%s)" % dir) 612 logger.debug(3, "mkdirhier(%s)", dir)
608 try: 613 try:
609 os.makedirs(dir) 614 os.makedirs(dir)
610 bb.msg.debug(2, bb.msg.domain.Util, "created " + dir) 615 logger.debug(2, "created " + dir)
611 except OSError as e: 616 except OSError as e:
612 if e.errno != errno.EEXIST: 617 if e.errno != errno.EEXIST:
613 raise e 618 raise e