Diffstat (limited to 'bitbake'): 63 files changed, 390 insertions, 400 deletions
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index bba87b082c..9813a08483 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
@@ -35,6 +35,9 @@ except RuntimeError as exc:
 from bb import cookerdata
 from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
+if sys.getfilesystemencoding() != "utf-8":
+    sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
+
 __version__ = "1.31.0"
 
 if __name__ == "__main__":
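The new guard can only detect a bad locale, not repair it, because Python 3 fixes the filesystem encoding when the interpreter starts. A minimal standalone sketch of the same check (the helper name and message are invented for illustration):

    import sys

    def require_utf8_locale():
        # sys.getfilesystemencoding() is derived from LANG/LC_ALL at interpreter
        # startup and cannot be changed afterwards, hence the hard exit.
        enc = sys.getfilesystemencoding()
        if enc.lower() not in ("utf-8", "utf8"):
            sys.exit("Please use a UTF-8 locale (e.g. LANG=en_US.UTF-8); got %s" % enc)

    require_utf8_locale()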
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
index 67c60dbb00..3b6ef8811c 100755
--- a/bitbake/bin/bitbake-diffsigs
+++ b/bitbake/bin/bitbake-diffsigs
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # bitbake-diffsigs
 # BitBake task signature data comparison utility
@@ -24,6 +24,7 @@ import warnings
 import fnmatch
 import optparse
 import logging
+import pickle
 
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 
@@ -121,7 +122,6 @@ else:
     if len(args) == 1:
         parser.print_help()
     else:
-        import cPickle
         try:
             if len(args) == 2:
                 output = bb.siggen.dump_sigfile(sys.argv[1])
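Dropping the late `import cPickle` (and importing `pickle` up front) loses nothing on Python 3: the `pickle` module loads its C accelerator automatically when available, so the old fallback dance is obsolete. A quick way to see that, offered as an aside rather than part of the patch:

    import pickle

    # On CPython 3 the Pickler class comes from the C extension module _pickle
    # whenever it is available; pure-Python pickle is only used as a last resort.
    print(pickle.Pickler.__module__)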
diff --git a/bitbake/bin/bitbake-dumpsig b/bitbake/bin/bitbake-dumpsig
index ffaed1f457..58ba1cad04 100755
--- a/bitbake/bin/bitbake-dumpsig
+++ b/bitbake/bin/bitbake-dumpsig
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # bitbake-dumpsig
 # BitBake task signature dump utility
@@ -23,6 +23,7 @@ import sys
 import warnings
 import optparse
 import logging
+import pickle
 
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 
@@ -51,7 +52,6 @@ options, args = parser.parse_args(sys.argv)
 if len(args) == 1:
     parser.print_help()
 else:
-    import cPickle
     try:
         output = bb.siggen.dump_sigfile(args[1])
     except IOError as e:
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index d8ffa9592a..0c973dfd2f 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # This script has subcommands which operate against your bitbake layers, either
 # displaying useful information, or acting against them.
@@ -48,7 +48,6 @@ def logger_create(name, output=sys.stderr):
     logger.setLevel(logging.INFO)
     return logger
 
-
 def logger_setup_color(logger, color='auto'):
     from bb.msg import BBLogFormatter
     console = logging.StreamHandler(sys.stdout)
@@ -61,7 +60,6 @@ def logger_setup_color(logger, color='auto'):
 
 logger = logger_create('bitbake-layers', sys.stdout)
 
-
 def main():
     parser = argparse.ArgumentParser(
         description="BitBake layers utility",
@@ -78,6 +76,7 @@ def main():
     parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                         help='show this help message and exit')
     subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+    subparsers.required = True
 
     if global_args.debug:
         logger.setLevel(logging.DEBUG)
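Setting `subparsers.required = True` restores the Python 2 behaviour of argparse, where running the tool with no subcommand is an error; from Python 3.3 onward subparsers are optional by default and the script would otherwise continue with no command selected. A small self-contained sketch of the pattern (names invented here):

    import argparse

    parser = argparse.ArgumentParser(description="demo tool")
    subparsers = parser.add_subparsers(dest="cmd", metavar="<subcommand>")
    subparsers.required = True      # running with no subcommand now errors out
    subparsers.add_parser("show-layers", help="list configured layers")

    args = parser.parse_args(["show-layers"])
    print(args.cmd)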
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
index 03821446b7..f38d2dd882 100755
--- a/bitbake/bin/bitbake-prserv
+++ b/bitbake/bin/bitbake-prserv
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 import os
 import sys,logging
 import optparse
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest
index 462eb1b2b4..1e6f35ef0c 100755
--- a/bitbake/bin/bitbake-selftest
+++ b/bitbake/bin/bitbake-selftest
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2012 Richard Purdie
 #
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 6a6b26b64a..5d062a23e9 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 import os
 import sys
@@ -10,8 +10,12 @@ import bb
 import select
 import errno
 import signal
+import pickle
 from multiprocessing import Lock
 
+if sys.getfilesystemencoding() != "utf-8":
+    sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
+
 # Users shouldn't be running this code directly
 if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
     print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
@@ -30,19 +34,16 @@ if sys.argv[1].startswith("decafbadbad"):
 # updates to log files for use with tail
 try:
     if sys.stdout.name == '<stdout>':
-        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+        import fcntl
+        fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
+        fl |= os.O_SYNC
+        fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
+        #sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
 except:
     pass
 
 logger = logging.getLogger("BitBake")
 
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-
-
 worker_pipe = sys.stdout.fileno()
 bb.utils.nonblockingfd(worker_pipe)
 # Need to guard against multiprocessing being used in child processes
@@ -62,10 +63,10 @@ if 0:
     consolelog.setFormatter(conlogformat)
     logger.addHandler(consolelog)
 
-worker_queue = ""
+worker_queue = b""
 
 def worker_fire(event, d):
-    data = "<event>" + pickle.dumps(event) + "</event>"
+    data = b"<event>" + pickle.dumps(event) + b"</event>"
     worker_fire_prepickled(data)
 
 def worker_fire_prepickled(event):
@@ -91,7 +92,7 @@ def worker_child_fire(event, d):
     global worker_pipe
     global worker_pipe_lock
 
-    data = "<event>" + pickle.dumps(event) + "</event>"
+    data = b"<event>" + pickle.dumps(event) + b"</event>"
     try:
         worker_pipe_lock.acquire()
         worker_pipe.write(data)
@@ -251,7 +252,7 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
             bb.utils.process_profilelog(profname)
         os._exit(ret)
     else:
-        for key, value in envbackup.iteritems():
+        for key, value in iter(envbackup.items()):
            if value is None:
                del os.environ[key]
            else:
@@ -268,22 +269,22 @@ class runQueueWorkerPipe():
         if pipeout:
             pipeout.close()
         bb.utils.nonblockingfd(self.input)
-        self.queue = ""
+        self.queue = b""
 
     def read(self):
         start = len(self.queue)
         try:
-            self.queue = self.queue + self.input.read(102400)
+            self.queue = self.queue + (self.input.read(102400) or b"")
         except (OSError, IOError) as e:
             if e.errno != errno.EAGAIN:
                 raise
 
         end = len(self.queue)
-        index = self.queue.find("</event>")
+        index = self.queue.find(b"</event>")
         while index != -1:
             worker_fire_prepickled(self.queue[:index+8])
             self.queue = self.queue[index+8:]
-            index = self.queue.find("</event>")
+            index = self.queue.find(b"</event>")
         return (end > start)
 
     def close(self):
@@ -299,7 +300,7 @@ class BitbakeWorker(object):
     def __init__(self, din):
         self.input = din
         bb.utils.nonblockingfd(self.input)
-        self.queue = ""
+        self.queue = b""
         self.cookercfg = None
         self.databuilder = None
         self.data = None
@@ -336,12 +337,12 @@ class BitbakeWorker(object):
                 except (OSError, IOError):
                     pass
             if len(self.queue):
-                self.handle_item("cookerconfig", self.handle_cookercfg)
-                self.handle_item("workerdata", self.handle_workerdata)
-                self.handle_item("runtask", self.handle_runtask)
-                self.handle_item("finishnow", self.handle_finishnow)
-                self.handle_item("ping", self.handle_ping)
-                self.handle_item("quit", self.handle_quit)
+                self.handle_item(b"cookerconfig", self.handle_cookercfg)
+                self.handle_item(b"workerdata", self.handle_workerdata)
+                self.handle_item(b"runtask", self.handle_runtask)
+                self.handle_item(b"finishnow", self.handle_finishnow)
+                self.handle_item(b"ping", self.handle_ping)
+                self.handle_item(b"quit", self.handle_quit)
 
             for pipe in self.build_pipes:
                 self.build_pipes[pipe].read()
@@ -351,12 +352,12 @@ class BitbakeWorker(object):
 
 
     def handle_item(self, item, func):
-        if self.queue.startswith("<" + item + ">"):
-            index = self.queue.find("</" + item + ">")
+        if self.queue.startswith(b"<" + item + b">"):
+            index = self.queue.find(b"</" + item + b">")
             while index != -1:
                 func(self.queue[(len(item) + 2):index])
                 self.queue = self.queue[(index + len(item) + 3):]
-                index = self.queue.find("</" + item + ">")
+                index = self.queue.find(b"</" + item + b">")
 
     def handle_cookercfg(self, data):
         self.cookercfg = pickle.loads(data)
@@ -420,12 +421,12 @@ class BitbakeWorker(object):
         self.build_pipes[pid].close()
         del self.build_pipes[pid]
 
-        worker_fire_prepickled("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")
+        worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
 
     def handle_finishnow(self, _):
         if self.build_pids:
             logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
-            for k, v in self.build_pids.iteritems():
+            for k, v in iter(self.build_pids.items()):
                 try:
                     os.kill(-k, signal.SIGTERM)
                     os.waitpid(-1, 0)
@@ -435,6 +436,7 @@ class BitbakeWorker(object):
             self.build_pipes[pipe].read()
 
 try:
+    sys.stdin = sys.stdin.detach()
     worker = BitbakeWorker(sys.stdin)
     if not profiling:
         worker.serve()
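The worker/cooker pipe protocol is now bytes end to end: pickled payloads are framed with byte-string tags and the reader scans a bytes buffer, which is why every literal above gained a `b` prefix. A reduced sketch of that framing, independent of the worker's real classes (function names invented):

    import pickle

    def frame(tag, payload):
        # pickle.dumps() returns bytes on Python 3, so the markers must be bytes too
        return b"<" + tag + b">" + pickle.dumps(payload) + b"</" + tag + b">"

    def unframe(buf, tag):
        end = b"</" + tag + b">"
        index = buf.find(end)
        if index == -1:
            return None, buf
        payload = pickle.loads(buf[len(tag) + 2:index])
        return payload, buf[index + len(end):]

    buf = frame(b"event", {"task": "do_compile", "status": 0})
    event, rest = unframe(buf, b"event")
    print(event, rest)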
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
index defb3dd37a..2744678824 100755
--- a/bitbake/bin/bitdoc
+++ b/bitbake/bin/bitdoc
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
diff --git a/bitbake/bin/image-writer b/bitbake/bin/image-writer
index e30ab45e31..7ab1d8c979 100755
--- a/bitbake/bin/image-writer
+++ b/bitbake/bin/image-writer
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Copyright (c) 2012 Wind River Systems, Inc.
 #
@@ -24,9 +24,13 @@ try:
 except RuntimeError as exc:
     sys.exit(str(exc))
 
+from gi import pygtkcompat
+
+pygtkcompat.enable()
+pygtkcompat.enable_gtk(version='3.0')
+
 import gtk
 import optparse
-import pygtk
 
 from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
 from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
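The pygtkcompat shim is what keeps the `import gtk` lines above working: it maps the old static PyGTK names onto the GObject-introspection GTK+ 3 bindings. A minimal sketch of the same bootstrap; it needs PyGObject and GTK+ 3 installed at runtime, so treat it as illustrative only:

    from gi import pygtkcompat

    pygtkcompat.enable()
    pygtkcompat.enable_gtk(version='3.0')

    import gtk                      # resolved through gi.repository.Gtk
    win = gtk.Window()
    win.set_title("pygtkcompat demo")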
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
index 615a7aed15..bdddb908b5 100755
--- a/bitbake/bin/toaster-eventreplay
+++ b/bitbake/bin/toaster-eventreplay
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
diff --git a/bitbake/contrib/dump_cache.py b/bitbake/contrib/dump_cache.py
index 97c5463a09..a1f09b4044 100755
--- a/bitbake/contrib/dump_cache.py
+++ b/bitbake/contrib/dump_cache.py
@@ -29,7 +29,7 @@ import warnings
 sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
 from bb.cache import CoreRecipeInfo
 
-import cPickle as pickle
+import pickle as pickle
 
 def main(argv=None):
     """
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 6917ec378a..77a05cfe35 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -23,19 +23,17 @@
 # Assign a file to __warn__ to get warnings about slow operations.
 #
 
-from __future__ import print_function
+
 import copy
 import types
 ImmutableTypes = (
-    types.NoneType,
     bool,
     complex,
     float,
     int,
-    long,
     tuple,
     frozenset,
-    basestring
+    str
 )
 
 MUTABLE = "__mutable__"
@@ -61,7 +59,7 @@ class COWDictMeta(COWMeta):
     __call__ = cow
 
     def __setitem__(cls, key, value):
-        if not isinstance(value, ImmutableTypes):
+        if value is not None and not isinstance(value, ImmutableTypes):
             if not isinstance(value, COWMeta):
                 cls.__hasmutable__ = True
             key += MUTABLE
@@ -116,7 +114,7 @@ class COWDictMeta(COWMeta):
         cls.__setitem__(key, cls.__marker__)
 
     def __revertitem__(cls, key):
-        if not cls.__dict__.has_key(key):
+        if key not in cls.__dict__:
             key += MUTABLE
         delattr(cls, key)
 
@@ -183,7 +181,7 @@ class COWSetMeta(COWDictMeta):
         COWDictMeta.__delitem__(cls, repr(hash(value)))
 
     def __in__(cls, value):
-        return COWDictMeta.has_key(repr(hash(value)))
+        return repr(hash(value)) in COWDictMeta
 
     def iterkeys(cls):
         raise TypeError("sets don't have keys")
@@ -192,12 +190,10 @@ class COWSetMeta(COWDictMeta):
         raise TypeError("sets don't have 'items'")
 
 # These are the actual classes you use!
-class COWDictBase(object):
-    __metaclass__ = COWDictMeta
+class COWDictBase(object, metaclass = COWDictMeta):
     __count__ = 0
 
-class COWSetBase(object):
-    __metaclass__ = COWSetMeta
+class COWSetBase(object, metaclass = COWSetMeta):
     __count__ = 0
 
 if __name__ == "__main__":
@@ -217,11 +213,11 @@ if __name__ == "__main__":
     print()
 
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
-    for x in b.iteritems():
+    for x in b.items():
         print(x)
     print()
 
@@ -229,11 +225,11 @@ if __name__ == "__main__":
     b['a'] = 'c'
 
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
-    for x in b.iteritems():
+    for x in b.items():
         print(x)
     print()
 
@@ -248,22 +244,22 @@ if __name__ == "__main__":
     a['set'].add("o2")
 
     print("a", a)
-    for x in a['set'].itervalues():
+    for x in a['set'].values():
         print(x)
     print("--")
     print("b", b)
-    for x in b['set'].itervalues():
+    for x in b['set'].values():
         print(x)
     print()
 
     b['set'].add('o3')
 
     print("a", a)
-    for x in a['set'].itervalues():
+    for x in a['set'].values():
         print(x)
     print("--")
     print("b", b)
-    for x in b['set'].itervalues():
+    for x in b['set'].values():
         print(x)
     print()
 
@@ -273,7 +269,7 @@ if __name__ == "__main__":
     a['set2'].add("o2")
 
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
@@ -287,13 +283,13 @@ if __name__ == "__main__":
     except KeyError:
         print("Yay! deleted key raises error")
 
-    if b.has_key('b'):
+    if 'b' in b:
         print("Boo!")
     else:
         print("Yay - has_key with delete works!")
 
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
@@ -304,7 +300,7 @@ if __name__ == "__main__":
     b.__revertitem__('b')
 
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
@@ -314,7 +310,7 @@ if __name__ == "__main__":
 
     b.__revertitem__('dict')
     print("a", a)
-    for x in a.iteritems():
+    for x in a.items():
         print(x)
     print("--")
     print("b", b)
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 4ae6b8a5f5..6b85984ba8 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -84,7 +84,7 @@ def plain(*args):
     mainlogger.plain(''.join(args))
 
 def debug(lvl, *args):
-    if isinstance(lvl, basestring):
+    if isinstance(lvl, str):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
         lvl = 1
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 9854553038..e016ae3f40 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -35,8 +35,7 @@ import stat
 import bb
 import bb.msg
 import bb.process
-from contextlib import nested
-from bb import event, utils
+from bb import data, event, utils
 
 bblogger = logging.getLogger('BitBake')
 logger = logging.getLogger('BitBake.Build')
@@ -328,7 +327,7 @@ trap '' 0
 exit $ret
 ''')
 
-    os.chmod(runfile, 0775)
+    os.chmod(runfile, 0o775)
 
     cmd = runfile
     if d.getVarFlag(func, 'fakeroot', False):
@@ -342,12 +341,12 @@ exit $ret
         logfile = sys.stdout
 
     def readfifo(data):
-        lines = data.split('\0')
+        lines = data.split(b'\0')
         for line in lines:
-            splitval = line.split(' ', 1)
+            splitval = line.split(b' ', 1)
             cmd = splitval[0]
             if len(splitval) > 1:
-                value = splitval[1]
+                value = splitval[1].decode("utf-8")
             else:
                 value = ''
             if cmd == 'bbplain':
@@ -375,7 +374,7 @@ exit $ret
     if os.path.exists(fifopath):
         os.unlink(fifopath)
     os.mkfifo(fifopath)
-    with open(fifopath, 'r+') as fifo:
+    with open(fifopath, 'r+b', buffering=0) as fifo:
         try:
             bb.debug(2, "Executing shell function %s" % func)
 
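Because the FIFO is now opened as unbuffered binary ('r+b'), everything read from it is bytes; records are split on a NUL byte and only the value half is decoded back to text. A standalone sketch of that parsing step, using a literal buffer instead of the real FIFO:

    def parse_fifo_chunk(chunk):
        # chunk is bytes read from the fifo; records are NUL-separated
        for line in chunk.split(b'\0'):
            if not line:
                continue
            splitval = line.split(b' ', 1)
            cmd = splitval[0]
            value = splitval[1].decode("utf-8") if len(splitval) > 1 else ''
            yield cmd, value

    for cmd, value in parse_fifo_chunk(b"bbplain hello world\0bbnote build ok\0"):
        print(cmd, value)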
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 393d541744..96abd7141b 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -28,21 +28,15 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-
 import os
+import sys
 import logging
+import pickle
 from collections import defaultdict
 import bb.utils
 
 logger = logging.getLogger("BitBake.Cache")
 
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info("Importing cPickle failed. "
-                "Falling back to a very slow implementation.")
-
 __cache_version__ = "150"
 
 def getCacheFile(path, filename, data_hash):
@@ -80,7 +74,7 @@ class RecipeInfoCommon(object):
         out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                         for var in varlist)
         if squash:
-            return dict((k,v) for (k,v) in out_dict.iteritems() if v)
+            return dict((k,v) for (k,v) in out_dict.items() if v)
         else:
             return out_dict
 
@@ -240,7 +234,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
             cachedata.universe_target.append(self.pn)
 
         cachedata.hashfn[fn] = self.hashfilename
-        for task, taskhash in self.basetaskhashes.iteritems():
+        for task, taskhash in self.basetaskhashes.items():
             identifier = '%s.%s' % (fn, task)
             cachedata.basetaskhash[identifier] = taskhash
 
@@ -404,7 +398,7 @@ class Cache(object):
         infos = []
         datastores = cls.load_bbfile(filename, appends, configdata)
         depends = []
-        for variant, data in sorted(datastores.iteritems(),
+        for variant, data in sorted(datastores.items(),
                                     key=lambda i: i[0],
                                     reverse=True):
             virtualfn = cls.realfn2virtual(filename, variant)
@@ -616,7 +610,7 @@ class Cache(object):
         pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
 
         try:
-            for key, info_array in self.depends_cache.iteritems():
+            for key, info_array in self.depends_cache.items():
                 for info in info_array:
                     if isinstance(info, RecipeInfoCommon):
                         cache_class_name = info.__class__.__name__
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 2ec964d73b..be4ab68915 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -19,20 +19,13 @@ import glob
 import operator
 import os
 import stat
+import pickle
 import bb.utils
 import logging
 from bb.cache import MultiProcessCache
 
 logger = logging.getLogger("BitBake.Cache")
 
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info("Importing cPickle failed. "
-                "Falling back to a very slow implementation.")
-
-
 # mtime cache (non-persistent)
 # based upon the assumption that files do not change during bitbake run
 class FileMtimeCache(object):
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 70b0a8d136..b1d067a2f1 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,22 +1,17 @@
 import ast
+import sys
 import codegen
 import logging
+import pickle
+import bb.pysh as pysh
 import os.path
 import bb.utils, bb.data
 from itertools import chain
-from pysh import pyshyacc, pyshlex, sherrors
+from bb.pysh import pyshyacc, pyshlex, sherrors
 from bb.cache import MultiProcessCache
 
-
 logger = logging.getLogger('BitBake.CodeParser')
 
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-
 def check_indent(codestr):
     """If the code is indented, add a top level piece of code to 'remove' the indentation"""
 
@@ -68,7 +63,7 @@ class SetCache(object):
 
         new = []
         for i in items:
-            new.append(intern(i))
+            new.append(sys.intern(i))
         s = frozenset(new)
         if hash(s) in self.setcache:
             return self.setcache[hash(s)]
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 0559ffc07c..caa3e4d457 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -110,7 +110,7 @@ class Command:
             return False
         except SystemExit as exc:
             arg = exc.args[0]
-            if isinstance(arg, basestring):
+            if isinstance(arg, str):
                 self.finishAsyncCommand(arg)
             else:
                 self.finishAsyncCommand("Exited with %s" % arg)
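`basestring` does not exist in Python 3 — `str` is the only text type — so the isinstance checks here and in several other files collapse to `isinstance(x, str)`. The resulting behaviour, in miniature:

    exit_arg = "all done"
    if isinstance(exit_arg, str):      # covers what Python 2 called str and unicode
        message = exit_arg
    else:
        message = "Exited with %s" % exit_arg
    print(message)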
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 11f17efa6f..2154ef49c4 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -30,13 +30,13 @@ import logging
 import multiprocessing
 import sre_constants
 import threading
-from cStringIO import StringIO
+from io import StringIO
 from contextlib import closing
 from functools import wraps
 from collections import defaultdict
 import bb, bb.exceptions, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
-import Queue
+import queue
 import signal
 import subprocess
 import errno
@@ -65,7 +65,7 @@ class CollectionError(bb.BBHandledException):
     """
 
 class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
+    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
     @classmethod
     def get_name(cls, code):
@@ -93,7 +93,7 @@ class SkippedPackage:
 
 
 class CookerFeatures(object):
-    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(3)
+    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
 
     def __init__(self):
         self._features=set()
@@ -110,8 +110,8 @@ class CookerFeatures(object):
     def __iter__(self):
         return self._features.__iter__()
 
-    def next(self):
-        return self._features.next()
+    def __next__(self):
+        return next(self._features)
 
 
 #============================================================================#
@@ -726,13 +726,13 @@ class BBCooker:
         depend_tree['providermap'] = {}
         depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
 
-        for name, fn in taskdata.get_providermap().iteritems():
+        for name, fn in list(taskdata.get_providermap().items()):
             pn = self.recipecache.pkg_fn[fn]
             if name != pn:
                 version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                 depend_tree['providermap'][name] = (pn, version)
 
-        for task in xrange(len(rq.rqdata.runq_fnid)):
+        for task in range(len(rq.rqdata.runq_fnid)):
             taskname = rq.rqdata.runq_task[task]
             fnid = rq.rqdata.runq_fnid[task]
             fn = taskdata.fn_index[fnid]
@@ -807,7 +807,7 @@ class BBCooker:
         _, taskdata = self.prepareTreeData(pkgs_to_build, task)
         tasks_fnid = []
         if len(taskdata.tasks_name) != 0:
-            for task in xrange(len(taskdata.tasks_name)):
+            for task in range(len(taskdata.tasks_name)):
                 tasks_fnid.append(taskdata.tasks_fnid[task])
 
         seen_fnids = []
@@ -825,7 +825,7 @@ class BBCooker:
             cachefields = getattr(cache_class, 'cachefields', [])
             extra_info = extra_info + cachefields
 
-        for task in xrange(len(tasks_fnid)):
+        for task in range(len(tasks_fnid)):
             fnid = tasks_fnid[task]
             fn = taskdata.fn_index[fnid]
             pn = self.recipecache.pkg_fn[fn]
@@ -953,7 +953,7 @@ class BBCooker:
         # Determine which bbappends haven't been applied
 
         # First get list of recipes, including skipped
-        recipefns = self.recipecache.pkg_fn.keys()
+        recipefns = list(self.recipecache.pkg_fn.keys())
         recipefns.extend(self.skiplist.keys())
 
         # Work out list of bbappends that have been applied
@@ -1152,7 +1152,7 @@ class BBCooker:
                         deplist = bb.utils.explode_dep_versions2(deps)
                     except bb.utils.VersionStringException as vse:
                         bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
-                    for dep, oplist in deplist.iteritems():
+                    for dep, oplist in list(deplist.items()):
                         if dep in collection_list:
                             for opstr in oplist:
                                 layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
@@ -1888,7 +1888,7 @@ class Feeder(multiprocessing.Process):
         while True:
             try:
                 quit = self.quit.get_nowait()
-            except Queue.Empty:
+            except queue.Empty:
                 pass
             else:
                 if quit == 'cancel':
@@ -1902,7 +1902,7 @@ class Feeder(multiprocessing.Process):
 
             try:
                 self.to_parsers.put(job, timeout=0.5)
-            except Queue.Full:
+            except queue.Full:
                 self.jobs.insert(0, job)
                 continue
 
@@ -1942,7 +1942,7 @@ class Parser(multiprocessing.Process):
         while True:
             try:
                 self.quit.get_nowait()
-            except Queue.Empty:
+            except queue.Empty:
                 pass
             else:
                 self.results.cancel_join_thread()
@@ -1953,7 +1953,7 @@ class Parser(multiprocessing.Process):
             else:
                 try:
                     job = self.jobs.get(timeout=0.25)
-                except Queue.Empty:
+                except queue.Empty:
                     continue
 
                 if job is None:
@@ -1962,7 +1962,7 @@ class Parser(multiprocessing.Process):
 
             try:
                 self.results.put(result, timeout=0.25)
-            except Queue.Full:
+            except queue.Full:
                 pending.append(result)
 
     def parse(self, filename, appends, caches_array):
@@ -2115,7 +2115,7 @@ class CookerParser(object):
 
         try:
             result = self.result_queue.get(timeout=0.25)
-        except Queue.Empty:
+        except queue.Empty:
             pass
         else:
             value = result[1]
@@ -2128,7 +2128,7 @@ class CookerParser(object):
         result = []
         parsed = None
         try:
-            parsed, result = self.results.next()
+            parsed, result = next(self.results)
         except StopIteration:
             self.shutdown()
             return False
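Two patterns recur throughout cooker.py: the Queue module is spelled `queue` in Python 3 (only its Empty/Full exceptions are used here), and dictionary views have to be wrapped in `list()` wherever the loop may mutate the dict or an actual list is needed. A compact sketch of both, outside the cooker classes (the sample dict is made up):

    import queue

    q = queue.Queue()
    try:
        q.get_nowait()
    except queue.Empty:                 # same exception the Feeder/Parser loops catch
        pass

    providermap = {"virtual/kernel": "linux-yocto.bb"}
    for name, fn in list(providermap.items()):   # safe even if the dict grows below
        providermap.setdefault(name + "-native", fn)
    print(sorted(providermap))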
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 2ab884bb39..2a61386d89 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -372,7 +372,7 @@ class DataSmart(MutableMapping):
 
     def expandWithRefs(self, s, varname):
 
-        if not isinstance(s, basestring): # sanity check
+        if not isinstance(s, str): # sanity check
             return VariableParse(varname, self, s)
 
         if varname and varname in self.expand_cache:
@@ -966,4 +966,4 @@ class DataSmart(MutableMapping):
             data.update({i:value})
 
         data_str = str([(k, data[k]) for k in sorted(data.keys())])
-        return hashlib.md5(data_str).hexdigest()
+        return hashlib.md5(data_str.encode("utf-8")).hexdigest()
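hashlib digests only accept bytes in Python 3, which is why the data checksum now encodes the string before hashing it. In isolation:

    import hashlib

    data_str = str([("PN", "busybox"), ("PV", "1.24")])
    # hashlib.md5(data_str) would raise TypeError on Python 3
    print(hashlib.md5(data_str.encode("utf-8")).hexdigest())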
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 29b14f6c32..6fb37128ea 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -24,10 +24,7 @@ BitBake build tools.
 
 import os, sys
 import warnings
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
+import pickle
 import logging
 import atexit
 import traceback
@@ -107,7 +104,7 @@ def fire_class_handlers(event, d):
 
     eid = str(event.__class__)[8:-2]
     evt_hmap = _event_handler_map.get(eid, {})
-    for name, handler in _handlers.iteritems():
+    for name, handler in list(_handlers.items()):
         if name in _catchall_handlers or name in evt_hmap:
             if _eventfilter:
                 if not _eventfilter(name, handler, event, d):
@@ -192,7 +189,7 @@ def register(name, handler, mask=None, filename=None, lineno=None):
 
     if handler is not None:
         # handle string containing python code
-        if isinstance(handler, basestring):
+        if isinstance(handler, str):
             tmp = "def %s(e):\n%s" % (name, handler)
             try:
                 code = bb.methodpool.compile_cache(tmp)
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
index eadfc57157..cd713439ea 100644
--- a/bitbake/lib/bb/exceptions.py
+++ b/bitbake/lib/bb/exceptions.py
@@ -86,6 +86,6 @@ def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
 
 def to_string(exc):
     if isinstance(exc, SystemExit):
-        if not isinstance(exc.code, basestring):
+        if not isinstance(exc.code, str):
             return 'Exited with "%d"' % exc.code
     return str(exc)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 14fe3c753a..be01bdbb34 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -28,27 +28,23 @@ BitBake build tools.
 import os, re
 import signal
 import logging
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+if 'git' not in urllib.parse.uses_netloc:
+    urllib.parse.uses_netloc.append('git')
+import operator
 import collections
+import subprocess
+import pickle
 import bb.persist_data, bb.utils
 import bb.checksum
 from bb import data
 import bb.process
-import subprocess
 
 __version__ = "2"
 _checksum_cache = bb.checksum.FileChecksumCache()
 
 logger = logging.getLogger("BitBake.Fetcher")
 
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info("Importing cPickle failed. "
-                "Falling back to a very slow implementation.")
-
 class BBFetchException(Exception):
     """Class all fetch exceptions inherit from"""
     def __init__(self, message):
@@ -230,14 +226,14 @@
         # them are not quite RFC compliant.
         uri, param_str = (uri.split(";", 1) + [None])[:2]
 
-        urlp = urlparse.urlparse(uri)
+        urlp = urllib.parse.urlparse(uri)
         self.scheme = urlp.scheme
 
         reparse = 0
 
         # Coerce urlparse to make URI scheme use netloc
-        if not self.scheme in urlparse.uses_netloc:
-            urlparse.uses_params.append(self.scheme)
+        if not self.scheme in urllib.parse.uses_netloc:
+            urllib.parse.uses_params.append(self.scheme)
             reparse = 1
 
         # Make urlparse happy(/ier) by converting local resources
@@ -248,7 +244,7 @@ class URI(object):
             reparse = 1
 
         if reparse:
-            urlp = urlparse.urlparse(uri)
+            urlp = urllib.parse.urlparse(uri)
 
         # Identify if the URI is relative or not
         if urlp.scheme in self._relative_schemes and \
@@ -264,7 +260,7 @@ class URI(object):
             if urlp.password:
                 self.userinfo += ':%s' % urlp.password
 
-        self.path = urllib.unquote(urlp.path)
+        self.path = urllib.parse.unquote(urlp.path)
 
         if param_str:
             self.params = self._param_str_split(param_str, ";")
@@ -312,11 +308,11 @@
 
     @property
     def path_quoted(self):
-        return urllib.quote(self.path)
+        return urllib.parse.quote(self.path)
 
     @path_quoted.setter
     def path_quoted(self, path):
-        self.path = urllib.unquote(path)
+        self.path = urllib.parse.unquote(path)
 
     @property
     def path(self):
@@ -398,7 +394,7 @@ def decodeurl(url):
             s1, s2 = s.split('=')
             p[s1] = s2
 
-    return type, host, urllib.unquote(path), user, pswd, p
+    return type, host, urllib.parse.unquote(path), user, pswd, p
 
 def encodeurl(decoded):
     """Encodes a URL from tokens (scheme, network location, path,
@@ -422,7 +418,7 @@ def encodeurl(decoded):
         # Standardise path to ensure comparisons work
         while '//' in path:
             path = path.replace("//", "/")
-        url += "%s" % urllib.quote(path)
+        url += "%s" % urllib.parse.quote(path)
     if p:
         for parm in p:
             url += ";%s=%s" % (parm, p[parm])
@@ -1735,7 +1731,7 @@ class FetchConnectionCache(object):
             del self.cache[cn]
 
     def close_connections(self):
-        for cn in self.cache.keys():
+        for cn in list(self.cache.keys()):
             self.cache[cn].close()
             del self.cache[cn]
 
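urllib, urlparse and friends were folded into the urllib package in Python 3, so the fetcher now goes through urllib.parse and registers 'git' as a netloc-carrying scheme once at import time. A short sketch of the calls this section leans on (the example URL is made up):

    import urllib.parse

    if 'git' not in urllib.parse.uses_netloc:
        urllib.parse.uses_netloc.append('git')

    urlp = urllib.parse.urlparse("git://git.example.com/repo.git")
    print(urlp.scheme, urlp.netloc, urllib.parse.unquote(urlp.path))
    print(urllib.parse.quote("/path with spaces/file.txt"))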
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 303a52b638..51ca78d12b 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -26,7 +26,7 @@ BitBake build tools.
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import bb
 import bb.utils
 from bb import data
@@ -42,7 +42,7 @@ class Local(FetchMethod):
 
     def urldata_init(self, ud, d):
         # We don't set localfile as for this fetcher the file is already local!
-        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+        ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
         ud.basename = os.path.basename(ud.decodedurl)
         ud.basepath = ud.decodedurl
         ud.needdonestamp = False
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index d9e46b2e8c..2fd43034ba 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -20,7 +20,7 @@ Usage in the recipe:
 
 import os
 import sys
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 import subprocess
 import signal
@@ -196,9 +196,9 @@ class Npm(FetchMethod):
                 optdepsfound[dep] = dependencies[dep]
             else:
                 depsfound[dep] = dependencies[dep]
-        for dep, version in optdepsfound.iteritems():
+        for dep, version in optdepsfound.items():
             self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
-        for dep, version in depsfound.iteritems():
+        for dep, version in depsfound.items():
             self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
 
     def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py index 1aef246942..ce3cda2670 100644 --- a/bitbake/lib/bb/fetch2/perforce.py +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
@@ -61,7 +61,7 @@ class Perforce(FetchMethod): | |||
61 | keys.append(key) | 61 | keys.append(key) |
62 | values.append(value) | 62 | values.append(value) |
63 | 63 | ||
64 | parm = dict(zip(keys, values)) | 64 | parm = dict(list(zip(keys, values))) |
65 | path = "//" + path.split(';')[0] | 65 | path = "//" + path.split(';')[0] |
66 | host += ":%s" % (port) | 66 | host += ":%s" % (port) |
67 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) | 67 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) |
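zip() returns an iterator in Python 3, but dict() consumes any iterable of pairs, so the extra list() inserted by the conversion is harmless rather than required. For example, with invented Perforce-style parameters:

    keys = ["user", "port"]
    values = ["builder", "1666"]
    parm = dict(zip(keys, values))   # no list() needed; dict() drains the iterator
    print(parm)                      # -> {'user': 'builder', 'port': '1666'}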
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py index cb2f753a8e..7989fccc75 100644 --- a/bitbake/lib/bb/fetch2/sftp.py +++ b/bitbake/lib/bb/fetch2/sftp.py | |||
@@ -61,8 +61,7 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt" | |||
61 | 61 | ||
62 | import os | 62 | import os |
63 | import bb | 63 | import bb |
64 | import urllib | 64 | import urllib.request, urllib.parse, urllib.error |
65 | import commands | ||
66 | from bb import data | 65 | from bb import data |
67 | from bb.fetch2 import URI | 66 | from bb.fetch2 import URI |
68 | from bb.fetch2 import FetchMethod | 67 | from bb.fetch2 import FetchMethod |
@@ -93,7 +92,7 @@ class SFTP(FetchMethod): | |||
93 | else: | 92 | else: |
94 | ud.basename = os.path.basename(ud.path) | 93 | ud.basename = os.path.basename(ud.path) |
95 | 94 | ||
96 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 95 | ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) |
97 | 96 | ||
98 | def download(self, ud, d): | 97 | def download(self, ud, d): |
99 | """Fetch urls""" | 98 | """Fetch urls""" |
@@ -121,8 +120,7 @@ class SFTP(FetchMethod): | |||
121 | 120 | ||
122 | remote = '%s%s:%s' % (user, urlo.hostname, path) | 121 | remote = '%s%s:%s' % (user, urlo.hostname, path) |
123 | 122 | ||
124 | cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), | 123 | cmd = '%s %s %s %s' % (basecmd, port, remote, lpath) |
125 | commands.mkarg(lpath)) | ||
126 | 124 | ||
127 | bb.fetch2.check_network_access(d, cmd, ud.url) | 125 | bb.fetch2.check_network_access(d, cmd, ud.url) |
128 | runfetchcmd(cmd, d) | 126 | runfetchcmd(cmd, d) |
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py index 635578a711..56f9b7eb35 100644 --- a/bitbake/lib/bb/fetch2/ssh.py +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
@@ -114,12 +114,10 @@ class SSH(FetchMethod): | |||
114 | fr = host | 114 | fr = host |
115 | fr += ':%s' % path | 115 | fr += ':%s' % path |
116 | 116 | ||
117 | |||
118 | import commands | ||
119 | cmd = 'scp -B -r %s %s %s/' % ( | 117 | cmd = 'scp -B -r %s %s %s/' % ( |
120 | portarg, | 118 | portarg, |
121 | commands.mkarg(fr), | 119 | fr, |
122 | commands.mkarg(dldir) | 120 | dldir |
123 | ) | 121 | ) |
124 | 122 | ||
125 | bb.fetch2.check_network_access(d, cmd, urldata.url) | 123 | bb.fetch2.check_network_access(d, cmd, urldata.url) |
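Both the sftp.py and ssh.py hunks drop commands.mkarg() because the commands module was removed in Python 3. The patch simply interpolates the raw strings into the scp command line; if shell quoting is still wanted, shlex.quote() is the closest modern equivalent. A hedged sketch with invented paths:

    import shlex

    remote = "builder@host.example.com:/srv/downloads/file name.txt"  # made-up remote
    dldir = "/home/builder/downloads"                                 # made-up DL_DIR

    # The patch above drops the quoting entirely; shlex.quote restores it if needed.
    cmd = "scp -B -r %s %s/" % (shlex.quote(remote), shlex.quote(dldir))
    print(cmd)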
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index 8bc9e93ca0..d688fd9d02 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -31,7 +31,7 @@ import subprocess | |||
31 | import os | 31 | import os |
32 | import logging | 32 | import logging |
33 | import bb | 33 | import bb |
34 | import urllib | 34 | import urllib.request, urllib.parse, urllib.error |
35 | from bb import data | 35 | from bb import data |
36 | from bb.fetch2 import FetchMethod | 36 | from bb.fetch2 import FetchMethod |
37 | from bb.fetch2 import FetchError | 37 | from bb.fetch2 import FetchError |
@@ -62,9 +62,9 @@ class Wget(FetchMethod): | |||
62 | else: | 62 | else: |
63 | ud.basename = os.path.basename(ud.path) | 63 | ud.basename = os.path.basename(ud.path) |
64 | 64 | ||
65 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 65 | ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) |
66 | if not ud.localfile: | 66 | if not ud.localfile: |
67 | ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d) | 67 | ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) |
68 | 68 | ||
69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" | 69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" |
70 | 70 | ||
@@ -105,11 +105,11 @@ class Wget(FetchMethod): | |||
105 | return True | 105 | return True |
106 | 106 | ||
107 | def checkstatus(self, fetch, ud, d): | 107 | def checkstatus(self, fetch, ud, d): |
108 | import urllib2, socket, httplib | 108 | import urllib.request, urllib.error, urllib.parse, socket, http.client |
109 | from urllib import addinfourl | 109 | from urllib.response import addinfourl |
110 | from bb.fetch2 import FetchConnectionCache | 110 | from bb.fetch2 import FetchConnectionCache |
111 | 111 | ||
112 | class HTTPConnectionCache(httplib.HTTPConnection): | 112 | class HTTPConnectionCache(http.client.HTTPConnection): |
113 | if fetch.connection_cache: | 113 | if fetch.connection_cache: |
114 | def connect(self): | 114 | def connect(self): |
115 | """Connect to the host and port specified in __init__.""" | 115 | """Connect to the host and port specified in __init__.""" |
@@ -125,7 +125,7 @@ class Wget(FetchMethod): | |||
125 | if self._tunnel_host: | 125 | if self._tunnel_host: |
126 | self._tunnel() | 126 | self._tunnel() |
127 | 127 | ||
128 | class CacheHTTPHandler(urllib2.HTTPHandler): | 128 | class CacheHTTPHandler(urllib.request.HTTPHandler): |
129 | def http_open(self, req): | 129 | def http_open(self, req): |
130 | return self.do_open(HTTPConnectionCache, req) | 130 | return self.do_open(HTTPConnectionCache, req) |
131 | 131 | ||
@@ -139,7 +139,7 @@ class Wget(FetchMethod): | |||
139 | - geturl(): return the original request URL | 139 | - geturl(): return the original request URL |
140 | - code: HTTP status code | 140 | - code: HTTP status code |
141 | """ | 141 | """ |
142 | host = req.get_host() | 142 | host = req.host |
143 | if not host: | 143 | if not host: |
144 | raise urlllib2.URLError('no host given') | 144 | raise urlllib2.URLError('no host given') |
145 | 145 | ||
@@ -147,7 +147,7 @@ class Wget(FetchMethod): | |||
147 | h.set_debuglevel(self._debuglevel) | 147 | h.set_debuglevel(self._debuglevel) |
148 | 148 | ||
149 | headers = dict(req.unredirected_hdrs) | 149 | headers = dict(req.unredirected_hdrs) |
150 | headers.update(dict((k, v) for k, v in req.headers.items() | 150 | headers.update(dict((k, v) for k, v in list(req.headers.items()) |
151 | if k not in headers)) | 151 | if k not in headers)) |
152 | 152 | ||
153 | # We want to make an HTTP/1.1 request, but the addinfourl | 153 | # We want to make an HTTP/1.1 request, but the addinfourl |
@@ -164,7 +164,7 @@ class Wget(FetchMethod): | |||
164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 | 164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 |
165 | 165 | ||
166 | headers = dict( | 166 | headers = dict( |
167 | (name.title(), val) for name, val in headers.items()) | 167 | (name.title(), val) for name, val in list(headers.items())) |
168 | 168 | ||
169 | if req._tunnel_host: | 169 | if req._tunnel_host: |
170 | tunnel_headers = {} | 170 | tunnel_headers = {} |
@@ -177,12 +177,12 @@ class Wget(FetchMethod): | |||
177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) | 177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) |
178 | 178 | ||
179 | try: | 179 | try: |
180 | h.request(req.get_method(), req.get_selector(), req.data, headers) | 180 | h.request(req.get_method(), req.selector, req.data, headers) |
181 | except socket.error, err: # XXX what error? | 181 | except socket.error as err: # XXX what error? |
182 | # Don't close connection when cache is enabled. | 182 | # Don't close connection when cache is enabled. |
183 | if fetch.connection_cache is None: | 183 | if fetch.connection_cache is None: |
184 | h.close() | 184 | h.close() |
185 | raise urllib2.URLError(err) | 185 | raise urllib.error.URLError(err) |
186 | else: | 186 | else: |
187 | try: | 187 | try: |
188 | r = h.getresponse(buffering=True) | 188 | r = h.getresponse(buffering=True) |
@@ -222,7 +222,7 @@ class Wget(FetchMethod): | |||
222 | 222 | ||
223 | return resp | 223 | return resp |
224 | 224 | ||
225 | class HTTPMethodFallback(urllib2.BaseHandler): | 225 | class HTTPMethodFallback(urllib.request.BaseHandler): |
226 | """ | 226 | """ |
227 | Fallback to GET if HEAD is not allowed (405 HTTP error) | 227 | Fallback to GET if HEAD is not allowed (405 HTTP error) |
228 | """ | 228 | """ |
@@ -230,11 +230,11 @@ class Wget(FetchMethod): | |||
230 | fp.read() | 230 | fp.read() |
231 | fp.close() | 231 | fp.close() |
232 | 232 | ||
233 | newheaders = dict((k,v) for k,v in req.headers.items() | 233 | newheaders = dict((k,v) for k,v in list(req.headers.items()) |
234 | if k.lower() not in ("content-length", "content-type")) | 234 | if k.lower() not in ("content-length", "content-type")) |
235 | return self.parent.open(urllib2.Request(req.get_full_url(), | 235 | return self.parent.open(urllib.request.Request(req.get_full_url(), |
236 | headers=newheaders, | 236 | headers=newheaders, |
237 | origin_req_host=req.get_origin_req_host(), | 237 | origin_req_host=req.origin_req_host, |
238 | unverifiable=True)) | 238 | unverifiable=True)) |
239 | 239 | ||
240 | """ | 240 | """ |
@@ -249,35 +249,35 @@ class Wget(FetchMethod): | |||
249 | """ | 249 | """ |
250 | http_error_406 = http_error_405 | 250 | http_error_406 = http_error_405 |
251 | 251 | ||
252 | class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler): | 252 | class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): |
253 | """ | 253 | """ |
254 | urllib2.HTTPRedirectHandler resets the method to GET on redirect, | 254 | urllib2.HTTPRedirectHandler resets the method to GET on redirect, |
255 | when we want to follow redirects using the original method. | 255 | when we want to follow redirects using the original method. |
256 | """ | 256 | """ |
257 | def redirect_request(self, req, fp, code, msg, headers, newurl): | 257 | def redirect_request(self, req, fp, code, msg, headers, newurl): |
258 | newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) | 258 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) |
259 | newreq.get_method = lambda: req.get_method() | 259 | newreq.get_method = lambda: req.get_method() |
260 | return newreq | 260 | return newreq |
261 | exported_proxies = export_proxies(d) | 261 | exported_proxies = export_proxies(d) |
262 | 262 | ||
263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] | 263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] |
264 | if export_proxies: | 264 | if export_proxies: |
265 | handlers.append(urllib2.ProxyHandler()) | 265 | handlers.append(urllib.request.ProxyHandler()) |
266 | handlers.append(CacheHTTPHandler()) | 266 | handlers.append(CacheHTTPHandler()) |
267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default | 267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default |
268 | # see PEP-0476, this causes verification errors on some https servers | 268 | # see PEP-0476, this causes verification errors on some https servers |
269 | # so disable by default. | 269 | # so disable by default. |
270 | import ssl | 270 | import ssl |
271 | if hasattr(ssl, '_create_unverified_context'): | 271 | if hasattr(ssl, '_create_unverified_context'): |
272 | handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context())) | 272 | handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) |
273 | opener = urllib2.build_opener(*handlers) | 273 | opener = urllib.request.build_opener(*handlers) |
274 | 274 | ||
275 | try: | 275 | try: |
276 | uri = ud.url.split(";")[0] | 276 | uri = ud.url.split(";")[0] |
277 | r = urllib2.Request(uri) | 277 | r = urllib.request.Request(uri) |
278 | r.get_method = lambda: "HEAD" | 278 | r.get_method = lambda: "HEAD" |
279 | opener.open(r) | 279 | opener.open(r) |
280 | except urllib2.URLError as e: | 280 | except urllib.error.URLError as e: |
281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches |
282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) | 282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) |
283 | return False | 283 | return False |
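Most of the wget.py hunk is the module rename (urllib2 and httplib become urllib.request, urllib.error and http.client) plus the Request accessor changes: get_host() is now the .host attribute and get_selector() is .selector. Stripped of the connection cache, proxy handling and GET fallback that checkstatus() keeps, the HEAD-style availability probe reduces to something like the sketch below (the URL is a placeholder):

    import urllib.request
    import urllib.error

    def uri_exists(uri, timeout=30):
        """Return True if a HEAD request to uri succeeds, False otherwise."""
        req = urllib.request.Request(uri, method="HEAD")
        try:
            with urllib.request.urlopen(req, timeout=timeout):
                return True
        except urllib.error.URLError:
            # HTTPError subclasses URLError, so this mirrors the
            # debug-and-return-False path in the code above.
            return False

    # print(uri_exists("https://downloads.example.com/source.tar.gz"))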
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py index 761ea459cf..b296ef8b8c 100755 --- a/bitbake/lib/bb/main.py +++ b/bitbake/lib/bb/main.py | |||
@@ -27,6 +27,7 @@ import sys | |||
27 | import logging | 27 | import logging |
28 | import optparse | 28 | import optparse |
29 | import warnings | 29 | import warnings |
30 | import fcntl | ||
30 | 31 | ||
31 | import bb | 32 | import bb |
32 | from bb import event | 33 | from bb import event |
@@ -336,10 +337,7 @@ def start_server(servermodule, configParams, configuration, features): | |||
336 | server.saveConnectionDetails() | 337 | server.saveConnectionDetails() |
337 | except Exception as e: | 338 | except Exception as e: |
338 | while hasattr(server, "event_queue"): | 339 | while hasattr(server, "event_queue"): |
339 | try: | 340 | import queue |
340 | import queue | ||
341 | except ImportError: | ||
342 | import Queue as queue | ||
343 | try: | 341 | try: |
344 | event = server.event_queue.get(block=False) | 342 | event = server.event_queue.get(block=False) |
345 | except (queue.Empty, IOError): | 343 | except (queue.Empty, IOError): |
@@ -363,7 +361,10 @@ def bitbake_main(configParams, configuration): | |||
363 | # updates to log files for use with tail | 361 | # updates to log files for use with tail |
364 | try: | 362 | try: |
365 | if sys.stdout.name == '<stdout>': | 363 | if sys.stdout.name == '<stdout>': |
366 | sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) | 364 | # Reopen with O_SYNC (unbuffered) |
365 | fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL) | ||
366 | fl |= os.O_SYNC | ||
367 | fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl) | ||
367 | except: | 368 | except: |
368 | pass | 369 | pass |
369 | 370 | ||
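Two Python 3 details drive the main.py hunk: the Queue module is now queue, so the try/except import fallback collapses to a plain import, and os.fdopen(fd, 'w', 0) is no longer allowed because unbuffered mode is binary-only. The patch therefore flips O_SYNC on the existing descriptor with fcntl instead of reopening stdout. A minimal sketch of that step:

    import fcntl
    import os
    import sys

    def make_stdout_sync():
        # Python 3 rejects unbuffered text streams, so instead of reopening
        # stdout the O_SYNC flag is set on its file descriptor, as above.
        fd = sys.stdout.fileno()
        fl = fcntl.fcntl(fd, fcntl.F_GETFL)
        fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_SYNC)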
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py index 6fdd1f52a0..8c3ab47623 100644 --- a/bitbake/lib/bb/msg.py +++ b/bitbake/lib/bb/msg.py | |||
@@ -57,7 +57,7 @@ class BBLogFormatter(logging.Formatter): | |||
57 | } | 57 | } |
58 | 58 | ||
59 | color_enabled = False | 59 | color_enabled = False |
60 | BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38) | 60 | BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(29,38)) |
61 | 61 | ||
62 | COLORS = { | 62 | COLORS = { |
63 | DEBUG3 : CYAN, | 63 | DEBUG3 : CYAN, |
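Unpacking nine names from range(29, 38) works directly in Python 3, since any iterable can be unpacked; the list() wrapper is the conversion tool's cautious default rather than a requirement:

    # Both forms unpack identically in Python 3.
    BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29, 38)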
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index bc3a2f8826..548929f63d 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
@@ -138,7 +138,7 @@ class DataNode(AstNode): | |||
138 | data.setVar(key, val, parsing=True, **loginfo) | 138 | data.setVar(key, val, parsing=True, **loginfo) |
139 | 139 | ||
140 | class MethodNode(AstNode): | 140 | class MethodNode(AstNode): |
141 | tr_tbl = string.maketrans('/.+-@%&', '_______') | 141 | tr_tbl = str.maketrans('/.+-@%&', '_______') |
142 | 142 | ||
143 | def __init__(self, filename, lineno, func_name, body, python, fakeroot): | 143 | def __init__(self, filename, lineno, func_name, body, python, fakeroot): |
144 | AstNode.__init__(self, filename, lineno) | 144 | AstNode.__init__(self, filename, lineno) |
@@ -340,17 +340,17 @@ def _create_variants(datastores, names, function, onlyfinalise): | |||
340 | function(arg or name, new_d) | 340 | function(arg or name, new_d) |
341 | datastores[name] = new_d | 341 | datastores[name] = new_d |
342 | 342 | ||
343 | for variant, variant_d in datastores.items(): | 343 | for variant in list(datastores.keys()): |
344 | for name in names: | 344 | for name in names: |
345 | if not variant: | 345 | if not variant: |
346 | # Based on main recipe | 346 | # Based on main recipe |
347 | create_variant(name, variant_d) | 347 | create_variant(name, datastores[""]) |
348 | else: | 348 | else: |
349 | create_variant("%s-%s" % (variant, name), variant_d, name) | 349 | create_variant("%s-%s" % (variant, name), datastores[variant], name) |
350 | 350 | ||
351 | def _expand_versions(versions): | 351 | def _expand_versions(versions): |
352 | def expand_one(version, start, end): | 352 | def expand_one(version, start, end): |
353 | for i in xrange(start, end + 1): | 353 | for i in range(start, end + 1): |
354 | ver = _bbversions_re.sub(str(i), version, 1) | 354 | ver = _bbversions_re.sub(str(i), version, 1) |
355 | yield ver | 355 | yield ver |
356 | 356 | ||
@@ -459,16 +459,16 @@ def multi_finalize(fn, d): | |||
459 | safe_d.setVar("BBCLASSEXTEND", extended) | 459 | safe_d.setVar("BBCLASSEXTEND", extended) |
460 | _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) | 460 | _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) |
461 | 461 | ||
462 | for variant, variant_d in datastores.iteritems(): | 462 | for variant in datastores.keys(): |
463 | if variant: | 463 | if variant: |
464 | try: | 464 | try: |
465 | if not onlyfinalise or variant in onlyfinalise: | 465 | if not onlyfinalise or variant in onlyfinalise: |
466 | finalize(fn, variant_d, variant) | 466 | finalize(fn, datastores[variant], variant) |
467 | except bb.parse.SkipRecipe as e: | 467 | except bb.parse.SkipRecipe as e: |
468 | variant_d.setVar("__SKIPPED", e.args[0]) | 468 | datastores[variant].setVar("__SKIPPED", e.args[0]) |
469 | 469 | ||
470 | if len(datastores) > 1: | 470 | if len(datastores) > 1: |
471 | variants = filter(None, datastores.iterkeys()) | 471 | variants = filter(None, datastores.keys()) |
472 | safe_d.setVar("__VARIANTS", " ".join(variants)) | 472 | safe_d.setVar("__VARIANTS", " ".join(variants)) |
473 | 473 | ||
474 | datastores[""] = d | 474 | datastores[""] = d |
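string.maketrans() is gone in Python 3; str.maketrans() builds the equivalent translation table. The variant loops also switch to iterating a snapshot of the datastore keys, for the same reason as the connection-cache change earlier: the loop body may add or replace entries. A small sketch of the name-sanitising step with an invented input:

    tr_tbl = str.maketrans('/.+-@%&', '_______')
    # Characters that are illegal in Python identifiers become underscores,
    # much as MethodNode does for generated function names.
    print("recipes-core/busybox_1.24.1.bb".translate(tr_tbl))
    # -> recipes_core_busybox_1_24_1_bb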
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index e45042324e..403f3a541e 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py | |||
@@ -92,9 +92,9 @@ class SQLTable(collections.MutableMapping): | |||
92 | self._execute("DELETE from %s where key=?;" % self.table, [key]) | 92 | self._execute("DELETE from %s where key=?;" % self.table, [key]) |
93 | 93 | ||
94 | def __setitem__(self, key, value): | 94 | def __setitem__(self, key, value): |
95 | if not isinstance(key, basestring): | 95 | if not isinstance(key, str): |
96 | raise TypeError('Only string keys are supported') | 96 | raise TypeError('Only string keys are supported') |
97 | elif not isinstance(value, basestring): | 97 | elif not isinstance(value, str): |
98 | raise TypeError('Only string values are supported') | 98 | raise TypeError('Only string values are supported') |
99 | 99 | ||
100 | data = self._execute("SELECT * from %s where key=?;" % | 100 | data = self._execute("SELECT * from %s where key=?;" % |
@@ -131,14 +131,14 @@ class SQLTable(collections.MutableMapping): | |||
131 | return [row[1] for row in data] | 131 | return [row[1] for row in data] |
132 | 132 | ||
133 | def values(self): | 133 | def values(self): |
134 | return list(self.itervalues()) | 134 | return list(self.values()) |
135 | 135 | ||
136 | def itervalues(self): | 136 | def itervalues(self): |
137 | data = self._execute("SELECT value FROM %s;" % self.table) | 137 | data = self._execute("SELECT value FROM %s;" % self.table) |
138 | return (row[0] for row in data) | 138 | return (row[0] for row in data) |
139 | 139 | ||
140 | def items(self): | 140 | def items(self): |
141 | return list(self.iteritems()) | 141 | return list(self.items()) |
142 | 142 | ||
143 | def iteritems(self): | 143 | def iteritems(self): |
144 | return self._execute("SELECT * FROM %s;" % self.table) | 144 | return self._execute("SELECT * FROM %s;" % self.table) |
@@ -178,7 +178,7 @@ class PersistData(object): | |||
178 | """ | 178 | """ |
179 | Return a list of key + value pairs for a domain | 179 | Return a list of key + value pairs for a domain |
180 | """ | 180 | """ |
181 | return self.data[domain].items() | 181 | return list(self.data[domain].items()) |
182 | 182 | ||
183 | def getValue(self, domain, key): | 183 | def getValue(self, domain, key): |
184 | """ | 184 | """ |
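basestring disappears in Python 3; str is the single text type, so the key/value guards become plain isinstance checks against str (bytes are rejected like any other non-string). A tiny sketch, with a made-up helper name:

    def _require_str(key, value):
        # Python 2 tested isinstance(x, basestring); str covers all text now.
        if not isinstance(key, str):
            raise TypeError('Only string keys are supported')
        if not isinstance(value, str):
            raise TypeError('Only string values are supported')

    _require_str("KEY", "value")       # fine
    # _require_str(b"KEY", "value")    # would raise TypeError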
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py index 1c07f2d9b7..c62d7bca4f 100644 --- a/bitbake/lib/bb/process.py +++ b/bitbake/lib/bb/process.py | |||
@@ -17,7 +17,7 @@ class CmdError(RuntimeError): | |||
17 | self.msg = msg | 17 | self.msg = msg |
18 | 18 | ||
19 | def __str__(self): | 19 | def __str__(self): |
20 | if not isinstance(self.command, basestring): | 20 | if not isinstance(self.command, str): |
21 | cmd = subprocess.list2cmdline(self.command) | 21 | cmd = subprocess.list2cmdline(self.command) |
22 | else: | 22 | else: |
23 | cmd = self.command | 23 | cmd = self.command |
@@ -97,6 +97,8 @@ def _logged_communicate(pipe, log, input, extrafiles): | |||
97 | try: | 97 | try: |
98 | while pipe.poll() is None: | 98 | while pipe.poll() is None: |
99 | rlist = rin | 99 | rlist = rin |
100 | stdoutbuf = b"" | ||
101 | stderrbuf = b"" | ||
100 | try: | 102 | try: |
101 | r,w,e = select.select (rlist, [], [], 1) | 103 | r,w,e = select.select (rlist, [], [], 1) |
102 | except OSError as e: | 104 | except OSError as e: |
@@ -104,16 +106,26 @@ def _logged_communicate(pipe, log, input, extrafiles): | |||
104 | raise | 106 | raise |
105 | 107 | ||
106 | if pipe.stdout in r: | 108 | if pipe.stdout in r: |
107 | data = pipe.stdout.read() | 109 | data = stdoutbuf + pipe.stdout.read() |
108 | if data is not None: | 110 | if data is not None and len(data) > 0: |
109 | outdata.append(data) | 111 | try: |
110 | log.write(data) | 112 | data = data.decode("utf-8") |
113 | outdata.append(data) | ||
114 | log.write(data) | ||
115 | stdoutbuf = b"" | ||
116 | except UnicodeDecodeError: | ||
117 | stdoutbuf = data | ||
111 | 118 | ||
112 | if pipe.stderr in r: | 119 | if pipe.stderr in r: |
113 | data = pipe.stderr.read() | 120 | data = stderrbuf + pipe.stderr.read() |
114 | if data is not None: | 121 | if data is not None and len(data) > 0: |
115 | errdata.append(data) | 122 | try: |
116 | log.write(data) | 123 | data = data.decode("utf-8") |
124 | errdata.append(data) | ||
125 | log.write(data) | ||
126 | stderrbuf = b"" | ||
127 | except UnicodeDecodeError: | ||
128 | stderrbuf = data | ||
117 | 129 | ||
118 | readextras(r) | 130 | readextras(r) |
119 | 131 | ||
@@ -135,7 +147,7 @@ def run(cmd, input=None, log=None, extrafiles=None, **options): | |||
135 | if not extrafiles: | 147 | if not extrafiles: |
136 | extrafiles = [] | 148 | extrafiles = [] |
137 | 149 | ||
138 | if isinstance(cmd, basestring) and not "shell" in options: | 150 | if isinstance(cmd, str) and not "shell" in options: |
139 | options["shell"] = True | 151 | options["shell"] = True |
140 | 152 | ||
141 | try: | 153 | try: |
@@ -150,6 +162,10 @@ def run(cmd, input=None, log=None, extrafiles=None, **options): | |||
150 | stdout, stderr = _logged_communicate(pipe, log, input, extrafiles) | 162 | stdout, stderr = _logged_communicate(pipe, log, input, extrafiles) |
151 | else: | 163 | else: |
152 | stdout, stderr = pipe.communicate(input) | 164 | stdout, stderr = pipe.communicate(input) |
165 | if stdout: | ||
166 | stdout = stdout.decode("utf-8") | ||
167 | if stderr: | ||
168 | stderr = stderr.decode("utf-8") | ||
153 | 169 | ||
154 | if pipe.returncode != 0: | 170 | if pipe.returncode != 0: |
155 | raise ExecutionError(cmd, pipe.returncode, stdout, stderr) | 171 | raise ExecutionError(cmd, pipe.returncode, stdout, stderr) |
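The process.py changes deal with pipes now delivering bytes: communicate() output is decoded once at the end, while the select() loop has to cope with a read that stops in the middle of a multi-byte UTF-8 character, which is why undecodable data is kept and prepended to the next chunk instead of raising. A self-contained sketch of that buffer-and-retry idea (helper name and sample text are invented):

    def make_utf8_appender():
        buf = b""

        def append(chunk, out):
            nonlocal buf
            # Keep any bytes that do not yet form valid UTF-8 and retry once
            # the next chunk arrives, mirroring the stdout/stderr handling above.
            data = buf + chunk
            try:
                out.append(data.decode("utf-8"))
                buf = b""
            except UnicodeDecodeError:
                buf = data

        return append

    append = make_utf8_appender()
    out = []
    raw = "héllo wörld\n".encode("utf-8")
    append(raw[:2], out)   # chunk ends mid-character: buffered, nothing emitted
    append(raw[2:], out)   # remainder arrives: decoded in one piece
    print("".join(out), end="")   # -> héllo wörld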
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py index 563a091fda..80701b2811 100644 --- a/bitbake/lib/bb/providers.py +++ b/bitbake/lib/bb/providers.py | |||
@@ -245,7 +245,7 @@ def _filterProviders(providers, item, cfgData, dataCache): | |||
245 | pkg_pn[pn] = [] | 245 | pkg_pn[pn] = [] |
246 | pkg_pn[pn].append(p) | 246 | pkg_pn[pn].append(p) |
247 | 247 | ||
248 | logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys()) | 248 | logger.debug(1, "providers for %s are: %s", item, list(pkg_pn.keys())) |
249 | 249 | ||
250 | # First add PREFERRED_VERSIONS | 250 | # First add PREFERRED_VERSIONS |
251 | for pn in pkg_pn: | 251 | for pn in pkg_pn: |
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py index b748e4a4f2..a8814dc330 100644 --- a/bitbake/lib/bb/pysh/builtin.py +++ b/bitbake/lib/bb/pysh/builtin.py | |||
@@ -527,7 +527,7 @@ def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags): | |||
527 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | 527 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') |
528 | 528 | ||
529 | # Scan pattern arguments and append a space if necessary | 529 | # Scan pattern arguments and append a space if necessary |
530 | for i in xrange(len(args)): | 530 | for i in range(len(args)): |
531 | if not RE_SED.search(args[i]): | 531 | if not RE_SED.search(args[i]): |
532 | continue | 532 | continue |
533 | args[i] = args[i] + ' ' | 533 | args[i] = args[i] + ' ' |
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py index 25d8c92ec4..d14ecf3c6d 100644 --- a/bitbake/lib/bb/pysh/interp.py +++ b/bitbake/lib/bb/pysh/interp.py | |||
@@ -474,7 +474,7 @@ class Environment: | |||
474 | """ | 474 | """ |
475 | # Save and remove previous arguments | 475 | # Save and remove previous arguments |
476 | prevargs = [] | 476 | prevargs = [] |
477 | for i in xrange(int(self._env['#'])): | 477 | for i in range(int(self._env['#'])): |
478 | i = str(i+1) | 478 | i = str(i+1) |
479 | prevargs.append(self._env[i]) | 479 | prevargs.append(self._env[i]) |
480 | del self._env[i] | 480 | del self._env[i] |
@@ -488,7 +488,7 @@ class Environment: | |||
488 | return prevargs | 488 | return prevargs |
489 | 489 | ||
490 | def get_positional_args(self): | 490 | def get_positional_args(self): |
491 | return [self._env[str(i+1)] for i in xrange(int(self._env['#']))] | 491 | return [self._env[str(i+1)] for i in range(int(self._env['#']))] |
492 | 492 | ||
493 | def get_variables(self): | 493 | def get_variables(self): |
494 | return dict(self._env) | 494 | return dict(self._env) |
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py index b30123675c..fbf094b7a9 100644 --- a/bitbake/lib/bb/pysh/pyshlex.py +++ b/bitbake/lib/bb/pysh/pyshlex.py | |||
@@ -20,7 +20,7 @@ except NameError: | |||
20 | from Set import Set as set | 20 | from Set import Set as set |
21 | 21 | ||
22 | from ply import lex | 22 | from ply import lex |
23 | from sherrors import * | 23 | from bb.pysh.sherrors import * |
24 | 24 | ||
25 | class NeedMore(Exception): | 25 | class NeedMore(Exception): |
26 | pass | 26 | pass |
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py index e8e80aac45..ba4cefdcb8 100644 --- a/bitbake/lib/bb/pysh/pyshyacc.py +++ b/bitbake/lib/bb/pysh/pyshyacc.py | |||
@@ -10,11 +10,11 @@ | |||
10 | import os.path | 10 | import os.path |
11 | import sys | 11 | import sys |
12 | 12 | ||
13 | import pyshlex | 13 | import bb.pysh.pyshlex as pyshlex |
14 | tokens = pyshlex.tokens | 14 | tokens = pyshlex.tokens |
15 | 15 | ||
16 | from ply import yacc | 16 | from ply import yacc |
17 | import sherrors | 17 | import bb.pysh.sherrors as sherrors |
18 | 18 | ||
19 | class IORedirect: | 19 | class IORedirect: |
20 | def __init__(self, op, filename, io_number=None): | 20 | def __init__(self, op, filename, io_number=None): |
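The pysh changes switch from Python 2's implicit relative imports ("import pyshlex" from a sibling module) to package-qualified ones, which is what Python 3 requires inside a package. Illustration only, assuming bitbake's lib/ directory is on sys.path (e.g. run from a poky checkout):

    import sys
    sys.path.insert(0, "bitbake/lib")        # assumed checkout layout

    # Python 3 needs the package-qualified form the patch adopts, or an
    # explicit relative "from . import pyshlex" inside the bb.pysh package.
    import bb.pysh.pyshlex as pyshlex
    print(pyshlex.__name__)                  # -> bb.pysh.pyshlex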
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 5ab7e97088..376c9f51e8 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
@@ -35,11 +35,7 @@ import bb | |||
35 | from bb import msg, data, event | 35 | from bb import msg, data, event |
36 | from bb import monitordisk | 36 | from bb import monitordisk |
37 | import subprocess | 37 | import subprocess |
38 | 38 | import pickle | |
39 | try: | ||
40 | import cPickle as pickle | ||
41 | except ImportError: | ||
42 | import pickle | ||
43 | 39 | ||
44 | bblogger = logging.getLogger("BitBake") | 40 | bblogger = logging.getLogger("BitBake") |
45 | logger = logging.getLogger("BitBake.RunQueue") | 41 | logger = logging.getLogger("BitBake.RunQueue") |
@@ -108,7 +104,7 @@ class RunQueueScheduler(object): | |||
108 | 104 | ||
109 | self.buildable = [] | 105 | self.buildable = [] |
110 | self.stamps = {} | 106 | self.stamps = {} |
111 | for taskid in xrange(self.numTasks): | 107 | for taskid in range(self.numTasks): |
112 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]] | 108 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]] |
113 | taskname = self.rqdata.runq_task[taskid] | 109 | taskname = self.rqdata.runq_task[taskid] |
114 | self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn) | 110 | self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn) |
@@ -127,12 +123,12 @@ class RunQueueScheduler(object): | |||
127 | if len(self.buildable) == 1: | 123 | if len(self.buildable) == 1: |
128 | taskid = self.buildable[0] | 124 | taskid = self.buildable[0] |
129 | stamp = self.stamps[taskid] | 125 | stamp = self.stamps[taskid] |
130 | if stamp not in self.rq.build_stamps.itervalues(): | 126 | if stamp not in self.rq.build_stamps.values(): |
131 | return taskid | 127 | return taskid |
132 | 128 | ||
133 | if not self.rev_prio_map: | 129 | if not self.rev_prio_map: |
134 | self.rev_prio_map = range(self.numTasks) | 130 | self.rev_prio_map = list(range(self.numTasks)) |
135 | for taskid in xrange(self.numTasks): | 131 | for taskid in range(self.numTasks): |
136 | self.rev_prio_map[self.prio_map[taskid]] = taskid | 132 | self.rev_prio_map[self.prio_map[taskid]] = taskid |
137 | 133 | ||
138 | best = None | 134 | best = None |
@@ -141,7 +137,7 @@ class RunQueueScheduler(object): | |||
141 | prio = self.rev_prio_map[taskid] | 137 | prio = self.rev_prio_map[taskid] |
142 | if bestprio is None or bestprio > prio: | 138 | if bestprio is None or bestprio > prio: |
143 | stamp = self.stamps[taskid] | 139 | stamp = self.stamps[taskid] |
144 | if stamp in self.rq.build_stamps.itervalues(): | 140 | if stamp in self.rq.build_stamps.values(): |
145 | continue | 141 | continue |
146 | bestprio = prio | 142 | bestprio = prio |
147 | best = taskid | 143 | best = taskid |
@@ -269,7 +265,7 @@ class RunQueueData: | |||
269 | 265 | ||
270 | 266 | ||
271 | def get_task_id(self, fnid, taskname): | 267 | def get_task_id(self, fnid, taskname): |
272 | for listid in xrange(len(self.runq_fnid)): | 268 | for listid in range(len(self.runq_fnid)): |
273 | if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname: | 269 | if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname: |
274 | return listid | 270 | return listid |
275 | return None | 271 | return None |
@@ -291,7 +287,7 @@ class RunQueueData: | |||
291 | """ | 287 | """ |
292 | lowest = 0 | 288 | lowest = 0 |
293 | new_chain = [] | 289 | new_chain = [] |
294 | for entry in xrange(len(chain)): | 290 | for entry in range(len(chain)): |
295 | if chain[entry] < chain[lowest]: | 291 | if chain[entry] < chain[lowest]: |
296 | lowest = entry | 292 | lowest = entry |
297 | new_chain.extend(chain[lowest:]) | 293 | new_chain.extend(chain[lowest:]) |
@@ -304,7 +300,7 @@ class RunQueueData: | |||
304 | """ | 300 | """ |
305 | if len(chain1) != len(chain2): | 301 | if len(chain1) != len(chain2): |
306 | return False | 302 | return False |
307 | for index in xrange(len(chain1)): | 303 | for index in range(len(chain1)): |
308 | if chain1[index] != chain2[index]: | 304 | if chain1[index] != chain2[index]: |
309 | return False | 305 | return False |
310 | return True | 306 | return True |
@@ -375,7 +371,7 @@ class RunQueueData: | |||
375 | deps_left = [] | 371 | deps_left = [] |
376 | task_done = [] | 372 | task_done = [] |
377 | 373 | ||
378 | for listid in xrange(numTasks): | 374 | for listid in range(numTasks): |
379 | task_done.append(False) | 375 | task_done.append(False) |
380 | weight.append(1) | 376 | weight.append(1) |
381 | deps_left.append(len(self.runq_revdeps[listid])) | 377 | deps_left.append(len(self.runq_revdeps[listid])) |
@@ -399,7 +395,7 @@ class RunQueueData: | |||
399 | 395 | ||
400 | # Circular dependency sanity check | 396 | # Circular dependency sanity check |
401 | problem_tasks = [] | 397 | problem_tasks = [] |
402 | for task in xrange(numTasks): | 398 | for task in range(numTasks): |
403 | if task_done[task] is False or deps_left[task] != 0: | 399 | if task_done[task] is False or deps_left[task] != 0: |
404 | problem_tasks.append(task) | 400 | problem_tasks.append(task) |
405 | logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task)) | 401 | logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task)) |
@@ -482,7 +478,7 @@ class RunQueueData: | |||
482 | if taskid is not None: | 478 | if taskid is not None: |
483 | depends.add(taskid) | 479 | depends.add(taskid) |
484 | 480 | ||
485 | for task in xrange(len(taskData.tasks_name)): | 481 | for task in range(len(taskData.tasks_name)): |
486 | depends = set() | 482 | depends = set() |
487 | fnid = taskData.tasks_fnid[task] | 483 | fnid = taskData.tasks_fnid[task] |
488 | fn = taskData.fn_index[fnid] | 484 | fn = taskData.fn_index[fnid] |
@@ -597,7 +593,7 @@ class RunQueueData: | |||
597 | for task in recursivetasks: | 593 | for task in recursivetasks: |
598 | extradeps[task].difference_update(recursivetasksselfref) | 594 | extradeps[task].difference_update(recursivetasksselfref) |
599 | 595 | ||
600 | for task in xrange(len(taskData.tasks_name)): | 596 | for task in range(len(taskData.tasks_name)): |
601 | # Add in extra dependencies | 597 | # Add in extra dependencies |
602 | if task in extradeps: | 598 | if task in extradeps: |
603 | self.runq_depends[task] = extradeps[task] | 599 | self.runq_depends[task] = extradeps[task] |
@@ -675,7 +671,7 @@ class RunQueueData: | |||
675 | 671 | ||
676 | maps = [] | 672 | maps = [] |
677 | delcount = 0 | 673 | delcount = 0 |
678 | for listid in xrange(len(self.runq_fnid)): | 674 | for listid in range(len(self.runq_fnid)): |
679 | if runq_build[listid-delcount] == 1: | 675 | if runq_build[listid-delcount] == 1: |
680 | maps.append(listid-delcount) | 676 | maps.append(listid-delcount) |
681 | else: | 677 | else: |
@@ -703,7 +699,7 @@ class RunQueueData: | |||
703 | 699 | ||
704 | # Remap the dependencies to account for the deleted tasks | 700 | # Remap the dependencies to account for the deleted tasks |
705 | # Check we didn't delete a task we depend on | 701 | # Check we didn't delete a task we depend on |
706 | for listid in xrange(len(self.runq_fnid)): | 702 | for listid in range(len(self.runq_fnid)): |
707 | newdeps = [] | 703 | newdeps = [] |
708 | origdeps = self.runq_depends[listid] | 704 | origdeps = self.runq_depends[listid] |
709 | for origdep in origdeps: | 705 | for origdep in origdeps: |
@@ -715,14 +711,14 @@ class RunQueueData: | |||
715 | logger.verbose("Assign Weightings") | 711 | logger.verbose("Assign Weightings") |
716 | 712 | ||
717 | # Generate a list of reverse dependencies to ease future calculations | 713 | # Generate a list of reverse dependencies to ease future calculations |
718 | for listid in xrange(len(self.runq_fnid)): | 714 | for listid in range(len(self.runq_fnid)): |
719 | for dep in self.runq_depends[listid]: | 715 | for dep in self.runq_depends[listid]: |
720 | self.runq_revdeps[dep].add(listid) | 716 | self.runq_revdeps[dep].add(listid) |
721 | 717 | ||
722 | # Identify tasks at the end of dependency chains | 718 | # Identify tasks at the end of dependency chains |
723 | # Error on circular dependency loops (length two) | 719 | # Error on circular dependency loops (length two) |
724 | endpoints = [] | 720 | endpoints = [] |
725 | for listid in xrange(len(self.runq_fnid)): | 721 | for listid in range(len(self.runq_fnid)): |
726 | revdeps = self.runq_revdeps[listid] | 722 | revdeps = self.runq_revdeps[listid] |
727 | if len(revdeps) == 0: | 723 | if len(revdeps) == 0: |
728 | endpoints.append(listid) | 724 | endpoints.append(listid) |
@@ -740,7 +736,7 @@ class RunQueueData: | |||
740 | # Sanity Check - Check for multiple tasks building the same provider | 736 | # Sanity Check - Check for multiple tasks building the same provider |
741 | prov_list = {} | 737 | prov_list = {} |
742 | seen_fn = [] | 738 | seen_fn = [] |
743 | for task in xrange(len(self.runq_fnid)): | 739 | for task in range(len(self.runq_fnid)): |
744 | fn = taskData.fn_index[self.runq_fnid[task]] | 740 | fn = taskData.fn_index[self.runq_fnid[task]] |
745 | if fn in seen_fn: | 741 | if fn in seen_fn: |
746 | continue | 742 | continue |
@@ -905,7 +901,7 @@ class RunQueueData: | |||
905 | Dump some debug information on the internal data structures | 901 | Dump some debug information on the internal data structures |
906 | """ | 902 | """ |
907 | logger.debug(3, "run_tasks:") | 903 | logger.debug(3, "run_tasks:") |
908 | for task in xrange(len(self.rqdata.runq_task)): | 904 | for task in range(len(self.rqdata.runq_task)): |
909 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, | 905 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, |
910 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], | 906 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], |
911 | self.rqdata.runq_task[task], | 907 | self.rqdata.runq_task[task], |
@@ -914,7 +910,7 @@ class RunQueueData: | |||
914 | self.rqdata.runq_revdeps[task]) | 910 | self.rqdata.runq_revdeps[task]) |
915 | 911 | ||
916 | logger.debug(3, "sorted_tasks:") | 912 | logger.debug(3, "sorted_tasks:") |
917 | for task1 in xrange(len(self.rqdata.runq_task)): | 913 | for task1 in range(len(self.rqdata.runq_task)): |
918 | if task1 in self.prio_map: | 914 | if task1 in self.prio_map: |
919 | task = self.prio_map[task1] | 915 | task = self.prio_map[task1] |
920 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, | 916 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, |
@@ -982,8 +978,8 @@ class RunQueue: | |||
982 | "time" : self.cfgData.getVar("TIME", True), | 978 | "time" : self.cfgData.getVar("TIME", True), |
983 | } | 979 | } |
984 | 980 | ||
985 | worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>") | 981 | worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") |
986 | worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>") | 982 | worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>") |
987 | worker.stdin.flush() | 983 | worker.stdin.flush() |
988 | 984 | ||
989 | return worker, workerpipe | 985 | return worker, workerpipe |
@@ -993,8 +989,9 @@ class RunQueue: | |||
993 | return | 989 | return |
994 | logger.debug(1, "Teardown for bitbake-worker") | 990 | logger.debug(1, "Teardown for bitbake-worker") |
995 | try: | 991 | try: |
996 | worker.stdin.write("<quit></quit>") | 992 | worker.stdin.write(b"<quit></quit>") |
997 | worker.stdin.flush() | 993 | worker.stdin.flush() |
994 | worker.stdin.close() | ||
998 | except IOError: | 995 | except IOError: |
999 | pass | 996 | pass |
1000 | while worker.returncode is None: | 997 | while worker.returncode is None: |
@@ -1245,7 +1242,7 @@ class RunQueue: | |||
1245 | stamppresent = [] | 1242 | stamppresent = [] |
1246 | valid_new = set() | 1243 | valid_new = set() |
1247 | 1244 | ||
1248 | for task in xrange(len(self.rqdata.runq_fnid)): | 1245 | for task in range(len(self.rqdata.runq_fnid)): |
1249 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 1246 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
1250 | taskname = self.rqdata.runq_task[task] | 1247 | taskname = self.rqdata.runq_task[task] |
1251 | taskdep = self.rqdata.dataCache.task_deps[fn] | 1248 | taskdep = self.rqdata.dataCache.task_deps[fn] |
@@ -1287,7 +1284,7 @@ class RunQueue: | |||
1287 | valid_new.add(dep) | 1284 | valid_new.add(dep) |
1288 | 1285 | ||
1289 | invalidtasks = set() | 1286 | invalidtasks = set() |
1290 | for task in xrange(len(self.rqdata.runq_fnid)): | 1287 | for task in range(len(self.rqdata.runq_fnid)): |
1291 | if task not in valid_new and task not in noexec: | 1288 | if task not in valid_new and task not in noexec: |
1292 | invalidtasks.add(task) | 1289 | invalidtasks.add(task) |
1293 | 1290 | ||
@@ -1346,7 +1343,7 @@ class RunQueue: | |||
1346 | match = m | 1343 | match = m |
1347 | if match is None: | 1344 | if match is None: |
1348 | bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h) | 1345 | bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h) |
1349 | matches = {k : v for k, v in matches.iteritems() if h not in k} | 1346 | matches = {k : v for k, v in iter(matches.items()) if h not in k} |
1350 | if matches: | 1347 | if matches: |
1351 | latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1] | 1348 | latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1] |
1352 | prevh = __find_md5__.search(latestmatch).group(0) | 1349 | prevh = __find_md5__.search(latestmatch).group(0) |
@@ -1395,17 +1392,15 @@ class RunQueueExecute: | |||
1395 | return True | 1392 | return True |
1396 | 1393 | ||
1397 | def finish_now(self): | 1394 | def finish_now(self): |
1398 | |||
1399 | for worker in [self.rq.worker, self.rq.fakeworker]: | 1395 | for worker in [self.rq.worker, self.rq.fakeworker]: |
1400 | if not worker: | 1396 | if not worker: |
1401 | continue | 1397 | continue |
1402 | try: | 1398 | try: |
1403 | worker.stdin.write("<finishnow></finishnow>") | 1399 | worker.stdin.write(b"<finishnow></finishnow>") |
1404 | worker.stdin.flush() | 1400 | worker.stdin.flush() |
1405 | except IOError: | 1401 | except IOError: |
1406 | # worker must have died? | 1402 | # worker must have died? |
1407 | pass | 1403 | pass |
1408 | |||
1409 | if len(self.failed_fnids) != 0: | 1404 | if len(self.failed_fnids) != 0: |
1410 | self.rq.state = runQueueFailed | 1405 | self.rq.state = runQueueFailed |
1411 | return | 1406 | return |
@@ -1468,7 +1463,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
1468 | initial_covered = self.rq.scenequeue_covered.copy() | 1463 | initial_covered = self.rq.scenequeue_covered.copy() |
1469 | 1464 | ||
1470 | # Mark initial buildable tasks | 1465 | # Mark initial buildable tasks |
1471 | for task in xrange(self.stats.total): | 1466 | for task in range(self.stats.total): |
1472 | self.runq_running.append(0) | 1467 | self.runq_running.append(0) |
1473 | self.runq_complete.append(0) | 1468 | self.runq_complete.append(0) |
1474 | if len(self.rqdata.runq_depends[task]) == 0: | 1469 | if len(self.rqdata.runq_depends[task]) == 0: |
@@ -1481,7 +1476,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
1481 | found = True | 1476 | found = True |
1482 | while found: | 1477 | while found: |
1483 | found = False | 1478 | found = False |
1484 | for task in xrange(self.stats.total): | 1479 | for task in range(self.stats.total): |
1485 | if task in self.rq.scenequeue_covered: | 1480 | if task in self.rq.scenequeue_covered: |
1486 | continue | 1481 | continue |
1487 | logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task]))) | 1482 | logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task]))) |
@@ -1496,7 +1491,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
1496 | covered_remove = set() | 1491 | covered_remove = set() |
1497 | if self.rq.setsceneverify: | 1492 | if self.rq.setsceneverify: |
1498 | invalidtasks = [] | 1493 | invalidtasks = [] |
1499 | for task in xrange(len(self.rqdata.runq_task)): | 1494 | for task in range(len(self.rqdata.runq_task)): |
1500 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 1495 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
1501 | taskname = self.rqdata.runq_task[task] | 1496 | taskname = self.rqdata.runq_task[task] |
1502 | taskdep = self.rqdata.dataCache.task_deps[fn] | 1497 | taskdep = self.rqdata.dataCache.task_deps[fn] |
@@ -1684,10 +1679,10 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
1684 | logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc))) | 1679 | logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc))) |
1685 | self.rq.state = runQueueFailed | 1680 | self.rq.state = runQueueFailed |
1686 | return True | 1681 | return True |
1687 | self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>") | 1682 | self.rq.fakeworker.stdin.write(b"<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>") |
1688 | self.rq.fakeworker.stdin.flush() | 1683 | self.rq.fakeworker.stdin.flush() |
1689 | else: | 1684 | else: |
1690 | self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>") | 1685 | self.rq.worker.stdin.write(b"<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>") |
1691 | self.rq.worker.stdin.flush() | 1686 | self.rq.worker.stdin.flush() |
1692 | 1687 | ||
1693 | self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn) | 1688 | self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn) |
@@ -1706,7 +1701,7 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
1706 | return True | 1701 | return True |
1707 | 1702 | ||
1708 | # Sanity Checks | 1703 | # Sanity Checks |
1709 | for task in xrange(self.stats.total): | 1704 | for task in range(self.stats.total): |
1710 | if self.runq_buildable[task] == 0: | 1705 | if self.runq_buildable[task] == 0: |
1711 | logger.error("Task %s never buildable!", task) | 1706 | logger.error("Task %s never buildable!", task) |
1712 | if self.runq_running[task] == 0: | 1707 | if self.runq_running[task] == 0: |
@@ -1764,14 +1759,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1764 | # therefore aims to collapse the huge runqueue dependency tree into a smaller one | 1759 | # therefore aims to collapse the huge runqueue dependency tree into a smaller one |
1765 | # only containing the setscene functions. | 1760 | # only containing the setscene functions. |
1766 | 1761 | ||
1767 | for task in xrange(self.stats.total): | 1762 | for task in range(self.stats.total): |
1768 | self.runq_running.append(0) | 1763 | self.runq_running.append(0) |
1769 | self.runq_complete.append(0) | 1764 | self.runq_complete.append(0) |
1770 | self.runq_buildable.append(0) | 1765 | self.runq_buildable.append(0) |
1771 | 1766 | ||
1772 | # First process the chains up to the first setscene task. | 1767 | # First process the chains up to the first setscene task. |
1773 | endpoints = {} | 1768 | endpoints = {} |
1774 | for task in xrange(len(self.rqdata.runq_fnid)): | 1769 | for task in range(len(self.rqdata.runq_fnid)): |
1775 | sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task])) | 1770 | sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task])) |
1776 | sq_revdeps_new.append(set()) | 1771 | sq_revdeps_new.append(set()) |
1777 | if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene: | 1772 | if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene: |
@@ -1833,7 +1828,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1833 | newendpoints[dep] = tasks | 1828 | newendpoints[dep] = tasks |
1834 | if len(newendpoints) != 0: | 1829 | if len(newendpoints) != 0: |
1835 | process_endpoints2(newendpoints) | 1830 | process_endpoints2(newendpoints) |
1836 | for task in xrange(len(self.rqdata.runq_fnid)): | 1831 | for task in range(len(self.rqdata.runq_fnid)): |
1837 | sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task])) | 1832 | sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task])) |
1838 | sq_revdeps_new2.append(set()) | 1833 | sq_revdeps_new2.append(set()) |
1839 | if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene: | 1834 | if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene: |
@@ -1844,7 +1839,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1844 | if sq_revdeps_new2[task]: | 1839 | if sq_revdeps_new2[task]: |
1845 | self.unskippable.append(self.rqdata.runq_setscene.index(task)) | 1840 | self.unskippable.append(self.rqdata.runq_setscene.index(task)) |
1846 | 1841 | ||
1847 | for task in xrange(len(self.rqdata.runq_fnid)): | 1842 | for task in range(len(self.rqdata.runq_fnid)): |
1848 | if task in self.rqdata.runq_setscene: | 1843 | if task in self.rqdata.runq_setscene: |
1849 | deps = set() | 1844 | deps = set() |
1850 | for dep in sq_revdeps_new[task]: | 1845 | for dep in sq_revdeps_new[task]: |
@@ -1883,7 +1878,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1883 | for dep in self.sq_harddeps[task]: | 1878 | for dep in self.sq_harddeps[task]: |
1884 | sq_revdeps_squash[dep].add(task) | 1879 | sq_revdeps_squash[dep].add(task) |
1885 | 1880 | ||
1886 | #for task in xrange(len(sq_revdeps_squash)): | 1881 | #for task in range(len(sq_revdeps_squash)): |
1887 | # realtask = self.rqdata.runq_setscene[task] | 1882 | # realtask = self.rqdata.runq_setscene[task] |
1888 | # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task])) | 1883 | # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task])) |
1889 | 1884 | ||
@@ -1891,13 +1886,13 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1891 | self.sq_revdeps = sq_revdeps_squash | 1886 | self.sq_revdeps = sq_revdeps_squash |
1892 | self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps) | 1887 | self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps) |
1893 | 1888 | ||
1894 | for task in xrange(len(self.sq_revdeps)): | 1889 | for task in range(len(self.sq_revdeps)): |
1895 | self.sq_deps.append(set()) | 1890 | self.sq_deps.append(set()) |
1896 | for task in xrange(len(self.sq_revdeps)): | 1891 | for task in range(len(self.sq_revdeps)): |
1897 | for dep in self.sq_revdeps[task]: | 1892 | for dep in self.sq_revdeps[task]: |
1898 | self.sq_deps[dep].add(task) | 1893 | self.sq_deps[dep].add(task) |
1899 | 1894 | ||
1900 | for task in xrange(len(self.sq_revdeps)): | 1895 | for task in range(len(self.sq_revdeps)): |
1901 | if len(self.sq_revdeps[task]) == 0: | 1896 | if len(self.sq_revdeps[task]) == 0: |
1902 | self.runq_buildable[task] = 1 | 1897 | self.runq_buildable[task] = 1 |
1903 | 1898 | ||
@@ -1910,7 +1905,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1910 | sq_task = [] | 1905 | sq_task = [] |
1911 | noexec = [] | 1906 | noexec = [] |
1912 | stamppresent = [] | 1907 | stamppresent = [] |
1913 | for task in xrange(len(self.sq_revdeps)): | 1908 | for task in range(len(self.sq_revdeps)): |
1914 | realtask = self.rqdata.runq_setscene[task] | 1909 | realtask = self.rqdata.runq_setscene[task] |
1915 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]] | 1910 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]] |
1916 | taskname = self.rqdata.runq_task[realtask] | 1911 | taskname = self.rqdata.runq_task[realtask] |
@@ -1947,7 +1942,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
1947 | for v in valid: | 1942 | for v in valid: |
1948 | valid_new.append(sq_task[v]) | 1943 | valid_new.append(sq_task[v]) |
1949 | 1944 | ||
1950 | for task in xrange(len(self.sq_revdeps)): | 1945 | for task in range(len(self.sq_revdeps)): |
1951 | if task not in valid_new and task not in noexec: | 1946 | if task not in valid_new and task not in noexec: |
1952 | realtask = self.rqdata.runq_setscene[task] | 1947 | realtask = self.rqdata.runq_setscene[task] |
1953 | logger.debug(2, 'No package found, so skipping setscene task %s', | 1948 | logger.debug(2, 'No package found, so skipping setscene task %s', |
@@ -2024,7 +2019,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
2024 | task = None | 2019 | task = None |
2025 | if self.stats.active < self.number_tasks: | 2020 | if self.stats.active < self.number_tasks: |
2026 | # Find the next setscene to run | 2021 | # Find the next setscene to run |
2027 | for nexttask in xrange(self.stats.total): | 2022 | for nexttask in range(self.stats.total): |
2028 | if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1: | 2023 | if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1: |
2029 | if nexttask in self.unskippable: | 2024 | if nexttask in self.unskippable: |
2030 | logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask])) | 2025 | logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask])) |
@@ -2076,10 +2071,10 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
2076 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: | 2071 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: |
2077 | if not self.rq.fakeworker: | 2072 | if not self.rq.fakeworker: |
2078 | self.rq.start_fakeworker(self) | 2073 | self.rq.start_fakeworker(self) |
2079 | self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>") | 2074 | self.rq.fakeworker.stdin.write(b"<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + b"</runtask>") |
2080 | self.rq.fakeworker.stdin.flush() | 2075 | self.rq.fakeworker.stdin.flush() |
2081 | else: | 2076 | else: |
2082 | self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>") | 2077 | self.rq.worker.stdin.write(b"<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + b"</runtask>") |
2083 | self.rq.worker.stdin.flush() | 2078 | self.rq.worker.stdin.flush() |
2084 | 2079 | ||
2085 | self.runq_running[task] = 1 | 2080 | self.runq_running[task] = 1 |
@@ -2091,7 +2086,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
2091 | self.rq.read_workers() | 2086 | self.rq.read_workers() |
2092 | return self.rq.active_fds() | 2087 | return self.rq.active_fds() |
2093 | 2088 | ||
2094 | #for task in xrange(self.stats.total): | 2089 | #for task in range(self.stats.total): |
2095 | # if self.runq_running[task] != 1: | 2090 | # if self.runq_running[task] != 1: |
2096 | # buildable = self.runq_buildable[task] | 2091 | # buildable = self.runq_buildable[task] |
2097 | # revdeps = self.sq_revdeps[task] | 2092 | # revdeps = self.sq_revdeps[task] |
@@ -2227,7 +2222,7 @@ class runQueuePipe(): | |||
2227 | if pipeout: | 2222 | if pipeout: |
2228 | pipeout.close() | 2223 | pipeout.close() |
2229 | bb.utils.nonblockingfd(self.input) | 2224 | bb.utils.nonblockingfd(self.input) |
2230 | self.queue = "" | 2225 | self.queue = b"" |
2231 | self.d = d | 2226 | self.d = d |
2232 | self.rq = rq | 2227 | self.rq = rq |
2233 | self.rqexec = rqexec | 2228 | self.rqexec = rqexec |
@@ -2251,7 +2246,7 @@ class runQueuePipe(): | |||
2251 | 2246 | ||
2252 | start = len(self.queue) | 2247 | start = len(self.queue) |
2253 | try: | 2248 | try: |
2254 | self.queue = self.queue + self.input.read(102400) | 2249 | self.queue = self.queue + (self.input.read(102400) or b"") |
2255 | except (OSError, IOError) as e: | 2250 | except (OSError, IOError) as e: |
2256 | if e.errno != errno.EAGAIN: | 2251 | if e.errno != errno.EAGAIN: |
2257 | raise | 2252 | raise |
@@ -2259,8 +2254,8 @@ class runQueuePipe(): | |||
2259 | found = True | 2254 | found = True |
2260 | while found and len(self.queue): | 2255 | while found and len(self.queue): |
2261 | found = False | 2256 | found = False |
2262 | index = self.queue.find("</event>") | 2257 | index = self.queue.find(b"</event>") |
2263 | while index != -1 and self.queue.startswith("<event>"): | 2258 | while index != -1 and self.queue.startswith(b"<event>"): |
2264 | try: | 2259 | try: |
2265 | event = pickle.loads(self.queue[7:index]) | 2260 | event = pickle.loads(self.queue[7:index]) |
2266 | except ValueError as e: | 2261 | except ValueError as e: |
@@ -2268,9 +2263,9 @@ class runQueuePipe(): | |||
2268 | bb.event.fire_from_worker(event, self.d) | 2263 | bb.event.fire_from_worker(event, self.d) |
2269 | found = True | 2264 | found = True |
2270 | self.queue = self.queue[index+8:] | 2265 | self.queue = self.queue[index+8:] |
2271 | index = self.queue.find("</event>") | 2266 | index = self.queue.find(b"</event>") |
2272 | index = self.queue.find("</exitcode>") | 2267 | index = self.queue.find(b"</exitcode>") |
2273 | while index != -1 and self.queue.startswith("<exitcode>"): | 2268 | while index != -1 and self.queue.startswith(b"<exitcode>"): |
2274 | try: | 2269 | try: |
2275 | task, status = pickle.loads(self.queue[10:index]) | 2270 | task, status = pickle.loads(self.queue[10:index]) |
2276 | except ValueError as e: | 2271 | except ValueError as e: |
@@ -2278,7 +2273,7 @@ class runQueuePipe(): | |||
2278 | self.rqexec.runqueue_process_waitpid(task, status) | 2273 | self.rqexec.runqueue_process_waitpid(task, status) |
2279 | found = True | 2274 | found = True |
2280 | self.queue = self.queue[index+11:] | 2275 | self.queue = self.queue[index+11:] |
2281 | index = self.queue.find("</exitcode>") | 2276 | index = self.queue.find(b"</exitcode>") |
2282 | return (end > start) | 2277 | return (end > start) |
2283 | 2278 | ||
2284 | def close(self): | 2279 | def close(self): |
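The runqueue changes above all follow from pickle.dumps() returning bytes on Python 3: the <runtask>/<event>/<exitcode> framing markers and the read buffer therefore have to be bytes as well. A minimal sketch of the same framing idiom (the tag and payload here are illustrative, not BitBake's actual protocol):

    import pickle

    def frame(payload):
        # pickle.dumps() yields bytes on Python 3, so the markers must be bytes too
        return b"<event>" + pickle.dumps(payload) + b"</event>"

    buf = frame({"task": "do_compile", "status": 0})

    # Mirrors runQueuePipe.read(): locate the closing marker, unpickle the middle
    end = buf.find(b"</event>")
    if end != -1 and buf.startswith(b"<event>"):
        print(pickle.loads(buf[len(b"<event>"):end]))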
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py index cc58c720a2..982fcf71c3 100644 --- a/bitbake/lib/bb/server/process.py +++ b/bitbake/lib/bb/server/process.py | |||
@@ -30,7 +30,7 @@ import signal | |||
30 | import sys | 30 | import sys |
31 | import time | 31 | import time |
32 | import select | 32 | import select |
33 | from Queue import Empty | 33 | from queue import Empty |
34 | from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager | 34 | from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager |
35 | 35 | ||
36 | from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer | 36 | from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer |
@@ -137,7 +137,7 @@ class ProcessServer(Process, BaseImplServer): | |||
137 | if not fds: | 137 | if not fds: |
138 | fds = [] | 138 | fds = [] |
139 | 139 | ||
140 | for function, data in self._idlefuns.items(): | 140 | for function, data in list(self._idlefuns.items()): |
141 | try: | 141 | try: |
142 | retval = function(self, data, False) | 142 | retval = function(self, data, False) |
143 | if retval is False: | 143 | if retval is False: |
@@ -145,7 +145,7 @@ class ProcessServer(Process, BaseImplServer): | |||
145 | nextsleep = None | 145 | nextsleep = None |
146 | elif retval is True: | 146 | elif retval is True: |
147 | nextsleep = None | 147 | nextsleep = None |
148 | elif isinstance(retval, float): | 148 | elif isinstance(retval, float) and nextsleep: |
149 | if (retval < nextsleep): | 149 | if (retval < nextsleep): |
150 | nextsleep = retval | 150 | nextsleep = retval |
151 | elif nextsleep is None: | 151 | elif nextsleep is None: |
@@ -213,7 +213,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection): | |||
213 | # Wrap Queue to provide API which isn't server implementation specific | 213 | # Wrap Queue to provide API which isn't server implementation specific |
214 | class ProcessEventQueue(multiprocessing.queues.Queue): | 214 | class ProcessEventQueue(multiprocessing.queues.Queue): |
215 | def __init__(self, maxsize): | 215 | def __init__(self, maxsize): |
216 | multiprocessing.queues.Queue.__init__(self, maxsize) | 216 | multiprocessing.queues.Queue.__init__(self, maxsize, ctx=multiprocessing.get_context()) |
217 | self.exit = False | 217 | self.exit = False |
218 | bb.utils.set_process_name("ProcessEQueue") | 218 | bb.utils.set_process_name("ProcessEQueue") |
219 | 219 | ||
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py index ace1cf646b..146ca17b51 100644 --- a/bitbake/lib/bb/server/xmlrpc.py +++ b/bitbake/lib/bb/server/xmlrpc.py | |||
@@ -31,31 +31,33 @@ | |||
31 | in the server's main loop. | 31 | in the server's main loop. |
32 | """ | 32 | """ |
33 | 33 | ||
34 | import os | ||
35 | import sys | ||
36 | |||
37 | import hashlib | ||
38 | import time | ||
39 | import socket | ||
40 | import signal | ||
41 | import threading | ||
42 | import pickle | ||
43 | import inspect | ||
44 | import select | ||
45 | import http.client | ||
46 | import xmlrpc.client | ||
47 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | ||
48 | |||
34 | import bb | 49 | import bb |
35 | import xmlrpclib, sys | ||
36 | from bb import daemonize | 50 | from bb import daemonize |
37 | from bb.ui import uievent | 51 | from bb.ui import uievent |
38 | import hashlib, time | 52 | from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer |
39 | import socket | ||
40 | import os, signal | ||
41 | import threading | ||
42 | try: | ||
43 | import cPickle as pickle | ||
44 | except ImportError: | ||
45 | import pickle | ||
46 | 53 | ||
47 | DEBUG = False | 54 | DEBUG = False |
48 | 55 | ||
49 | from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | 56 | class BBTransport(xmlrpc.client.Transport): |
50 | import inspect, select, httplib | ||
51 | |||
52 | from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer | ||
53 | |||
54 | class BBTransport(xmlrpclib.Transport): | ||
55 | def __init__(self, timeout): | 57 | def __init__(self, timeout): |
56 | self.timeout = timeout | 58 | self.timeout = timeout |
57 | self.connection_token = None | 59 | self.connection_token = None |
58 | xmlrpclib.Transport.__init__(self) | 60 | xmlrpc.client.Transport.__init__(self) |
59 | 61 | ||
60 | # Modified from default to pass timeout to HTTPConnection | 62 | # Modified from default to pass timeout to HTTPConnection |
61 | def make_connection(self, host): | 63 | def make_connection(self, host): |
@@ -67,7 +69,7 @@ class BBTransport(xmlrpclib.Transport): | |||
67 | # create a HTTP connection object from a host descriptor | 69 | # create a HTTP connection object from a host descriptor |
68 | chost, self._extra_headers, x509 = self.get_host_info(host) | 70 | chost, self._extra_headers, x509 = self.get_host_info(host) |
69 | #store the host argument along with the connection object | 71 | #store the host argument along with the connection object |
70 | self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout) | 72 | self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout) |
71 | return self._connection[1] | 73 | return self._connection[1] |
72 | 74 | ||
73 | def set_connection_token(self, token): | 75 | def set_connection_token(self, token): |
@@ -76,11 +78,11 @@ class BBTransport(xmlrpclib.Transport): | |||
76 | def send_content(self, h, body): | 78 | def send_content(self, h, body): |
77 | if self.connection_token: | 79 | if self.connection_token: |
78 | h.putheader("Bitbake-token", self.connection_token) | 80 | h.putheader("Bitbake-token", self.connection_token) |
79 | xmlrpclib.Transport.send_content(self, h, body) | 81 | xmlrpc.client.Transport.send_content(self, h, body) |
80 | 82 | ||
81 | def _create_server(host, port, timeout = 60): | 83 | def _create_server(host, port, timeout = 60): |
82 | t = BBTransport(timeout) | 84 | t = BBTransport(timeout) |
83 | s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True) | 85 | s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True, use_builtin_types=True) |
84 | return s, t | 86 | return s, t |
85 | 87 | ||
86 | class BitBakeServerCommands(): | 88 | class BitBakeServerCommands(): |
@@ -128,7 +130,7 @@ class BitBakeServerCommands(): | |||
128 | def addClient(self): | 130 | def addClient(self): |
129 | if self.has_client: | 131 | if self.has_client: |
130 | return None | 132 | return None |
131 | token = hashlib.md5(str(time.time())).hexdigest() | 133 | token = hashlib.md5(str(time.time()).encode("utf-8")).hexdigest() |
132 | self.server.set_connection_token(token) | 134 | self.server.set_connection_token(token) |
133 | self.has_client = True | 135 | self.has_client = True |
134 | return token | 136 | return token |
@@ -232,7 +234,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer): | |||
232 | while not self.quit: | 234 | while not self.quit: |
233 | fds = [self] | 235 | fds = [self] |
234 | nextsleep = 0.1 | 236 | nextsleep = 0.1 |
235 | for function, data in self._idlefuns.items(): | 237 | for function, data in list(self._idlefuns.items()): |
236 | retval = None | 238 | retval = None |
237 | try: | 239 | try: |
238 | retval = function(self, data, False) | 240 | retval = function(self, data, False) |
@@ -267,7 +269,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer): | |||
267 | pass | 269 | pass |
268 | 270 | ||
269 | # Tell idle functions we're exiting | 271 | # Tell idle functions we're exiting |
270 | for function, data in self._idlefuns.items(): | 272 | for function, data in list(self._idlefuns.items()): |
271 | try: | 273 | try: |
272 | retval = function(self, data, True) | 274 | retval = function(self, data, True) |
273 | except: | 275 | except: |
@@ -379,7 +381,7 @@ class BitBakeXMLRPCClient(BitBakeBaseServer): | |||
379 | bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e))) | 381 | bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e))) |
380 | raise e | 382 | raise e |
381 | try: | 383 | try: |
382 | self.serverImpl = XMLRPCProxyServer(host, port) | 384 | self.serverImpl = XMLRPCProxyServer(host, port, use_builtin_types=True) |
383 | self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset) | 385 | self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset) |
384 | return self.connection.connect(self.token) | 386 | return self.connection.connect(self.token) |
385 | except Exception as e: | 387 | except Exception as e: |
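The xmlrpclib, httplib and SimpleXMLRPCServer modules became xmlrpc.client, http.client and xmlrpc.server on Python 3; use_builtin_types=True additionally makes the client hand back plain bytes and datetime objects instead of Binary/DateTime wrappers. A short illustration (placeholder URL; no request is made until a method is actually called):

    import xmlrpc.client

    server = xmlrpc.client.ServerProxy("http://127.0.0.1:8000/",
                                       allow_none=True, use_builtin_types=True)
    # server.some_method() would issue the actual XML-RPC call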
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index e4085cfba7..db3daeffb5 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -3,19 +3,14 @@ import logging | |||
3 | import os | 3 | import os |
4 | import re | 4 | import re |
5 | import tempfile | 5 | import tempfile |
6 | import pickle | ||
6 | import bb.data | 7 | import bb.data |
7 | from bb.checksum import FileChecksumCache | 8 | from bb.checksum import FileChecksumCache |
8 | 9 | ||
9 | logger = logging.getLogger('BitBake.SigGen') | 10 | logger = logging.getLogger('BitBake.SigGen') |
10 | 11 | ||
11 | try: | ||
12 | import cPickle as pickle | ||
13 | except ImportError: | ||
14 | import pickle | ||
15 | logger.info('Importing cPickle failed. Falling back to a very slow implementation.') | ||
16 | |||
17 | def init(d): | 12 | def init(d): |
18 | siggens = [obj for obj in globals().itervalues() | 13 | siggens = [obj for obj in globals().values() |
19 | if type(obj) is type and issubclass(obj, SignatureGenerator)] | 14 | if type(obj) is type and issubclass(obj, SignatureGenerator)] |
20 | 15 | ||
21 | desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" | 16 | desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" |
@@ -138,7 +133,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
138 | var = lookupcache[dep] | 133 | var = lookupcache[dep] |
139 | if var is not None: | 134 | if var is not None: |
140 | data = data + str(var) | 135 | data = data + str(var) |
141 | self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest() | 136 | self.basehash[fn + "." + task] = hashlib.md5(data.encode("utf-8")).hexdigest() |
142 | taskdeps[task] = alldeps | 137 | taskdeps[task] = alldeps |
143 | 138 | ||
144 | self.taskdeps[fn] = taskdeps | 139 | self.taskdeps[fn] = taskdeps |
@@ -223,7 +218,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
223 | self.taints[k] = taint | 218 | self.taints[k] = taint |
224 | logger.warning("%s is tainted from a forced run" % k) | 219 | logger.warning("%s is tainted from a forced run" % k) |
225 | 220 | ||
226 | h = hashlib.md5(data).hexdigest() | 221 | h = hashlib.md5(data.encode("utf-8")).hexdigest() |
227 | self.taskhash[k] = h | 222 | self.taskhash[k] = h |
228 | #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) | 223 | #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) |
229 | return h | 224 | return h |
@@ -287,7 +282,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
287 | with os.fdopen(fd, "wb") as stream: | 282 | with os.fdopen(fd, "wb") as stream: |
288 | p = pickle.dump(data, stream, -1) | 283 | p = pickle.dump(data, stream, -1) |
289 | stream.flush() | 284 | stream.flush() |
290 | os.chmod(tmpfile, 0664) | 285 | os.chmod(tmpfile, 0o664) |
291 | os.rename(tmpfile, sigfile) | 286 | os.rename(tmpfile, sigfile) |
292 | except (OSError, IOError) as err: | 287 | except (OSError, IOError) as err: |
293 | try: | 288 | try: |
@@ -545,7 +540,7 @@ def calc_basehash(sigdata): | |||
545 | if val is not None: | 540 | if val is not None: |
546 | basedata = basedata + str(val) | 541 | basedata = basedata + str(val) |
547 | 542 | ||
548 | return hashlib.md5(basedata).hexdigest() | 543 | return hashlib.md5(basedata.encode("utf-8")).hexdigest() |
549 | 544 | ||
550 | def calc_taskhash(sigdata): | 545 | def calc_taskhash(sigdata): |
551 | data = sigdata['basehash'] | 546 | data = sigdata['basehash'] |
@@ -562,7 +557,7 @@ def calc_taskhash(sigdata): | |||
562 | else: | 557 | else: |
563 | data = data + sigdata['taint'] | 558 | data = data + sigdata['taint'] |
564 | 559 | ||
565 | return hashlib.md5(data).hexdigest() | 560 | return hashlib.md5(data.encode("utf-8")).hexdigest() |
566 | 561 | ||
567 | 562 | ||
568 | def dump_sigfile(a): | 563 | def dump_sigfile(a): |
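The hashing changes are all the same Python 3 adjustment: hashlib only digests bytes, so the accumulated signature string is encoded first (and the file mode literal becomes the 0o664 octal syntax). For example:

    import hashlib

    data = "do_compile" + "CFLAGS = -O2"   # signature data is assembled as str
    print(hashlib.md5(data.encode("utf-8")).hexdigest())  # encode before hashing on Python 3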
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py index 9ae52d77da..65628c86f2 100644 --- a/bitbake/lib/bb/taskdata.py +++ b/bitbake/lib/bb/taskdata.py | |||
@@ -446,7 +446,7 @@ class TaskData: | |||
446 | return | 446 | return |
447 | 447 | ||
448 | if not item in dataCache.providers: | 448 | if not item in dataCache.providers: |
449 | close_matches = self.get_close_matches(item, dataCache.providers.keys()) | 449 | close_matches = self.get_close_matches(item, list(dataCache.providers.keys())) |
450 | # Is it in RuntimeProviders ? | 450 | # Is it in RuntimeProviders ? |
451 | all_p = bb.providers.getRuntimeProviders(dataCache, item) | 451 | all_p = bb.providers.getRuntimeProviders(dataCache, item) |
452 | for fn in all_p: | 452 | for fn in all_p: |
@@ -576,7 +576,7 @@ class TaskData: | |||
576 | dependees = self.get_dependees(targetid) | 576 | dependees = self.get_dependees(targetid) |
577 | for fnid in dependees: | 577 | for fnid in dependees: |
578 | self.fail_fnid(fnid, missing_list) | 578 | self.fail_fnid(fnid, missing_list) |
579 | for taskid in xrange(len(self.tasks_idepends)): | 579 | for taskid in range(len(self.tasks_idepends)): |
580 | idepends = self.tasks_idepends[taskid] | 580 | idepends = self.tasks_idepends[taskid] |
581 | for (idependid, idependtask) in idepends: | 581 | for (idependid, idependtask) in idepends: |
582 | if idependid == targetid: | 582 | if idependid == targetid: |
@@ -602,7 +602,7 @@ class TaskData: | |||
602 | dependees = self.get_rdependees(targetid) | 602 | dependees = self.get_rdependees(targetid) |
603 | for fnid in dependees: | 603 | for fnid in dependees: |
604 | self.fail_fnid(fnid, missing_list) | 604 | self.fail_fnid(fnid, missing_list) |
605 | for taskid in xrange(len(self.tasks_irdepends)): | 605 | for taskid in range(len(self.tasks_irdepends)): |
606 | irdepends = self.tasks_irdepends[taskid] | 606 | irdepends = self.tasks_irdepends[taskid] |
607 | for (idependid, idependtask) in irdepends: | 607 | for (idependid, idependtask) in irdepends: |
608 | if idependid == targetid: | 608 | if idependid == targetid: |
@@ -658,7 +658,7 @@ class TaskData: | |||
658 | logger.debug(3, ", ".join(self.run_names_index)) | 658 | logger.debug(3, ", ".join(self.run_names_index)) |
659 | 659 | ||
660 | logger.debug(3, "build_targets:") | 660 | logger.debug(3, "build_targets:") |
661 | for buildid in xrange(len(self.build_names_index)): | 661 | for buildid in range(len(self.build_names_index)): |
662 | target = self.build_names_index[buildid] | 662 | target = self.build_names_index[buildid] |
663 | targets = "None" | 663 | targets = "None" |
664 | if buildid in self.build_targets: | 664 | if buildid in self.build_targets: |
@@ -666,7 +666,7 @@ class TaskData: | |||
666 | logger.debug(3, " (%s)%s: %s", buildid, target, targets) | 666 | logger.debug(3, " (%s)%s: %s", buildid, target, targets) |
667 | 667 | ||
668 | logger.debug(3, "run_targets:") | 668 | logger.debug(3, "run_targets:") |
669 | for runid in xrange(len(self.run_names_index)): | 669 | for runid in range(len(self.run_names_index)): |
670 | target = self.run_names_index[runid] | 670 | target = self.run_names_index[runid] |
671 | targets = "None" | 671 | targets = "None" |
672 | if runid in self.run_targets: | 672 | if runid in self.run_targets: |
@@ -674,7 +674,7 @@ class TaskData: | |||
674 | logger.debug(3, " (%s)%s: %s", runid, target, targets) | 674 | logger.debug(3, " (%s)%s: %s", runid, target, targets) |
675 | 675 | ||
676 | logger.debug(3, "tasks:") | 676 | logger.debug(3, "tasks:") |
677 | for task in xrange(len(self.tasks_name)): | 677 | for task in range(len(self.tasks_name)): |
678 | logger.debug(3, " (%s)%s - %s: %s", | 678 | logger.debug(3, " (%s)%s - %s: %s", |
679 | task, | 679 | task, |
680 | self.fn_index[self.tasks_fnid[task]], | 680 | self.fn_index[self.tasks_fnid[task]], |
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index 5ea9d84803..14f0e2572c 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
@@ -191,8 +191,8 @@ class PythonReferenceTest(ReferenceTest): | |||
191 | if hasattr(bb.utils, "_context"): | 191 | if hasattr(bb.utils, "_context"): |
192 | self.context = bb.utils._context | 192 | self.context = bb.utils._context |
193 | else: | 193 | else: |
194 | import __builtin__ | 194 | import builtins |
195 | self.context = __builtin__.__dict__ | 195 | self.context = builtins.__dict__ |
196 | 196 | ||
197 | def parseExpression(self, exp): | 197 | def parseExpression(self, exp): |
198 | parsedvar = self.d.expandWithRefs(exp, None) | 198 | parsedvar = self.d.expandWithRefs(exp, None) |
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py index 12232305c3..b54eb06797 100644 --- a/bitbake/lib/bb/tests/data.py +++ b/bitbake/lib/bb/tests/data.py | |||
@@ -147,14 +147,14 @@ class DataExpansions(unittest.TestCase): | |||
147 | self.assertEqual(self.d.getVar("foo", False), None) | 147 | self.assertEqual(self.d.getVar("foo", False), None) |
148 | 148 | ||
149 | def test_keys(self): | 149 | def test_keys(self): |
150 | keys = self.d.keys() | 150 | keys = list(self.d.keys()) |
151 | self.assertEqual(keys, ['value_of_foo', 'foo', 'bar']) | 151 | self.assertCountEqual(keys, ['value_of_foo', 'foo', 'bar']) |
152 | 152 | ||
153 | def test_keys_deletion(self): | 153 | def test_keys_deletion(self): |
154 | newd = bb.data.createCopy(self.d) | 154 | newd = bb.data.createCopy(self.d) |
155 | newd.delVar("bar") | 155 | newd.delVar("bar") |
156 | keys = newd.keys() | 156 | keys = list(newd.keys()) |
157 | self.assertEqual(keys, ['value_of_foo', 'foo']) | 157 | self.assertCountEqual(keys, ['value_of_foo', 'foo']) |
158 | 158 | ||
159 | class TestNestedExpansions(unittest.TestCase): | 159 | class TestNestedExpansions(unittest.TestCase): |
160 | def setUp(self): | 160 | def setUp(self): |
@@ -334,7 +334,7 @@ class TestOverrides(unittest.TestCase): | |||
334 | self.d.setVar("TEST2_bar", "testvalue2") | 334 | self.d.setVar("TEST2_bar", "testvalue2") |
335 | bb.data.update_data(self.d) | 335 | bb.data.update_data(self.d) |
336 | self.assertEqual(self.d.getVar("TEST2", True), "testvalue2") | 336 | self.assertEqual(self.d.getVar("TEST2", True), "testvalue2") |
337 | self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) | 337 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) |
338 | 338 | ||
339 | def test_multiple_override(self): | 339 | def test_multiple_override(self): |
340 | self.d.setVar("TEST_bar", "testvalue2") | 340 | self.d.setVar("TEST_bar", "testvalue2") |
@@ -342,7 +342,7 @@ class TestOverrides(unittest.TestCase): | |||
342 | self.d.setVar("TEST_foo", "testvalue4") | 342 | self.d.setVar("TEST_foo", "testvalue4") |
343 | bb.data.update_data(self.d) | 343 | bb.data.update_data(self.d) |
344 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") | 344 | self.assertEqual(self.d.getVar("TEST", True), "testvalue3") |
345 | self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) | 345 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) |
346 | 346 | ||
347 | def test_multiple_combined_overrides(self): | 347 | def test_multiple_combined_overrides(self): |
348 | self.d.setVar("TEST_local_foo_bar", "testvalue3") | 348 | self.d.setVar("TEST_local_foo_bar", "testvalue3") |
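assertItemsEqual no longer exists in Python 3's unittest; assertCountEqual is the equivalent order-insensitive check, and dict.keys() now returns a view that has to be wrapped in list() before sorting or indexing. A standalone illustration (a plain dict, not the BitBake datastore):

    import unittest

    class KeysExample(unittest.TestCase):
        def test_keys(self):
            d = {"foo": 1, "bar": 2, "value_of_foo": 3}
            keys = list(d.keys())        # keys() is a view object on Python 3
            # Compares contents regardless of order, like the old assertItemsEqual
            self.assertCountEqual(keys, ["value_of_foo", "foo", "bar"])

    if __name__ == "__main__":
        unittest.main()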
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py index 6beb76a48d..c296db2013 100644 --- a/bitbake/lib/bb/tests/parse.py +++ b/bitbake/lib/bb/tests/parse.py | |||
@@ -50,7 +50,7 @@ C = "3" | |||
50 | def parsehelper(self, content, suffix = ".bb"): | 50 | def parsehelper(self, content, suffix = ".bb"): |
51 | 51 | ||
52 | f = tempfile.NamedTemporaryFile(suffix = suffix) | 52 | f = tempfile.NamedTemporaryFile(suffix = suffix) |
53 | f.write(content) | 53 | f.write(bytes(content, "utf-8")) |
54 | f.flush() | 54 | f.flush() |
55 | os.chdir(os.path.dirname(f.name)) | 55 | os.chdir(os.path.dirname(f.name)) |
56 | return f | 56 | return f |
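NamedTemporaryFile opens in binary mode by default, so on Python 3 the recipe text has to be converted to bytes before writing, which is what bytes(content, "utf-8") does. Equivalently:

    import tempfile

    content = 'A = "1"\n'
    with tempfile.NamedTemporaryFile(suffix=".bb") as f:
        f.write(content.encode("utf-8"))   # writing a plain str would raise TypeError
        f.flush()
        print(f.name)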
diff --git a/bitbake/lib/bb/ui/crumbs/hobwidget.py b/bitbake/lib/bb/ui/crumbs/hobwidget.py index 2b969c146e..1f51a3cf7f 100644 --- a/bitbake/lib/bb/ui/crumbs/hobwidget.py +++ b/bitbake/lib/bb/ui/crumbs/hobwidget.py | |||
@@ -612,7 +612,7 @@ class HobIconChecker(hic): | |||
612 | def set_hob_icon_to_stock_icon(self, file_path, stock_id=""): | 612 | def set_hob_icon_to_stock_icon(self, file_path, stock_id=""): |
613 | try: | 613 | try: |
614 | pixbuf = gtk.gdk.pixbuf_new_from_file(file_path) | 614 | pixbuf = gtk.gdk.pixbuf_new_from_file(file_path) |
615 | except Exception, e: | 615 | except Exception as e: |
616 | return None | 616 | return None |
617 | 617 | ||
618 | if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None): | 618 | if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None): |
diff --git a/bitbake/lib/bb/ui/crumbs/progressbar.py b/bitbake/lib/bb/ui/crumbs/progressbar.py index 3e2c660e4a..03230ae8a9 100644 --- a/bitbake/lib/bb/ui/crumbs/progressbar.py +++ b/bitbake/lib/bb/ui/crumbs/progressbar.py | |||
@@ -44,9 +44,9 @@ class HobProgressBar (gtk.ProgressBar): | |||
44 | self.set_text(text) | 44 | self.set_text(text) |
45 | 45 | ||
46 | def set_stop_title(self, text=None): | 46 | def set_stop_title(self, text=None): |
47 | if not text: | 47 | if not text: |
48 | text = "" | 48 | text = "" |
49 | self.set_text(text) | 49 | self.set_text(text) |
50 | 50 | ||
51 | def reset(self): | 51 | def reset(self): |
52 | self.set_fraction(0) | 52 | self.set_fraction(0) |
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py index 16a955d2b1..9b695ac2ed 100644 --- a/bitbake/lib/bb/ui/crumbs/runningbuild.py +++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py | |||
@@ -23,14 +23,14 @@ import gtk | |||
23 | import gobject | 23 | import gobject |
24 | import logging | 24 | import logging |
25 | import time | 25 | import time |
26 | import urllib | 26 | import urllib.request, urllib.parse, urllib.error |
27 | import urllib2 | 27 | import urllib.request, urllib.error, urllib.parse |
28 | import pango | 28 | import pango |
29 | from bb.ui.crumbs.hobcolor import HobColors | 29 | from bb.ui.crumbs.hobcolor import HobColors |
30 | from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf | 30 | from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf |
31 | 31 | ||
32 | class RunningBuildModel (gtk.TreeStore): | 32 | class RunningBuildModel (gtk.TreeStore): |
33 | (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7) | 33 | (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = list(range(7)) |
34 | 34 | ||
35 | def __init__ (self): | 35 | def __init__ (self): |
36 | gtk.TreeStore.__init__ (self, | 36 | gtk.TreeStore.__init__ (self, |
@@ -443,8 +443,8 @@ def do_pastebin(text): | |||
443 | url = 'http://pastebin.com/api_public.php' | 443 | url = 'http://pastebin.com/api_public.php' |
444 | params = {'paste_code': text, 'paste_format': 'text'} | 444 | params = {'paste_code': text, 'paste_format': 'text'} |
445 | 445 | ||
446 | req = urllib2.Request(url, urllib.urlencode(params)) | 446 | req = urllib.request.Request(url, urllib.parse.urlencode(params)) |
447 | response = urllib2.urlopen(req) | 447 | response = urllib.request.urlopen(req) |
448 | paste_url = response.read() | 448 | paste_url = response.read() |
449 | 449 | ||
450 | return paste_url | 450 | return paste_url |
@@ -519,7 +519,7 @@ class RunningBuildTreeView (gtk.TreeView): | |||
519 | 519 | ||
520 | # @todo Provide visual feedback to the user that it is done and that | 520 | # @todo Provide visual feedback to the user that it is done and that |
521 | # it worked. | 521 | # it worked. |
522 | print paste_url | 522 | print(paste_url) |
523 | 523 | ||
524 | self._add_to_clipboard(paste_url) | 524 | self._add_to_clipboard(paste_url) |
525 | 525 | ||
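urllib and urllib2 were merged into the urllib package on Python 3: form encoding lives in urllib.parse, request handling in urllib.request, and urlopen() expects the encoded body as bytes. A sketch of the same kind of POST (placeholder URL, request not actually sent):

    import urllib.parse
    import urllib.request

    params = {"paste_code": "hello", "paste_format": "text"}
    body = urllib.parse.urlencode(params).encode("utf-8")   # urlopen() needs bytes
    req = urllib.request.Request("http://example.com/api", body)
    # urllib.request.urlopen(req) would perform the POST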
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py index f4ee7b41ae..f5f8f1668f 100644 --- a/bitbake/lib/bb/ui/goggle.py +++ b/bitbake/lib/bb/ui/goggle.py | |||
@@ -18,13 +18,18 @@ | |||
18 | # with this program; if not, write to the Free Software Foundation, Inc., | 18 | # with this program; if not, write to the Free Software Foundation, Inc., |
19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
20 | 20 | ||
21 | from gi import pygtkcompat | ||
22 | |||
23 | pygtkcompat.enable() | ||
24 | pygtkcompat.enable_gtk(version='3.0') | ||
25 | |||
21 | import gobject | 26 | import gobject |
22 | import gtk | 27 | import gtk |
23 | import xmlrpclib | 28 | import xmlrpc.client |
24 | from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild | 29 | from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild |
25 | from bb.ui.crumbs.progress import ProgressBar | 30 | from bb.ui.crumbs.progress import ProgressBar |
26 | 31 | ||
27 | import Queue | 32 | import queue |
28 | 33 | ||
29 | 34 | ||
30 | def event_handle_idle_func (eventHandler, build, pbar): | 35 | def event_handle_idle_func (eventHandler, build, pbar): |
@@ -96,7 +101,7 @@ def main (server, eventHandler, params): | |||
96 | elif ret != True: | 101 | elif ret != True: |
97 | print("Error running command '%s': returned %s" % (cmdline, ret)) | 102 | print("Error running command '%s': returned %s" % (cmdline, ret)) |
98 | return 1 | 103 | return 1 |
99 | except xmlrpclib.Fault as x: | 104 | except xmlrpc.client.Fault as x: |
100 | print("XMLRPC Fault getting commandline:\n %s" % x) | 105 | print("XMLRPC Fault getting commandline:\n %s" % x) |
101 | return 1 | 106 | return 1 |
102 | 107 | ||
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py index 08c872e397..9605c8ee50 100644 --- a/bitbake/lib/bb/ui/knotty.py +++ b/bitbake/lib/bb/ui/knotty.py | |||
@@ -22,7 +22,7 @@ from __future__ import division | |||
22 | 22 | ||
23 | import os | 23 | import os |
24 | import sys | 24 | import sys |
25 | import xmlrpclib | 25 | import xmlrpc.client as xmlrpclib |
26 | import logging | 26 | import logging |
27 | import progressbar | 27 | import progressbar |
28 | import signal | 28 | import signal |
@@ -184,8 +184,8 @@ class TerminalFilter(object): | |||
184 | def clearFooter(self): | 184 | def clearFooter(self): |
185 | if self.footer_present: | 185 | if self.footer_present: |
186 | lines = self.footer_present | 186 | lines = self.footer_present |
187 | sys.stdout.write(self.curses.tparm(self.cuu, lines)) | 187 | sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines)) |
188 | sys.stdout.write(self.curses.tparm(self.ed)) | 188 | sys.stdout.buffer.write(self.curses.tparm(self.ed)) |
189 | sys.stdout.flush() | 189 | sys.stdout.flush() |
190 | self.footer_present = False | 190 | self.footer_present = False |
191 | 191 | ||
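curses.tparm() returns bytes, and Python 3's text-mode sys.stdout refuses them, so the footer-clearing escape sequences now go to the underlying binary stream sys.stdout.buffer. A minimal sketch (assumes an interactive terminal):

    import curses
    import sys

    curses.setupterm()
    cuu = curses.tigetstr("cuu")      # capability strings come back as bytes
    if cuu:
        sys.stdout.buffer.write(curses.tparm(cuu, 2))  # sys.stdout.write() would raise TypeError
        sys.stdout.flush()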
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py index 9589a77d75..d81e4138ba 100644 --- a/bitbake/lib/bb/ui/ncurses.py +++ b/bitbake/lib/bb/ui/ncurses.py | |||
@@ -45,7 +45,7 @@ | |||
45 | """ | 45 | """ |
46 | 46 | ||
47 | 47 | ||
48 | from __future__ import division | 48 | |
49 | import logging | 49 | import logging |
50 | import os, sys, itertools, time, subprocess | 50 | import os, sys, itertools, time, subprocess |
51 | 51 | ||
@@ -55,7 +55,7 @@ except ImportError: | |||
55 | sys.exit("FATAL: The ncurses ui could not load the required curses python module.") | 55 | sys.exit("FATAL: The ncurses ui could not load the required curses python module.") |
56 | 56 | ||
57 | import bb | 57 | import bb |
58 | import xmlrpclib | 58 | import xmlrpc.client |
59 | from bb import ui | 59 | from bb import ui |
60 | from bb.ui import uihelper | 60 | from bb.ui import uihelper |
61 | 61 | ||
@@ -252,7 +252,7 @@ class NCursesUI: | |||
252 | elif ret != True: | 252 | elif ret != True: |
253 | print("Couldn't get default commandlind! %s" % ret) | 253 | print("Couldn't get default commandlind! %s" % ret) |
254 | return | 254 | return |
255 | except xmlrpclib.Fault as x: | 255 | except xmlrpc.client.Fault as x: |
256 | print("XMLRPC Fault getting commandline:\n %s" % x) | 256 | print("XMLRPC Fault getting commandline:\n %s" % x) |
257 | return | 257 | return |
258 | 258 | ||
@@ -331,7 +331,7 @@ class NCursesUI: | |||
331 | taw.setText(0, 0, "") | 331 | taw.setText(0, 0, "") |
332 | if activetasks: | 332 | if activetasks: |
333 | taw.appendText("Active Tasks:\n") | 333 | taw.appendText("Active Tasks:\n") |
334 | for task in activetasks.itervalues(): | 334 | for task in activetasks.values(): |
335 | taw.appendText(task["title"] + '\n') | 335 | taw.appendText(task["title"] + '\n') |
336 | if failedtasks: | 336 | if failedtasks: |
337 | taw.appendText("Failed Tasks:\n") | 337 | taw.appendText("Failed Tasks:\n") |
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py index df093c53c0..ca1916664d 100644 --- a/bitbake/lib/bb/ui/uievent.py +++ b/bitbake/lib/bb/ui/uievent.py | |||
@@ -25,7 +25,7 @@ client/server deadlocks. | |||
25 | """ | 25 | """ |
26 | 26 | ||
27 | import socket, threading, pickle, collections | 27 | import socket, threading, pickle, collections |
28 | from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | 28 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler |
29 | 29 | ||
30 | class BBUIEventQueue: | 30 | class BBUIEventQueue: |
31 | def __init__(self, BBServer, clientinfo=("localhost, 0")): | 31 | def __init__(self, BBServer, clientinfo=("localhost, 0")): |
@@ -137,7 +137,7 @@ class UIXMLRPCServer (SimpleXMLRPCServer): | |||
137 | SimpleXMLRPCServer.__init__( self, | 137 | SimpleXMLRPCServer.__init__( self, |
138 | interface, | 138 | interface, |
139 | requestHandler=SimpleXMLRPCRequestHandler, | 139 | requestHandler=SimpleXMLRPCRequestHandler, |
140 | logRequests=False, allow_none=True) | 140 | logRequests=False, allow_none=True, use_builtin_types=True) |
141 | 141 | ||
142 | def get_request(self): | 142 | def get_request(self): |
143 | while not self.quit: | 143 | while not self.quit: |
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 588c192c08..138da44ef1 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
@@ -37,7 +37,7 @@ import errno | |||
37 | import signal | 37 | import signal |
38 | import ast | 38 | import ast |
39 | import collections | 39 | import collections |
40 | from commands import getstatusoutput | 40 | from subprocess import getstatusoutput |
41 | from contextlib import contextmanager | 41 | from contextlib import contextmanager |
42 | from ctypes import cdll | 42 | from ctypes import cdll |
43 | 43 | ||
@@ -76,7 +76,7 @@ def explode_version(s): | |||
76 | r.append((0, int(m.group(1)))) | 76 | r.append((0, int(m.group(1)))) |
77 | s = m.group(2) | 77 | s = m.group(2) |
78 | continue | 78 | continue |
79 | if s[0] in string.letters: | 79 | if s[0] in string.ascii_letters: |
80 | m = alpha_regexp.match(s) | 80 | m = alpha_regexp.match(s) |
81 | r.append((1, m.group(1))) | 81 | r.append((1, m.group(1))) |
82 | s = m.group(2) | 82 | s = m.group(2) |
@@ -588,7 +588,7 @@ def filter_environment(good_vars): | |||
588 | """ | 588 | """ |
589 | 589 | ||
590 | removed_vars = {} | 590 | removed_vars = {} |
591 | for key in os.environ.keys(): | 591 | for key in list(os.environ): |
592 | if key in good_vars: | 592 | if key in good_vars: |
593 | continue | 593 | continue |
594 | 594 | ||
@@ -641,7 +641,7 @@ def empty_environment(): | |||
641 | """ | 641 | """ |
642 | Remove all variables from the environment. | 642 | Remove all variables from the environment. |
643 | """ | 643 | """ |
644 | for s in os.environ.keys(): | 644 | for s in list(os.environ.keys()): |
645 | os.unsetenv(s) | 645 | os.unsetenv(s) |
646 | del os.environ[s] | 646 | del os.environ[s] |
647 | 647 | ||
@@ -958,7 +958,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
958 | if not val: | 958 | if not val: |
959 | return falsevalue | 959 | return falsevalue |
960 | val = set(val.split()) | 960 | val = set(val.split()) |
961 | if isinstance(checkvalues, basestring): | 961 | if isinstance(checkvalues, str): |
962 | checkvalues = set(checkvalues.split()) | 962 | checkvalues = set(checkvalues.split()) |
963 | else: | 963 | else: |
964 | checkvalues = set(checkvalues) | 964 | checkvalues = set(checkvalues) |
@@ -971,7 +971,7 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): | |||
971 | if not val: | 971 | if not val: |
972 | return falsevalue | 972 | return falsevalue |
973 | val = set(val.split()) | 973 | val = set(val.split()) |
974 | if isinstance(checkvalues, basestring): | 974 | if isinstance(checkvalues, str): |
975 | checkvalues = set(checkvalues.split()) | 975 | checkvalues = set(checkvalues.split()) |
976 | else: | 976 | else: |
977 | checkvalues = set(checkvalues) | 977 | checkvalues = set(checkvalues) |
@@ -1040,7 +1040,7 @@ def exec_flat_python_func(func, *args, **kwargs): | |||
1040 | aidx += 1 | 1040 | aidx += 1 |
1041 | # Handle keyword arguments | 1041 | # Handle keyword arguments |
1042 | context.update(kwargs) | 1042 | context.update(kwargs) |
1043 | funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()]) | 1043 | funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()]) |
1044 | code = 'retval = %s(%s)' % (func, ', '.join(funcargs)) | 1044 | code = 'retval = %s(%s)' % (func, ', '.join(funcargs)) |
1045 | comp = bb.utils.better_compile(code, '<string>', '<string>') | 1045 | comp = bb.utils.better_compile(code, '<string>', '<string>') |
1046 | bb.utils.better_exec(comp, context, code, '<string>') | 1046 | bb.utils.better_exec(comp, context, code, '<string>') |
@@ -1127,7 +1127,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1127 | else: | 1127 | else: |
1128 | varset_new = varset_start | 1128 | varset_new = varset_start |
1129 | 1129 | ||
1130 | if isinstance(indent, (int, long)): | 1130 | if isinstance(indent, int): |
1131 | if indent == -1: | 1131 | if indent == -1: |
1132 | indentspc = ' ' * (len(varset_new) + 2) | 1132 | indentspc = ' ' * (len(varset_new) + 2) |
1133 | else: | 1133 | else: |
@@ -1195,7 +1195,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1195 | in_var = None | 1195 | in_var = None |
1196 | else: | 1196 | else: |
1197 | skip = False | 1197 | skip = False |
1198 | for (varname, var_re) in var_res.iteritems(): | 1198 | for (varname, var_re) in var_res.items(): |
1199 | res = var_re.match(line) | 1199 | res = var_re.match(line) |
1200 | if res: | 1200 | if res: |
1201 | isfunc = varname.endswith('()') | 1201 | isfunc = varname.endswith('()') |
@@ -1373,7 +1373,7 @@ def get_file_layer(filename, d): | |||
1373 | # Use longest path so we handle nested layers | 1373 | # Use longest path so we handle nested layers |
1374 | matchlen = 0 | 1374 | matchlen = 0 |
1375 | match = None | 1375 | match = None |
1376 | for collection, regex in collection_res.iteritems(): | 1376 | for collection, regex in collection_res.items(): |
1377 | if len(regex) > matchlen and re.match(regex, path): | 1377 | if len(regex) > matchlen and re.match(regex, path): |
1378 | matchlen = len(regex) | 1378 | matchlen = len(regex) |
1379 | match = collection | 1379 | match = collection |
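Most of the utils.py churn is the same Python 3 pattern: iteritems()/iterkeys() are gone, keys()/items() return live views, and mutating a mapping while iterating its view raises RuntimeError, hence the list() wrappers. For example, emptying the environment safely:

    import os

    # Snapshot the keys first; deleting while iterating os.environ.keys() directly
    # raises "RuntimeError: dictionary changed size during iteration" on Python 3
    for name in list(os.environ.keys()):
        del os.environ[name]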
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py index 5b95e2ecb2..d4c1792f60 100644 --- a/bitbake/lib/bblayers/action.py +++ b/bitbake/lib/bblayers/action.py | |||
@@ -117,7 +117,7 @@ build results (as the layer priority order has effectively changed). | |||
117 | applied_appends = [] | 117 | applied_appends = [] |
118 | for layer in layers: | 118 | for layer in layers: |
119 | overlayed = [] | 119 | overlayed = [] |
120 | for f in self.tinfoil.cooker.collection.overlayed.iterkeys(): | 120 | for f in self.tinfoil.cooker.collection.overlayed.keys(): |
121 | for of in self.tinfoil.cooker.collection.overlayed[f]: | 121 | for of in self.tinfoil.cooker.collection.overlayed[f]: |
122 | if of.startswith(layer): | 122 | if of.startswith(layer): |
123 | overlayed.append(of) | 123 | overlayed.append(of) |
diff --git a/bitbake/lib/bblayers/common.py b/bitbake/lib/bblayers/common.py index 360b9d764f..b10fb4cead 100644 --- a/bitbake/lib/bblayers/common.py +++ b/bitbake/lib/bblayers/common.py | |||
@@ -14,7 +14,7 @@ class LayerPlugin(): | |||
14 | self.tinfoil = tinfoil | 14 | self.tinfoil = tinfoil |
15 | self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split() | 15 | self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split() |
16 | layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) | 16 | layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) |
17 | self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.iteritems()} | 17 | self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()} |
18 | 18 | ||
19 | @staticmethod | 19 | @staticmethod |
20 | def add_command(subparsers, cmdname, function, parserecipes=True, *args, **kwargs): | 20 | def add_command(subparsers, cmdname, function, parserecipes=True, *args, **kwargs): |
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py index 3c39d8a79e..10ad718eba 100644 --- a/bitbake/lib/bblayers/layerindex.py +++ b/bitbake/lib/bblayers/layerindex.py | |||
@@ -1,10 +1,10 @@ | |||
1 | import argparse | 1 | import argparse |
2 | import httplib | 2 | import http.client |
3 | import json | 3 | import json |
4 | import logging | 4 | import logging |
5 | import os | 5 | import os |
6 | import subprocess | 6 | import subprocess |
7 | import urlparse | 7 | import urllib.parse |
8 | 8 | ||
9 | from bblayers.action import ActionPlugin | 9 | from bblayers.action import ActionPlugin |
10 | 10 | ||
@@ -24,12 +24,12 @@ class LayerIndexPlugin(ActionPlugin): | |||
24 | def get_json_data(self, apiurl): | 24 | def get_json_data(self, apiurl): |
25 | proxy_settings = os.environ.get("http_proxy", None) | 25 | proxy_settings = os.environ.get("http_proxy", None) |
26 | conn = None | 26 | conn = None |
27 | _parsedurl = urlparse.urlparse(apiurl) | 27 | _parsedurl = urllib.parse.urlparse(apiurl) |
28 | path = _parsedurl.path | 28 | path = _parsedurl.path |
29 | query = _parsedurl.query | 29 | query = _parsedurl.query |
30 | 30 | ||
31 | def parse_url(url): | 31 | def parse_url(url): |
32 | parsedurl = urlparse.urlparse(url) | 32 | parsedurl = urllib.parse.urlparse(url) |
33 | if parsedurl.netloc[0] == '[': | 33 | if parsedurl.netloc[0] == '[': |
34 | host, port = parsedurl.netloc[1:].split(']', 1) | 34 | host, port = parsedurl.netloc[1:].split(']', 1) |
35 | if ':' in port: | 35 | if ':' in port: |
@@ -46,11 +46,11 @@ class LayerIndexPlugin(ActionPlugin): | |||
46 | 46 | ||
47 | if proxy_settings is None: | 47 | if proxy_settings is None: |
48 | host, port = parse_url(apiurl) | 48 | host, port = parse_url(apiurl) |
49 | conn = httplib.HTTPConnection(host, port) | 49 | conn = http.client.HTTPConnection(host, port) |
50 | conn.request("GET", path + "?" + query) | 50 | conn.request("GET", path + "?" + query) |
51 | else: | 51 | else: |
52 | host, port = parse_url(proxy_settings) | 52 | host, port = parse_url(proxy_settings) |
53 | conn = httplib.HTTPConnection(host, port) | 53 | conn = http.client.HTTPConnection(host, port) |
54 | conn.request("GET", apiurl) | 54 | conn.request("GET", apiurl) |
55 | 55 | ||
56 | r = conn.getresponse() | 56 | r = conn.getresponse() |
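urlparse and httplib became urllib.parse and http.client on Python 3 with unchanged call signatures. For instance (placeholder URL; nothing is sent until request() is called):

    import http.client
    import urllib.parse

    parsed = urllib.parse.urlparse("http://example.com/layerindex/api/")
    conn = http.client.HTTPConnection(parsed.netloc, timeout=30)
    # conn.request("GET", parsed.path + "?" + parsed.query) would issue the GET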
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py index b5b98f7639..b8c817b124 100644 --- a/bitbake/lib/bblayers/query.py +++ b/bitbake/lib/bblayers/query.py | |||
@@ -128,7 +128,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
128 | # Ensure we list skipped recipes | 128 | # Ensure we list skipped recipes |
129 | # We are largely guessing about PN, PV and the preferred version here, | 129 | # We are largely guessing about PN, PV and the preferred version here, |
130 | # but we have no choice since skipped recipes are not fully parsed | 130 | # but we have no choice since skipped recipes are not fully parsed |
131 | skiplist = self.tinfoil.cooker.skiplist.keys() | 131 | skiplist = list(self.tinfoil.cooker.skiplist.keys()) |
132 | skiplist.sort( key=lambda fileitem: self.tinfoil.cooker.collection.calc_bbfile_priority(fileitem) ) | 132 | skiplist.sort( key=lambda fileitem: self.tinfoil.cooker.collection.calc_bbfile_priority(fileitem) ) |
133 | skiplist.reverse() | 133 | skiplist.reverse() |
134 | for fn in skiplist: | 134 | for fn in skiplist: |
@@ -275,7 +275,7 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
275 | 275 | ||
276 | def show_appends_for_skipped(self): | 276 | def show_appends_for_skipped(self): |
277 | filenames = [os.path.basename(f) | 277 | filenames = [os.path.basename(f) |
278 | for f in self.tinfoil.cooker.skiplist.iterkeys()] | 278 | for f in self.tinfoil.cooker.skiplist.keys()] |
279 | return self.show_appends_output(filenames, None, " (skipped)") | 279 | return self.show_appends_output(filenames, None, " (skipped)") |
280 | 280 | ||
281 | def show_appends_output(self, filenames, best_filename, name_suffix = ''): | 281 | def show_appends_output(self, filenames, best_filename, name_suffix = ''): |
diff --git a/bitbake/lib/codegen.py b/bitbake/lib/codegen.py index be772d5107..62a6748c47 100644 --- a/bitbake/lib/codegen.py +++ b/bitbake/lib/codegen.py | |||
@@ -214,11 +214,11 @@ class SourceGenerator(NodeVisitor): | |||
214 | paren_or_comma() | 214 | paren_or_comma() |
215 | self.write(keyword.arg + '=') | 215 | self.write(keyword.arg + '=') |
216 | self.visit(keyword.value) | 216 | self.visit(keyword.value) |
217 | if node.starargs is not None: | 217 | if hasattr(node, 'starargs') and node.starargs is not None: |
218 | paren_or_comma() | 218 | paren_or_comma() |
219 | self.write('*') | 219 | self.write('*') |
220 | self.visit(node.starargs) | 220 | self.visit(node.starargs) |
221 | if node.kwargs is not None: | 221 | if hasattr(node, 'kwargs') and node.kwargs is not None: |
222 | paren_or_comma() | 222 | paren_or_comma() |
223 | self.write('**') | 223 | self.write('**') |
224 | self.visit(node.kwargs) | 224 | self.visit(node.kwargs) |
@@ -379,11 +379,11 @@ class SourceGenerator(NodeVisitor): | |||
379 | write_comma() | 379 | write_comma() |
380 | self.write(keyword.arg + '=') | 380 | self.write(keyword.arg + '=') |
381 | self.visit(keyword.value) | 381 | self.visit(keyword.value) |
382 | if node.starargs is not None: | 382 | if hasattr(node, 'starargs') and node.starargs is not None: |
383 | write_comma() | 383 | write_comma() |
384 | self.write('*') | 384 | self.write('*') |
385 | self.visit(node.starargs) | 385 | self.visit(node.starargs) |
386 | if node.kwargs is not None: | 386 | if hasattr(node, 'kwargs') and node.kwargs is not None: |
387 | write_comma() | 387 | write_comma() |
388 | self.write('**') | 388 | self.write('**') |
389 | self.visit(node.kwargs) | 389 | self.visit(node.kwargs) |
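ast.Call nodes lost their starargs and kwargs attributes in Python 3.5 (starred arguments moved into args/keywords), so the generator now probes with hasattr() before touching them. A quick check of the new shape:

    import ast

    call = ast.parse("f(1, *rest, **opts)").body[0].value
    if hasattr(call, "starargs") and call.starargs is not None:
        print("pre-3.5 AST: separate starargs node")
    else:
        print("3.5+ AST:", ast.dump(call))   # *rest and **opts live in args/keywords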
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py index 6168fd9a03..d50886ed2f 100644 --- a/bitbake/lib/ply/yacc.py +++ b/bitbake/lib/ply/yacc.py | |||
@@ -195,6 +195,8 @@ class YaccProduction: | |||
195 | self.lexer = None | 195 | self.lexer = None |
196 | self.parser= None | 196 | self.parser= None |
197 | def __getitem__(self,n): | 197 | def __getitem__(self,n): |
198 | if isinstance(n,slice): | ||
199 | return [self[i] for i in range(*(n.indices(len(self.slice))))] | ||
198 | if n >= 0: return self.slice[n].value | 200 | if n >= 0: return self.slice[n].value |
199 | else: return self.stack[n].value | 201 | else: return self.stack[n].value |
200 | 202 | ||
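The added __getitem__ branch gives YaccProduction slice support by expanding the slice with slice.indices(), which clamps the bounds to the sequence length and feeds range(). The idiom in isolation (a toy container, not the real YaccProduction):

    class Symbols:
        def __init__(self, values):
            self.values = values

        def __getitem__(self, n):
            if isinstance(n, slice):
                # indices() normalises negative/None bounds against len(self.values)
                return [self[i] for i in range(*n.indices(len(self.values)))]
            return self.values[n]

    s = Symbols(["p_expr", "NUMBER", "+", "NUMBER"])
    print(s[1:3])     # ['NUMBER', '+']
    print(s[::-1])    # reversed copy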
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py index 2a86184170..495d09f39b 100644 --- a/bitbake/lib/prserv/db.py +++ b/bitbake/lib/prserv/db.py | |||
@@ -260,7 +260,7 @@ class PRData(object): | |||
260 | self.connection.close() | 260 | self.connection.close() |
261 | 261 | ||
262 | def __getitem__(self,tblname): | 262 | def __getitem__(self,tblname): |
263 | if not isinstance(tblname, basestring): | 263 | if not isinstance(tblname, str): |
264 | raise TypeError("tblname argument must be a string, not '%s'" % | 264 | raise TypeError("tblname argument must be a string, not '%s'" % |
265 | type(tblname)) | 265 | type(tblname)) |
266 | if tblname in self._tables: | 266 | if tblname in self._tables: |
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py index 8cec9f8870..cafcc820cd 100644 --- a/bitbake/lib/prserv/serv.py +++ b/bitbake/lib/prserv/serv.py | |||
@@ -1,10 +1,10 @@ | |||
1 | import os,sys,logging | 1 | import os,sys,logging |
2 | import signal, time | 2 | import signal, time |
3 | from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | 3 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler |
4 | import threading | 4 | import threading |
5 | import Queue | 5 | import queue |
6 | import socket | 6 | import socket |
7 | import StringIO | 7 | import io |
8 | 8 | ||
9 | try: | 9 | try: |
10 | import sqlite3 | 10 | import sqlite3 |
@@ -64,7 +64,7 @@ class PRServer(SimpleXMLRPCServer): | |||
64 | self.register_function(self.importone, "importone") | 64 | self.register_function(self.importone, "importone") |
65 | self.register_introspection_functions() | 65 | self.register_introspection_functions() |
66 | 66 | ||
67 | self.requestqueue = Queue.Queue() | 67 | self.requestqueue = queue.Queue() |
68 | self.handlerthread = threading.Thread(target = self.process_request_thread) | 68 | self.handlerthread = threading.Thread(target = self.process_request_thread) |
69 | self.handlerthread.daemon = False | 69 | self.handlerthread.daemon = False |
70 | 70 | ||
@@ -83,7 +83,7 @@ class PRServer(SimpleXMLRPCServer): | |||
83 | while not self.quit: | 83 | while not self.quit: |
84 | try: | 84 | try: |
85 | (request, client_address) = self.requestqueue.get(True, 30) | 85 | (request, client_address) = self.requestqueue.get(True, 30) |
86 | except Queue.Empty: | 86 | except queue.Empty: |
87 | self.table.sync_if_dirty() | 87 | self.table.sync_if_dirty() |
88 | continue | 88 | continue |
89 | try: | 89 | try: |
@@ -126,7 +126,7 @@ class PRServer(SimpleXMLRPCServer): | |||
126 | Returns None if the database engine does not support dumping to | 126 | Returns None if the database engine does not support dumping to |
127 | script or if some other error is encountered in processing. | 127 | script or if some other error is encountered in processing. |
128 | """ | 128 | """ |
129 | buff = StringIO.StringIO() | 129 | buff = io.StringIO() |
130 | try: | 130 | try: |
131 | self.table.sync() | 131 | self.table.sync() |
132 | self.table.dump_db(buff) | 132 | self.table.dump_db(buff) |
@@ -420,7 +420,7 @@ class PRServiceConfigError(Exception): | |||
420 | def auto_start(d): | 420 | def auto_start(d): |
421 | global singleton | 421 | global singleton |
422 | 422 | ||
423 | host_params = filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':')) | 423 | host_params = list(filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))) |
424 | if not host_params: | 424 | if not host_params: |
425 | return None | 425 | return None |
426 | 426 | ||
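filter() returns a lazy iterator on Python 3, so code that truth-tests or indexes the result has to materialise it with list(), as auto_start() now does; the remaining prserv changes (queue, io.StringIO, xmlrpc.server) are the usual module renames. A small illustration (the host string is a made-up example):

    host_spec = "localhost:8585"
    host_params = list(filter(None, host_spec.split(":")))
    if not host_params:        # a bare filter object would always be truthy here
        print("no PR server configured")
    else:
        print("host %s, port %s" % (host_params[0], host_params[1]))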