Diffstat (limited to 'bitbake/bin')
-rwxr-xr-x  bitbake/bin/bitbake                |   2
l---------  bitbake/bin/bitbake-config-build   |   1
-rwxr-xr-x  bitbake/bin/bitbake-diffsigs       |   9
-rwxr-xr-x  bitbake/bin/bitbake-getvar         |  15
-rwxr-xr-x  bitbake/bin/bitbake-hashclient     | 107
-rwxr-xr-x  bitbake/bin/bitbake-hashserv       |  10
-rwxr-xr-x  bitbake/bin/bitbake-layers         |  28
-rwxr-xr-x  bitbake/bin/bitbake-prserv         |  26
-rwxr-xr-x  bitbake/bin/bitbake-selftest       |   3
-rwxr-xr-x  bitbake/bin/bitbake-server         |   7
-rwxr-xr-x  bitbake/bin/bitbake-worker         |  24
-rwxr-xr-x  bitbake/bin/git-make-shallow       |   4
12 files changed, 190 insertions, 46 deletions
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 382983e087..40b5d895c1 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -27,7 +27,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
 bb.utils.check_system_locale()
 
-__version__ = "2.9.0"
+__version__ = "2.15.1"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
diff --git a/bitbake/bin/bitbake-config-build b/bitbake/bin/bitbake-config-build
new file mode 120000
index 0000000000..11e6df80c4
--- /dev/null
+++ b/bitbake/bin/bitbake-config-build
@@ -0,0 +1 @@
+bitbake-layers
\ No newline at end of file
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
index 8202c78623..9d6cb8c944 100755
--- a/bitbake/bin/bitbake-diffsigs
+++ b/bitbake/bin/bitbake-diffsigs
@@ -72,16 +72,17 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
         elif sig2 not in sigfiles:
             logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
             sys.exit(1)
+
+        latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
     else:
         sigfiles = find_siginfo(bbhandler, pn, taskname)
         latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
         if not latestsigs:
             logger.error('No sigdata files found matching %s %s' % (pn, taskname))
             sys.exit(1)
-        sig1 = latestsigs[0]
-        sig2 = latestsigs[1]
-
-    latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
+        latestfiles = [sigfiles[latestsigs[0]]['path']]
+        if len(latestsigs) > 1:
+            latestfiles.append(sigfiles[latestsigs[1]]['path'])
 
     return latestfiles
 
diff --git a/bitbake/bin/bitbake-getvar b/bitbake/bin/bitbake-getvar
index 8901f99ae2..378fb13572 100755
--- a/bitbake/bin/bitbake-getvar
+++ b/bitbake/bin/bitbake-getvar
@@ -10,12 +10,14 @@ import io
 import os
 import sys
 import warnings
+import logging
 warnings.simplefilter("default")
 
 bindir = os.path.dirname(__file__)
 topdir = os.path.dirname(bindir)
 sys.path[0:0] = [os.path.join(topdir, 'lib')]
 
+import bb.providers
 import bb.tinfoil
 
 if __name__ == "__main__":
@@ -37,13 +39,22 @@ if __name__ == "__main__":
         sys.exit("--flag only makes sense with --value")
 
     quiet = args.quiet or args.value
+    if quiet:
+        logger = logging.getLogger("BitBake")
+        logger.setLevel(logging.WARNING)
+
     with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
         if args.recipe:
             tinfoil.prepare(quiet=3 if quiet else 2)
-            d = tinfoil.parse_recipe(args.recipe)
+            try:
+                d = tinfoil.parse_recipe(args.recipe)
+            except bb.providers.NoProvider as e:
+                sys.exit(str(e))
         else:
             tinfoil.prepare(quiet=2, config_only=True)
-            d = tinfoil.config_data
+            # Expand keys and run anonymous functions to get identical result to
+            # "bitbake -e"
+            d = tinfoil.finalizeData()
 
         value = None
         if args.flag:
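
Note: with this change the no-recipe case goes through tinfoil.finalizeData() instead of the raw tinfoil.config_data, so the printed value matches "bitbake -e" output. A minimal sketch of that path, assuming a configured build environment; the queried variable ("MACHINE") is only an example:

    # Minimal sketch of the new no-recipe path in bitbake-getvar.
    import bb.tinfoil

    with bb.tinfoil.Tinfoil(tracking=True, setup_logging=False) as tinfoil:
        tinfoil.prepare(quiet=2, config_only=True)
        d = tinfoil.finalizeData()  # expand keys, run anonymous functions
        print(d.getVar("MACHINE"))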
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
index 610787ed2b..b8755c5797 100755
--- a/bitbake/bin/bitbake-hashclient
+++ b/bitbake/bin/bitbake-hashclient
@@ -16,6 +16,8 @@ import time
 import warnings
 import netrc
 import json
+import statistics
+import textwrap
 warnings.simplefilter("default")
 
 try:
@@ -81,6 +83,7 @@ def main():
             nonlocal found_hashes
             nonlocal missed_hashes
             nonlocal max_time
+            nonlocal times
 
             with hashserv.create_client(args.address) as client:
                 for i in range(args.requests):
@@ -98,29 +101,41 @@ def main():
                         else:
                             missed_hashes += 1
 
-                        max_time = max(elapsed, max_time)
+                        times.append(elapsed)
                         pbar.update()
 
         max_time = 0
         found_hashes = 0
         missed_hashes = 0
         lock = threading.Lock()
-        total_requests = args.clients * args.requests
+        times = []
         start_time = time.perf_counter()
-        with ProgressBar(total=total_requests) as pbar:
+        with ProgressBar(total=args.clients * args.requests) as pbar:
             threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
             for t in threads:
                 t.start()
 
             for t in threads:
                 t.join()
+            total_elapsed = time.perf_counter() - start_time
 
-        elapsed = time.perf_counter() - start_time
         with lock:
-            print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
-            print("Average request time %.8fs" % (elapsed / total_requests))
-            print("Max request time was %.8fs" % max_time)
-            print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+            mean = statistics.mean(times)
+            median = statistics.median(times)
+            stddev = statistics.pstdev(times)
+
+            print(f"Number of clients: {args.clients}")
+            print(f"Requests per client: {args.requests}")
+            print(f"Number of requests: {len(times)}")
+            print(f"Total elapsed time: {total_elapsed:.3f}s")
+            print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
+            print(f"Average request time: {mean:.3f}s")
+            print(f"Median request time: {median:.3f}s")
+            print(f"Request time std dev: {stddev:.3f}s")
+            print(f"Maximum request time: {max(times):.3f}s")
+            print(f"Minimum request time: {min(times):.3f}s")
+            print(f"Hashes found: {found_hashes}")
+            print(f"Hashes missed: {missed_hashes}")
 
         if args.report:
             with ProgressBar(total=args.requests) as pbar:
@@ -212,6 +227,27 @@ def main():
         print("New hashes marked: %d" % result["count"])
         return 0
 
+    def handle_gc_mark_stream(args, client):
+        stdin = (l.strip() for l in sys.stdin)
+        marked_hashes = 0
+
+        try:
+            result = client.gc_mark_stream(args.mark, stdin)
+            marked_hashes = result["count"]
+        except ConnectionError:
+            logger.warning(
+                "Server doesn't seem to support `gc-mark-stream`. Sending "
+                "hashes sequentially using `gc-mark` API."
+            )
+            for line in stdin:
+                pairs = line.split()
+                condition = dict(zip(pairs[::2], pairs[1::2]))
+                result = client.gc_mark(args.mark, condition)
+                marked_hashes += result["count"]
+
+        print("New hashes marked: %d" % marked_hashes)
+        return 0
+
     def handle_gc_sweep(args, client):
         result = client.gc_sweep(args.mark)
         print("Removed %d rows" % result["count"])
@@ -225,7 +261,45 @@ def main():
         print("true" if result else "false")
         return 0
 
-    parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+    def handle_ping(args, client):
+        times = []
+        for i in range(1, args.count + 1):
+            if not args.quiet:
+                print(f"Ping {i} of {args.count}... ", end="")
+            start_time = time.perf_counter()
+            client.ping()
+            elapsed = time.perf_counter() - start_time
+            times.append(elapsed)
+            if not args.quiet:
+                print(f"{elapsed:.3f}s")
+
+        mean = statistics.mean(times)
+        median = statistics.median(times)
+        std_dev = statistics.pstdev(times)
+
+        if not args.quiet:
+            print("------------------------")
+        print(f"Number of pings: {len(times)}")
+        print(f"Average round trip time: {mean:.3f}s")
+        print(f"Median round trip time: {median:.3f}s")
+        print(f"Round trip time std dev: {std_dev:.3f}s")
+        print(f"Min time is: {min(times):.3f}s")
+        print(f"Max time is: {max(times):.3f}s")
+        return 0
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        description='Hash Equivalence Client',
+        epilog=textwrap.dedent(
+            """
+            Possible ADDRESS options are:
+                unix://PATH          Connect to UNIX domain socket at PATH
+                ws://HOST[:PORT]     Connect to websocket at HOST:PORT (default port is 80)
+                wss://HOST[:PORT]    Connect to secure websocket at HOST:PORT (default port is 443)
+                HOST:PORT            Connect to TCP server at HOST:PORT
+            """
+        ),
+    )
     parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
     parser.add_argument('--log', default='WARNING', help='Set logging level')
     parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
@@ -313,6 +387,16 @@ def main():
                                 help="Keep entries in table where KEY == VALUE")
     gc_mark_parser.set_defaults(func=handle_gc_mark)
 
+    gc_mark_parser_stream = subparsers.add_parser(
+        'gc-mark-stream',
+        help=(
+            "Mark multiple hashes to be retained for garbage collection. Input should be provided via stdin, "
+            "with each line formatted as key-value pairs separated by spaces, for example 'column1 foo column2 bar'."
+        )
+    )
+    gc_mark_parser_stream.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser_stream.set_defaults(func=handle_gc_mark_stream)
+
     gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
     gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
     gc_sweep_parser.set_defaults(func=handle_gc_sweep)
@@ -322,6 +406,11 @@ def main():
     unihash_exists_parser.add_argument("unihash", help="Unihash to check")
     unihash_exists_parser.set_defaults(func=handle_unihash_exists)
 
+    ping_parser = subparsers.add_parser('ping', help="Ping server")
+    ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
+    ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
+    ping_parser.set_defaults(func=handle_ping)
+
     args = parser.parse_args()
 
     logger = logging.getLogger('hashserv')
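
Note on the new gc-mark-stream command: each stdin line is a set of space-separated key/value pairs, which the fallback path converts into a gc-mark condition dict. A small sketch of producing that input; the column names used here ("method", "unihash") are only examples:

    # Sketch: building gc-mark-stream input. The handler splits each line into
    # alternating keys and values, e.g. "unihash 0123abcd" -> {"unihash": "0123abcd"}.
    conditions = [
        {"unihash": "0123abcd"},
        {"method": "example.method", "unihash": "4567ef01"},
    ]
    for cond in conditions:
        print(" ".join(f"{k} {v}" for k, v in cond.items()))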
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv
index 4bfb7abfbc..01503736b9 100755
--- a/bitbake/bin/bitbake-hashserv
+++ b/bitbake/bin/bitbake-hashserv
@@ -125,6 +125,11 @@ The following permissions are supported by the server:
         default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
         help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
     )
+    parser.add_argument(
+        "--reuseport",
+        action="store_true",
+        help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing",
+    )
 
     args = parser.parse_args()
 
@@ -132,7 +137,9 @@ The following permissions are supported by the server:
 
     level = getattr(logging, args.log.upper(), None)
     if not isinstance(level, int):
-        raise ValueError("Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log)
+        raise ValueError(
+            "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log
+        )
 
     logger.setLevel(level)
     console = logging.StreamHandler()
@@ -155,6 +162,7 @@ The following permissions are supported by the server:
         anon_perms=anon_perms,
         admin_username=args.admin_user,
         admin_password=args.admin_password,
+        reuseport=args.reuseport,
     )
     server.serve_forever()
     return 0
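
Note: --reuseport relies on the kernel's SO_REUSEPORT socket option (Linux 3.9+), which lets several independent server processes bind the same address and port while the kernel load-balances incoming connections between them. A generic illustration of the option, not the hashserv implementation; the host and port are placeholders:

    # Generic SO_REUSEPORT illustration (not hashserv code).
    import socket

    def make_listener(host="127.0.0.1", port=9000):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)  # Linux >= 3.9
        s.bind((host, port))
        s.listen()
        return s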
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index d4b1d1aaf2..341ecbcd97 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -18,13 +18,14 @@ import warnings
 warnings.simplefilter("default")
 
 bindir = os.path.dirname(__file__)
+toolname = os.path.basename(__file__).split(".")[0]
 topdir = os.path.dirname(bindir)
 sys.path[0:0] = [os.path.join(topdir, 'lib')]
 
 import bb.tinfoil
 import bb.msg
 
-logger = bb.msg.logger_create('bitbake-layers', sys.stdout)
+logger = bb.msg.logger_create(toolname, sys.stdout)
 
 def main():
     parser = argparse.ArgumentParser(
@@ -33,7 +34,7 @@ def main():
         add_help=False)
     parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
     parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
-    parser.add_argument('-F', '--force', help='Force add without recipe parse verification', action='store_true')
+    parser.add_argument('-F', '--force', help='Forced execution: can be specified multiple times. -F will force add without recipe parse verification and -FF will additionally force the run withput layer parsing.', action='count', default=0)
     parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
 
     global_args, unparsed_args = parser.parse_known_args()
@@ -57,18 +58,23 @@ def main():
                          level=logger.getEffectiveLevel())
 
     plugins = []
-    tinfoil = bb.tinfoil.Tinfoil(tracking=True)
-    tinfoil.logger.setLevel(logger.getEffectiveLevel())
-    try:
-        tinfoil.prepare(True)
-        for path in ([topdir] +
-                     tinfoil.config_data.getVar('BBPATH').split(':')):
-            pluginpath = os.path.join(path, 'lib', 'bblayers')
+    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+
+        if global_args.force > 1:
+            bbpaths = []
+        else:
+            tinfoil.prepare(True)
+            bbpaths = tinfoil.config_data.getVar('BBPATH').split(':')
+
+        for path in ([topdir] + bbpaths):
+            pluginbasepath = {"bitbake-layers":'bblayers', 'bitbake-config-build':'bbconfigbuild'}[toolname]
+            pluginpath = os.path.join(path, 'lib', pluginbasepath)
             bb.utils.load_plugins(logger, plugins, pluginpath)
 
         registered = False
         for plugin in plugins:
-            if hasattr(plugin, 'tinfoil_init'):
+            if hasattr(plugin, 'tinfoil_init') and global_args.force <= 1:
                 plugin.tinfoil_init(tinfoil)
             if hasattr(plugin, 'register_commands'):
                 registered = True
@@ -86,8 +92,6 @@ def main():
             tinfoil.config_data.enableTracking()
 
         return args.func(args)
-    finally:
-        tinfoil.shutdown()
 
 
 if __name__ == "__main__":
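
Note: together with the new bitbake-config-build symlink shown earlier, this is how one script serves two tools; the basename of the invoked symlink picks the plugin namespace. Schematically, mirroring the diff above:

    # Schematic of the symlink dispatch introduced above.
    import os

    toolname = os.path.basename(__file__).split(".")[0]
    pluginbasepath = {
        "bitbake-layers": "bblayers",             # plugins under lib/bblayers/
        "bitbake-config-build": "bbconfigbuild",  # plugins under lib/bbconfigbuild/
    }[toolname]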
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
index ad0a069401..3992e84eab 100755
--- a/bitbake/bin/bitbake-prserv
+++ b/bitbake/bin/bitbake-prserv
@@ -16,11 +16,18 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib
 import prserv
 import prserv.serv
 
-VERSION = "1.1.0"
+VERSION = "2.0.0"
 
 PRHOST_DEFAULT="0.0.0.0"
 PRPORT_DEFAULT=8585
 
+def init_logger(logfile, loglevel):
+    numeric_level = getattr(logging, loglevel.upper(), None)
+    if not isinstance(numeric_level, int):
+        raise ValueError("Invalid log level: %s" % loglevel)
+    FORMAT = "%(asctime)-15s %(message)s"
+    logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
+
 def main():
     parser = argparse.ArgumentParser(
         description="BitBake PR Server. Version=%s" % VERSION,
@@ -70,12 +77,25 @@ def main():
         action="store_true",
         help="open database in read-only mode",
     )
+    parser.add_argument(
+        "-u",
+        "--upstream",
+        default=os.environ.get("PRSERV_UPSTREAM", None),
+        help="Upstream PR service (host:port)",
+    )
 
     args = parser.parse_args()
-    prserv.init_logger(os.path.abspath(args.log), args.loglevel)
+    init_logger(os.path.abspath(args.log), args.loglevel)
 
     if args.start:
-        ret=prserv.serv.start_daemon(args.file, args.host, args.port, os.path.abspath(args.log), args.read_only)
+        ret=prserv.serv.start_daemon(
+            args.file,
+            args.host,
+            args.port,
+            os.path.abspath(args.log),
+            args.read_only,
+            args.upstream
+        )
     elif args.stop:
         ret=prserv.serv.stop_daemon(args.host, args.port)
     else:
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest
index f25f23b1ae..1b7a783fdc 100755
--- a/bitbake/bin/bitbake-selftest
+++ b/bitbake/bin/bitbake-selftest
@@ -15,6 +15,7 @@ import unittest
 try:
     import bb
     import hashserv
+    import prserv
     import layerindexlib
 except RuntimeError as exc:
     sys.exit(str(exc))
@@ -27,12 +28,12 @@ tests = ["bb.tests.codeparser",
          "bb.tests.event",
          "bb.tests.fetch",
          "bb.tests.parse",
-         "bb.tests.persist_data",
          "bb.tests.runqueue",
          "bb.tests.siggen",
          "bb.tests.utils",
          "bb.tests.compression",
          "hashserv.tests",
+         "prserv.tests",
          "layerindexlib.tests.layerindexobj",
          "layerindexlib.tests.restapi",
          "layerindexlib.tests.cooker"]
diff --git a/bitbake/bin/bitbake-server b/bitbake/bin/bitbake-server
index 454a3919aa..a559109e3f 100755
--- a/bitbake/bin/bitbake-server
+++ b/bitbake/bin/bitbake-server
@@ -9,6 +9,7 @@ import os
 import sys
 import warnings
 warnings.simplefilter("default")
+warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
 import logging
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 
@@ -38,9 +39,9 @@ if xmlrpcinterface[0] == "None":
     with open('/dev/null', 'r') as si:
         os.dup2(si.fileno(), sys.stdin.fileno())
 
-    so = open(logfile, 'a+')
-    os.dup2(so.fileno(), sys.stdout.fileno())
-    os.dup2(so.fileno(), sys.stderr.fileno())
+    with open(logfile, 'a+') as so:
+        os.dup2(so.fileno(), sys.stdout.fileno())
+        os.dup2(so.fileno(), sys.stderr.fileno())
 
     # Have stdout and stderr be the same so log output matches chronologically
     # and there aren't two seperate buffers
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index e8073f2ac3..d2b146a6a9 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -9,6 +9,7 @@ import os
 import sys
 import warnings
 warnings.simplefilter("default")
+warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 from bb import fetch2
 import logging
@@ -21,9 +22,14 @@ import traceback
 import queue
 import shlex
 import subprocess
+import fcntl
 from multiprocessing import Lock
 from threading import Thread
 
+# Remove when we have a minimum of python 3.10
+if not hasattr(fcntl, 'F_SETPIPE_SZ'):
+    fcntl.F_SETPIPE_SZ = 1031
+
 bb.utils.check_system_locale()
 
 # Users shouldn't be running this code directly
@@ -44,7 +50,6 @@ if sys.argv[1].startswith("decafbadbad"):
 # updates to log files for use with tail
 try:
     if sys.stdout.name == '<stdout>':
-        import fcntl
         fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
         fl |= os.O_SYNC
         fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
@@ -56,6 +61,12 @@ logger = logging.getLogger("BitBake")
 
 worker_pipe = sys.stdout.fileno()
 bb.utils.nonblockingfd(worker_pipe)
+# Try to make the pipe buffers larger as it is much more efficient. If we can't
+# e.g. out of buffer space (/proc/sys/fs/pipe-user-pages-soft) then just pass over.
+try:
+    fcntl.fcntl(worker_pipe, fcntl.F_SETPIPE_SZ, 512 * 1024)
+except:
+    pass
 # Need to guard against multiprocessing being used in child processes
 # and multiple processes trying to write to the parent at the same time
 worker_pipe_lock = None
@@ -105,7 +116,7 @@ def worker_flush(worker_queue):
                 if not worker_queue.empty():
                     worker_queue_int.extend(worker_queue.get())
                 written = os.write(worker_pipe, worker_queue_int)
-                worker_queue_int = worker_queue_int[written:]
+                del worker_queue_int[0:written]
             except (IOError, OSError) as e:
                 if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
                     raise
@@ -171,11 +182,8 @@ def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
     elif workerdata["umask"]:
         umask = workerdata["umask"]
     if umask:
-        # umask might come in as a number or text string..
-        try:
-            umask = int(umask, 8)
-        except TypeError:
-            pass
+        # Convert to a python numeric value as it could be a string
+        umask = bb.utils.to_filemode(umask)
 
     dry_run = cfg.dry_run or runtask['dry_run']
 
@@ -357,7 +365,7 @@ class runQueueWorkerPipe():
     def read(self):
         start = len(self.queue)
         try:
-            self.queue.extend(self.input.read(102400) or b"")
+            self.queue.extend(self.input.read(512*1024) or b"")
         except (OSError, IOError) as e:
             if e.errno != errno.EAGAIN:
                 raise
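
Note: fcntl.F_SETPIPE_SZ is only provided by Python's fcntl module from 3.10 onwards, hence the fallback to the raw Linux constant 1031; a larger pipe buffer means fewer write() round-trips between worker and server, and failure (for example hitting /proc/sys/fs/pipe-user-pages-soft) is harmless. A standalone illustration of the same pattern:

    # Standalone illustration of the pipe-resize pattern used above.
    import fcntl
    import os

    if not hasattr(fcntl, "F_SETPIPE_SZ"):
        fcntl.F_SETPIPE_SZ = 1031  # Linux fcntl constant, for pre-3.10 Pythons

    r, w = os.pipe()
    try:
        fcntl.fcntl(w, fcntl.F_SETPIPE_SZ, 512 * 1024)
    except OSError:
        pass  # keep the default pipe size if the kernel refuses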
diff --git a/bitbake/bin/git-make-shallow b/bitbake/bin/git-make-shallow
index 9de557c10e..e6c180b4d6 100755
--- a/bitbake/bin/git-make-shallow
+++ b/bitbake/bin/git-make-shallow
@@ -115,8 +115,8 @@ def filter_refs(refs):
     all_refs = get_all_refs()
     to_remove = set(all_refs) - set(refs)
     if to_remove:
-        check_output(['xargs', '-0', '-n', '1'] + git_cmd + ['update-ref', '-d', '--no-deref'],
-                     input=''.join(l + '\0' for l in to_remove))
+        check_output(git_cmd + ['update-ref', '--no-deref', '--stdin', '-z'],
+                     input=''.join('delete ' + l + '\0\0' for l in to_remove))
 
 
 def follow_history_intersections(revisions, refs):
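
Note: in `git update-ref --stdin -z` mode a delete command is encoded as `delete SP <ref> NUL <old-value> NUL`, and leaving <old-value> empty skips the old-value check, which is why each ref is followed by two NUL bytes. A sketch of the stdin payload construction; the refs listed are only examples:

    # Sketch of the stdin payload for `git update-ref --no-deref --stdin -z`.
    refs_to_remove = ["refs/heads/tmp-branch", "refs/tags/old-tag"]  # example refs
    payload = "".join("delete " + ref + "\0\0" for ref in refs_to_remove)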