Diffstat (limited to 'bitbake/bin')
-rwxr-xr-x  bitbake/bin/bitbake                 7
l---------  bitbake/bin/bitbake-config-build    1
-rwxr-xr-x  bitbake/bin/bitbake-diffsigs       68
-rwxr-xr-x  bitbake/bin/bitbake-getvar         71
-rwxr-xr-x  bitbake/bin/bitbake-hashclient    351
-rwxr-xr-x  bitbake/bin/bitbake-hashserv      155
-rwxr-xr-x  bitbake/bin/bitbake-layers         32
-rwxr-xr-x  bitbake/bin/bitbake-prserv        114
-rwxr-xr-x  bitbake/bin/bitbake-selftest        6
-rwxr-xr-x  bitbake/bin/bitbake-server         24
-rwxr-xr-x  bitbake/bin/bitbake-worker        171
-rwxr-xr-x  bitbake/bin/git-make-shallow       40
-rwxr-xr-x  bitbake/bin/toaster                18
-rwxr-xr-x  bitbake/bin/toaster-eventreplay    82
14 files changed, 864 insertions, 276 deletions
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index bc762bfc15..40b5d895c1 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -12,6 +12,8 @@
 
 import os
 import sys
+import warnings
+warnings.simplefilter("default")
 
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                 'lib'))
@@ -23,10 +25,9 @@ except RuntimeError as exc:
 from bb import cookerdata
 from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
-if sys.getfilesystemencoding() != "utf-8":
-    sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
 
-__version__ = "1.49.2"
+__version__ = "2.15.1"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
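
The bb.utils.check_system_locale() call above replaces the inline UTF-8 check that previously lived in each entry script. A minimal sketch of the equivalent behaviour, based only on the removed lines (the actual bb.utils implementation may differ):

    import sys

    def check_system_locale():
        # Python cannot change the filesystem encoding after startup, so
        # refuse to run unless it is already UTF-8.
        if sys.getfilesystemencoding() != "utf-8":
            sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).")
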
diff --git a/bitbake/bin/bitbake-config-build b/bitbake/bin/bitbake-config-build
new file mode 120000
index 0000000000..11e6df80c4
--- /dev/null
+++ b/bitbake/bin/bitbake-config-build
@@ -0,0 +1 @@
+bitbake-layers
\ No newline at end of file
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
index 19420a2df6..9d6cb8c944 100755
--- a/bitbake/bin/bitbake-diffsigs
+++ b/bitbake/bin/bitbake-diffsigs
@@ -11,6 +11,8 @@
 import os
 import sys
 import warnings
+
+warnings.simplefilter("default")
 import argparse
 import logging
 import pickle
@@ -26,6 +28,7 @@ logger = bb.msg.logger_create(myname)
 
 is_dump = myname == 'bitbake-dumpsig'
 
+
 def find_siginfo(tinfoil, pn, taskname, sigs=None):
     result = None
     tinfoil.set_event_mask(['bb.event.FindSigInfoResult',
@@ -51,6 +54,7 @@ def find_siginfo(tinfoil, pn, taskname, sigs=None):
         sys.exit(2)
     return result
 
+
 def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
     """ Find the most recent signature files for the specified PN/task """
 
@@ -59,22 +63,26 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
 
     if sig1 and sig2:
         sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
-        if len(sigfiles) == 0:
+        if not sigfiles:
             logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
             sys.exit(1)
-        elif not sig1 in sigfiles:
+        elif sig1 not in sigfiles:
             logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
             sys.exit(1)
-        elif not sig2 in sigfiles:
+        elif sig2 not in sigfiles:
             logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
             sys.exit(1)
-        latestfiles = [sigfiles[sig1], sigfiles[sig2]]
+
+        latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
     else:
-        filedates = find_siginfo(bbhandler, pn, taskname)
-        latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
-        if not latestfiles:
+        sigfiles = find_siginfo(bbhandler, pn, taskname)
+        latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
+        if not latestsigs:
             logger.error('No sigdata files found matching %s %s' % (pn, taskname))
             sys.exit(1)
+        latestfiles = [sigfiles[latestsigs[0]]['path']]
+        if len(latestsigs) > 1:
+            latestfiles.append(sigfiles[latestsigs[1]]['path'])
 
     return latestfiles
 
@@ -85,14 +93,14 @@ def recursecb(key, hash1, hash2):
     hashfiles = find_siginfo(tinfoil, key, None, hashes)
 
     recout = []
-    if len(hashfiles) == 0:
+    if not hashfiles:
         recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
-    elif not hash1 in hashfiles:
+    elif hash1 not in hashfiles:
         recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
-    elif not hash2 in hashfiles:
+    elif hash2 not in hashfiles:
         recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
     else:
-        out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+        out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
         for change in out2:
             for line in change.splitlines():
                 recout.append('    ' + line)
@@ -109,36 +117,36 @@ parser.add_argument('-D', '--debug',
 
 if is_dump:
     parser.add_argument("-t", "--task",
                         help="find the signature data file for the last run of the specified task",
                         action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
 
     parser.add_argument("sigdatafile1",
                         help="Signature file to dump. Not used when using -t/--task.",
                         action="store", nargs='?', metavar="sigdatafile")
 else:
     parser.add_argument('-c', '--color',
                         help='Colorize the output (where %(metavar)s is %(choices)s)',
                         choices=['auto', 'always', 'never'], default='auto', metavar='color')
 
     parser.add_argument('-d', '--dump',
                         help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
                         action='store_true')
 
     parser.add_argument("-t", "--task",
                         help="find the signature data files for the last two runs of the specified task and compare them",
                         action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
 
     parser.add_argument("-s", "--signature",
                         help="With -t/--task, specify the signatures to look for instead of taking the last two",
                         action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
 
     parser.add_argument("sigdatafile1",
                         help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
                         action="store", nargs='?')
 
     parser.add_argument("sigdatafile2",
                         help="Second signature file to compare",
                         action="store", nargs='?')
 
 options = parser.parse_args()
 if is_dump:
@@ -156,7 +164,8 @@ if options.taskargs:
     with bb.tinfoil.Tinfoil() as tinfoil:
         tinfoil.prepare(config_only=True)
         if not options.dump and options.sigargs:
-            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1])
+            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0],
+                                      options.sigargs[1])
         else:
             files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
 
@@ -165,7 +174,8 @@ if options.taskargs:
         output = bb.siggen.dump_sigfile(files[-1])
     else:
         if len(files) < 2:
-            logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (options.taskargs[0], options.taskargs[1]))
+            logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (
+                options.taskargs[0], options.taskargs[1]))
             sys.exit(1)
 
         # Recurse into signature comparison
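
The diffsigs changes above track a new find_siginfo() return format: each signature hash now maps to a small record instead of a bare file path. A sketch of the shape being consumed, inferred from the 'path' and 'time' keys used in the hunks (field semantics assumed from context):

    # sigfiles: {"<sighash>": {"path": "/path/to/sigdata.<hash>", "time": <mtime>}, ...}
    latestsigs = sorted(sigfiles, key=lambda h: sigfiles[h]['time'])[-2:]
    latestfiles = [sigfiles[h]['path'] for h in latestsigs]
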
diff --git a/bitbake/bin/bitbake-getvar b/bitbake/bin/bitbake-getvar
new file mode 100755
index 0000000000..378fb13572
--- /dev/null
+++ b/bitbake/bin/bitbake-getvar
@@ -0,0 +1,71 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import io
+import os
+import sys
+import warnings
+import logging
+warnings.simplefilter("default")
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.providers
+import bb.tinfoil
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Bitbake Query Variable")
+    parser.add_argument("variable", help="variable name to query")
+    parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
+    parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
+    parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
+    parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+    parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
+    parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
+    args = parser.parse_args()
+
+    if not args.value:
+        if args.unexpand:
+            sys.exit("--unexpand only makes sense with --value")
+
+        if args.flag:
+            sys.exit("--flag only makes sense with --value")
+
+    quiet = args.quiet or args.value
+    if quiet:
+        logger = logging.getLogger("BitBake")
+        logger.setLevel(logging.WARNING)
+
+    with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
+        if args.recipe:
+            tinfoil.prepare(quiet=3 if quiet else 2)
+            try:
+                d = tinfoil.parse_recipe(args.recipe)
+            except bb.providers.NoProvider as e:
+                sys.exit(str(e))
+        else:
+            tinfoil.prepare(quiet=2, config_only=True)
+            # Expand keys and run anonymous functions to get identical result to
+            # "bitbake -e"
+            d = tinfoil.finalizeData()
+
+        value = None
+        if args.flag:
+            value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
+            if value is None and not args.ignore_undefined:
+                sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
+        else:
+            value = d.getVar(args.variable, expand=not args.unexpand)
+            if value is None and not args.ignore_undefined:
+                sys.exit(f"The variable '{args.variable}' is not defined")
+        if args.value:
+            print(str(value if value is not None else ""))
+        else:
+            bb.data.emit_var(args.variable, d=d, all=True)
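
The new bitbake-getvar tool is a thin command-line wrapper over the tinfoil API. The same query can be performed directly in Python; a minimal sketch using only the calls that appear in the script above (the variable name is a placeholder):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.prepare(quiet=2, config_only=True)
        d = tinfoil.finalizeData()   # expand keys/anonymous functions, like "bitbake -e"
        print(d.getVar("MACHINE"))   # placeholder variable name
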
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
index a89290217b..b8755c5797 100755
--- a/bitbake/bin/bitbake-hashclient
+++ b/bitbake/bin/bitbake-hashclient
@@ -13,6 +13,12 @@ import pprint
 import sys
 import threading
 import time
+import warnings
+import netrc
+import json
+import statistics
+import textwrap
+warnings.simplefilter("default")
 
 try:
     import tqdm
@@ -34,18 +40,42 @@ except ImportError:
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
 
 import hashserv
+import bb.asyncrpc
 
 DEFAULT_ADDRESS = 'unix://./hashserve.sock'
 METHOD = 'stress.test.method'
 
+def print_user(u):
+    print(f"Username: {u['username']}")
+    if "permissions" in u:
+        print("Permissions: " + " ".join(u["permissions"]))
+    if "token" in u:
+        print(f"Token: {u['token']}")
+
 
 def main():
+    def handle_get(args, client):
+        result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
+        if not result:
+            return 0
+
+        print(json.dumps(result, sort_keys=True, indent=4))
+        return 0
+
+    def handle_get_outhash(args, client):
+        result = client.get_outhash(args.method, args.outhash, args.taskhash)
+        if not result:
+            return 0
+
+        print(json.dumps(result, sort_keys=True, indent=4))
+        return 0
+
     def handle_stats(args, client):
         if args.reset:
             s = client.reset_stats()
         else:
             s = client.get_stats()
-        pprint.pprint(s)
+        print(json.dumps(s, sort_keys=True, indent=4))
         return 0
 
     def handle_stress(args, client):
@@ -53,47 +83,59 @@ def main():
             nonlocal found_hashes
             nonlocal missed_hashes
             nonlocal max_time
+            nonlocal times
 
-            client = hashserv.create_client(args.address)
-
-            for i in range(args.requests):
-                taskhash = hashlib.sha256()
-                taskhash.update(args.taskhash_seed.encode('utf-8'))
-                taskhash.update(str(i).encode('utf-8'))
+            with hashserv.create_client(args.address) as client:
+                for i in range(args.requests):
+                    taskhash = hashlib.sha256()
+                    taskhash.update(args.taskhash_seed.encode('utf-8'))
+                    taskhash.update(str(i).encode('utf-8'))
 
-                start_time = time.perf_counter()
-                l = client.get_unihash(METHOD, taskhash.hexdigest())
-                elapsed = time.perf_counter() - start_time
+                    start_time = time.perf_counter()
+                    l = client.get_unihash(METHOD, taskhash.hexdigest())
+                    elapsed = time.perf_counter() - start_time
 
-                with lock:
-                    if l:
-                        found_hashes += 1
-                    else:
-                        missed_hashes += 1
+                    with lock:
+                        if l:
+                            found_hashes += 1
+                        else:
+                            missed_hashes += 1
 
-                    max_time = max(elapsed, max_time)
-                    pbar.update()
+                        times.append(elapsed)
+                        pbar.update()
 
         max_time = 0
         found_hashes = 0
         missed_hashes = 0
         lock = threading.Lock()
-        total_requests = args.clients * args.requests
+        times = []
         start_time = time.perf_counter()
-        with ProgressBar(total=total_requests) as pbar:
+        with ProgressBar(total=args.clients * args.requests) as pbar:
             threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
             for t in threads:
                 t.start()
 
             for t in threads:
                 t.join()
+            total_elapsed = time.perf_counter() - start_time
 
-        elapsed = time.perf_counter() - start_time
         with lock:
-            print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
-            print("Average request time %.8fs" % (elapsed / total_requests))
-            print("Max request time was %.8fs" % max_time)
-            print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+            mean = statistics.mean(times)
+            median = statistics.median(times)
+            stddev = statistics.pstdev(times)
+
+            print(f"Number of clients: {args.clients}")
+            print(f"Requests per client: {args.requests}")
+            print(f"Number of requests: {len(times)}")
+            print(f"Total elapsed time: {total_elapsed:.3f}s")
+            print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
+            print(f"Average request time: {mean:.3f}s")
+            print(f"Median request time: {median:.3f}s")
+            print(f"Request time std dev: {stddev:.3f}s")
+            print(f"Maximum request time: {max(times):.3f}s")
+            print(f"Minimum request time: {min(times):.3f}s")
+            print(f"Hashes found: {found_hashes}")
+            print(f"Hashes missed: {missed_hashes}")
 
         if args.report:
             with ProgressBar(total=args.requests) as pbar:
@@ -111,12 +153,173 @@ def main():
                     with lock:
                         pbar.update()
 
-    parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+    def handle_remove(args, client):
+        where = {k: v for k, v in args.where}
+        if where:
+            result = client.remove(where)
+            print("Removed %d row(s)" % (result["count"]))
+        else:
+            print("No query specified")
+
+    def handle_clean_unused(args, client):
+        result = client.clean_unused(args.max_age)
+        print("Removed %d rows" % (result["count"]))
+        return 0
+
+    def handle_refresh_token(args, client):
+        r = client.refresh_token(args.username)
+        print_user(r)
+
+    def handle_set_user_permissions(args, client):
+        r = client.set_user_perms(args.username, args.permissions)
+        print_user(r)
+
+    def handle_get_user(args, client):
+        r = client.get_user(args.username)
+        print_user(r)
+
+    def handle_get_all_users(args, client):
+        users = client.get_all_users()
+        print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
+        print(("-" * 20) + "+" + ("-" * 20))
+        for u in users:
+            print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
+
+    def handle_new_user(args, client):
+        r = client.new_user(args.username, args.permissions)
+        print_user(r)
+
+    def handle_delete_user(args, client):
+        r = client.delete_user(args.username)
+        print_user(r)
+
+    def handle_get_db_usage(args, client):
+        usage = client.get_db_usage()
+        print(usage)
+        tables = sorted(usage.keys())
+        print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
+        print(("-" * 20) + "+" + ("-" * 20))
+        for t in tables:
+            print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
+        print()
+
+        total_rows = sum(t["rows"] for t in usage.values())
+        print(f"Total rows: {total_rows}")
+
+    def handle_get_db_query_columns(args, client):
+        columns = client.get_db_query_columns()
+        print("\n".join(sorted(columns)))
+
+    def handle_gc_status(args, client):
+        result = client.gc_status()
+        if not result["mark"]:
+            print("No Garbage collection in progress")
+            return 0
+
+        print("Current Mark: %s" % result["mark"])
+        print("Total hashes to keep: %d" % result["keep"])
+        print("Total hashes to remove: %s" % result["remove"])
+        return 0
+
+    def handle_gc_mark(args, client):
+        where = {k: v for k, v in args.where}
+        result = client.gc_mark(args.mark, where)
+        print("New hashes marked: %d" % result["count"])
+        return 0
+
+    def handle_gc_mark_stream(args, client):
+        stdin = (l.strip() for l in sys.stdin)
+        marked_hashes = 0
+
+        try:
+            result = client.gc_mark_stream(args.mark, stdin)
+            marked_hashes = result["count"]
+        except ConnectionError:
+            logger.warning(
+                "Server doesn't seem to support `gc-mark-stream`. Sending "
+                "hashes sequentially using `gc-mark` API."
+            )
+            for line in stdin:
+                pairs = line.split()
+                condition = dict(zip(pairs[::2], pairs[1::2]))
+                result = client.gc_mark(args.mark, condition)
+                marked_hashes += result["count"]
+
+        print("New hashes marked: %d" % marked_hashes)
+        return 0
+
+    def handle_gc_sweep(args, client):
+        result = client.gc_sweep(args.mark)
+        print("Removed %d rows" % result["count"])
+        return 0
+
+    def handle_unihash_exists(args, client):
+        result = client.unihash_exists(args.unihash)
+        if args.quiet:
+            return 0 if result else 1
+
+        print("true" if result else "false")
+        return 0
+
+    def handle_ping(args, client):
+        times = []
+        for i in range(1, args.count + 1):
+            if not args.quiet:
+                print(f"Ping {i} of {args.count}... ", end="")
+            start_time = time.perf_counter()
+            client.ping()
+            elapsed = time.perf_counter() - start_time
+            times.append(elapsed)
+            if not args.quiet:
+                print(f"{elapsed:.3f}s")
+
+        mean = statistics.mean(times)
+        median = statistics.median(times)
+        std_dev = statistics.pstdev(times)
+
+        if not args.quiet:
+            print("------------------------")
+        print(f"Number of pings: {len(times)}")
+        print(f"Average round trip time: {mean:.3f}s")
+        print(f"Median round trip time: {median:.3f}s")
+        print(f"Round trip time std dev: {std_dev:.3f}s")
+        print(f"Min time is: {min(times):.3f}s")
+        print(f"Max time is: {max(times):.3f}s")
+        return 0
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        description='Hash Equivalence Client',
+        epilog=textwrap.dedent(
+            """
+            Possible ADDRESS options are:
+                unix://PATH         Connect to UNIX domain socket at PATH
+                ws://HOST[:PORT]    Connect to websocket at HOST:PORT (default port is 80)
+                wss://HOST[:PORT]   Connect to secure websocket at HOST:PORT (default port is 443)
+                HOST:PORT           Connect to TCP server at HOST:PORT
+            """
+        ),
+    )
     parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
     parser.add_argument('--log', default='WARNING', help='Set logging level')
+    parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
+    parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
+    parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
+    parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
 
     subparsers = parser.add_subparsers()
 
+    get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
+    get_parser.add_argument("method", help="Method to query")
+    get_parser.add_argument("taskhash", help="Task hash to query")
+    get_parser.set_defaults(func=handle_get)
+
+    get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
+    get_outhash_parser.add_argument("method", help="Method to query")
+    get_outhash_parser.add_argument("outhash", help="Output hash to query")
+    get_outhash_parser.add_argument("taskhash", help="Task hash to query")
+    get_outhash_parser.set_defaults(func=handle_get_outhash)
+
     stats_parser = subparsers.add_parser('stats', help='Show server stats')
     stats_parser.add_argument('--reset', action='store_true',
                               help='Reset server stats')
@@ -135,6 +338,79 @@ def main():
                               help='Include string in outhash')
     stress_parser.set_defaults(func=handle_stress)
 
+    remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
+    remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+                               help="Remove entries from table where KEY == VALUE")
+    remove_parser.set_defaults(func=handle_remove)
+
+    clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
+    clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
+    clean_unused_parser.set_defaults(func=handle_clean_unused)
+
+    refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
+    refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
+    refresh_token_parser.set_defaults(func=handle_refresh_token)
+
+    set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
+    set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
+    set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+    set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
+
+    get_user_parser = subparsers.add_parser('get-user', help="Get user")
+    get_user_parser.add_argument("--username", "-u", help="Username")
+    get_user_parser.set_defaults(func=handle_get_user)
+
+    get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
+    get_all_users_parser.set_defaults(func=handle_get_all_users)
+
+    new_user_parser = subparsers.add_parser('new-user', help="Create new user")
+    new_user_parser.add_argument("--username", "-u", help="Username", required=True)
+    new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+    new_user_parser.set_defaults(func=handle_new_user)
+
+    delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
+    delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
+    delete_user_parser.set_defaults(func=handle_delete_user)
+
+    db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
+    db_usage_parser.set_defaults(func=handle_get_db_usage)
+
+    db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
+    db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
+
+    gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
+    gc_status_parser.set_defaults(func=handle_gc_status)
+
+    gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
+    gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+                                help="Keep entries in table where KEY == VALUE")
+    gc_mark_parser.set_defaults(func=handle_gc_mark)
+
+    gc_mark_parser_stream = subparsers.add_parser(
+        'gc-mark-stream',
+        help=(
+            "Mark multiple hashes to be retained for garbage collection. Input should be provided via stdin, "
+            "with each line formatted as key-value pairs separated by spaces, for example 'column1 foo column2 bar'."
+        )
+    )
+    gc_mark_parser_stream.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser_stream.set_defaults(func=handle_gc_mark_stream)
+
+    gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
+    gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_sweep_parser.set_defaults(func=handle_gc_sweep)
+
+    unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
+    unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
+    unihash_exists_parser.add_argument("unihash", help="Unihash to check")
+    unihash_exists_parser.set_defaults(func=handle_unihash_exists)
+
+    ping_parser = subparsers.add_parser('ping', help="Ping server")
+    ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
+    ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
+    ping_parser.set_defaults(func=handle_ping)
+
     args = parser.parse_args()
 
     logger = logging.getLogger('hashserv')
@@ -148,11 +424,30 @@ def main():
     console.setLevel(level)
     logger.addHandler(console)
 
+    login = args.login
+    password = args.password
+
+    if login is None and args.netrc:
+        try:
+            n = netrc.netrc()
+            auth = n.authenticators(args.address)
+            if auth is not None:
+                login, _, password = auth
+        except FileNotFoundError:
+            pass
+        except netrc.NetrcParseError as e:
+            sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
+
     func = getattr(args, 'func', None)
     if func:
-        client = hashserv.create_client(args.address)
-
-        return func(args, client)
+        try:
+            with hashserv.create_client(args.address, login, password) as client:
+                if args.become:
+                    client.become_user(args.become)
+                return func(args, client)
+        except bb.asyncrpc.InvokeError as e:
+            print(f"ERROR: {e}")
+            return 1
 
     return 0
 
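
With the changes above, hashserv.create_client() is used as a context manager and accepts credentials, falling back to .netrc when --login is not given. A minimal sketch of the authenticated usage pattern, limited to calls visible in this diff (address, username, token, and hash values are placeholders):

    import hashserv

    with hashserv.create_client("unix://./hashserve.sock", "user", "token") as client:
        client.become_user("otheruser")  # optional impersonation, if permitted
        print(client.get_unihash("stress.test.method", "0123abcd..."))  # placeholder hash
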
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv
index 153f65a378..01503736b9 100755
--- a/bitbake/bin/bitbake-hashserv
+++ b/bitbake/bin/bitbake-hashserv
@@ -10,55 +10,170 @@ import sys
 import logging
 import argparse
 import sqlite3
+import warnings
 
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+warnings.simplefilter("default")
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
 
 import hashserv
+from hashserv.server import DEFAULT_ANON_PERMS
 
 VERSION = "1.0.0"
 
-DEFAULT_BIND = 'unix://./hashserve.sock'
+DEFAULT_BIND = "unix://./hashserve.sock"
 
 
 def main():
-    parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION,
-                                     epilog='''The bind address is the path to a unix domain socket if it is
-                                            prefixed with "unix://". Otherwise, it is an IP address
-                                            and port in form ADDRESS:PORT. To bind to all addresses, leave
-                                            the ADDRESS empty, e.g. "--bind :8686". To bind to a specific
-                                            IPv6 address, enclose the address in "[]", e.g.
-                                            "--bind [::1]:8686"'''
-                                     )
-
-    parser.add_argument('-b', '--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
-    parser.add_argument('-d', '--database', default='./hashserv.db', help='Database file (default "%(default)s")')
-    parser.add_argument('-l', '--log', default='WARNING', help='Set logging level')
-    parser.add_argument('-u', '--upstream', help='Upstream hashserv to pull hashes from')
-    parser.add_argument('-r', '--read-only', action='store_true', help='Disallow write operations from clients')
+    parser = argparse.ArgumentParser(
+        description="Hash Equivalence Reference Server. Version=%s" % VERSION,
+        formatter_class=argparse.RawTextHelpFormatter,
+        epilog="""
+The bind address may take one of the following formats:
+  unix://PATH - Bind to unix domain socket at PATH
+  ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
+  ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT
+
+To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
+"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
+"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"
+
+Note that the default Anonymous permissions are designed to not break existing
+server instances when upgrading, but are not particularly secure defaults. If
+you want to use authentication, it is recommended that you use "--anon-perms
+@read" to only give anonymous users read access, or "--anon-perms @none" to
+give un-authenticated users no access at all.
+
+Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
+this would allow anonymous users to manage all users accounts, which is a bad
+idea.
+
+If you are using user authentication, you should run your server in websockets
+mode with an SSL terminating load balancer in front of it (as this server does
+not implement SSL). Otherwise all usernames and passwords will be transmitted
+in the clear. When configured this way, clients can connect using a secure
+websocket, as in "wss://SERVER:PORT"
+
+The following permissions are supported by the server:
+
+  @none       - No permissions
+  @read       - The ability to read equivalent hashes from the server
+  @report     - The ability to report equivalent hashes to the server
+  @db-admin   - Manage the hash database(s). This includes cleaning the
+                database, removing hashes, etc.
+  @user-admin - The ability to manage user accounts. This includes, creating
+                users, deleting users, resetting login tokens, and assigning
+                permissions.
+  @all        - All possible permissions, including any that may be added
+                in the future
+        """,
+    )
+
+    parser.add_argument(
+        "-b",
+        "--bind",
+        default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
+        help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
+    )
+    parser.add_argument(
+        "-d",
+        "--database",
+        default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
+        help='Database file (default $HASHSERVER_DB, "%(default)s")',
+    )
+    parser.add_argument(
+        "-l",
+        "--log",
+        default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
+        help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
+    )
+    parser.add_argument(
+        "-u",
+        "--upstream",
+        default=os.environ.get("HASHSERVER_UPSTREAM", None),
+        help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
+    )
+    parser.add_argument(
+        "-r",
+        "--read-only",
+        action="store_true",
+        help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
+    )
+    parser.add_argument(
+        "--db-username",
+        default=os.environ.get("HASHSERVER_DB_USERNAME", None),
+        help="Database username ($HASHSERVER_DB_USERNAME)",
+    )
+    parser.add_argument(
+        "--db-password",
+        default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
+        help="Database password ($HASHSERVER_DB_PASSWORD)",
+    )
+    parser.add_argument(
+        "--anon-perms",
+        metavar="PERM[,PERM[,...]]",
+        default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
+        help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
+    )
+    parser.add_argument(
+        "--admin-user",
+        default=os.environ.get("HASHSERVER_ADMIN_USER", None),
+        help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
+    )
+    parser.add_argument(
+        "--admin-password",
+        default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
+        help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
+    )
+    parser.add_argument(
+        "--reuseport",
+        action="store_true",
+        help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing",
+    )
 
     args = parser.parse_args()
 
-    logger = logging.getLogger('hashserv')
+    logger = logging.getLogger("hashserv")
 
     level = getattr(logging, args.log.upper(), None)
     if not isinstance(level, int):
-        raise ValueError('Invalid log level: %s' % args.log)
+        raise ValueError(
+            "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log
+        )
 
     logger.setLevel(level)
     console = logging.StreamHandler()
     console.setLevel(level)
     logger.addHandler(console)
 
-    server = hashserv.create_server(args.bind, args.database, upstream=args.upstream, read_only=args.read_only)
+    read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
+    if "," in args.anon_perms:
+        anon_perms = args.anon_perms.split(",")
+    else:
+        anon_perms = args.anon_perms.split()
+
+    server = hashserv.create_server(
+        args.bind,
+        args.database,
+        upstream=args.upstream,
+        read_only=read_only,
+        db_username=args.db_username,
+        db_password=args.db_password,
+        anon_perms=anon_perms,
+        admin_username=args.admin_user,
+        admin_password=args.admin_password,
+        reuseport=args.reuseport,
+    )
     server.serve_forever()
     return 0
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         ret = main()
     except Exception:
         ret = 1
         import traceback
+
         traceback.print_exc()
     sys.exit(ret)
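
Every server option above now also has an environment-variable default (HASHSERVER_BIND, HASHSERVER_DB, and so on), so the server can be configured entirely from the environment, which suits container deployments. For reference, a minimal sketch of the equivalent programmatic start-up using the create_server() keywords from this diff (the bind address and permission values are examples drawn from the epilog text):

    import hashserv

    server = hashserv.create_server(
        "ws://:8686",           # websocket bind, one of the documented formats
        "./hashserv.db",
        read_only=True,
        anon_perms=["@read"],   # anonymous users get read-only access
    )
    server.serve_forever()
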
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index ff085d6744..341ecbcd97 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -14,15 +14,18 @@ import logging
 import os
 import sys
 import argparse
+import warnings
+warnings.simplefilter("default")
 
 bindir = os.path.dirname(__file__)
+toolname = os.path.basename(__file__).split(".")[0]
 topdir = os.path.dirname(bindir)
 sys.path[0:0] = [os.path.join(topdir, 'lib')]
 
 import bb.tinfoil
 import bb.msg
 
-logger = bb.msg.logger_create('bitbake-layers', sys.stdout)
+logger = bb.msg.logger_create(toolname, sys.stdout)
 
 def main():
     parser = argparse.ArgumentParser(
@@ -31,7 +34,7 @@ def main():
         add_help=False)
     parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
     parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
-    parser.add_argument('-F', '--force', help='Force add without recipe parse verification', action='store_true')
+    parser.add_argument('-F', '--force', help='Forced execution: can be specified multiple times. -F will force add without recipe parse verification and -FF will additionally force the run without layer parsing.', action='count', default=0)
     parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
 
     global_args, unparsed_args = parser.parse_known_args()
@@ -55,22 +58,27 @@ def main():
                              level=logger.getEffectiveLevel())
 
     plugins = []
-    tinfoil = bb.tinfoil.Tinfoil(tracking=True)
-    tinfoil.logger.setLevel(logger.getEffectiveLevel())
-    try:
-        tinfoil.prepare(True)
-        for path in ([topdir] +
-                     tinfoil.config_data.getVar('BBPATH').split(':')):
-            pluginpath = os.path.join(path, 'lib', 'bblayers')
+    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+
+        if global_args.force > 1:
+            bbpaths = []
+        else:
+            tinfoil.prepare(True)
+            bbpaths = tinfoil.config_data.getVar('BBPATH').split(':')
+
+        for path in ([topdir] + bbpaths):
+            pluginbasepath = {"bitbake-layers":'bblayers', 'bitbake-config-build':'bbconfigbuild'}[toolname]
+            pluginpath = os.path.join(path, 'lib', pluginbasepath)
             bb.utils.load_plugins(logger, plugins, pluginpath)
 
         registered = False
         for plugin in plugins:
+            if hasattr(plugin, 'tinfoil_init') and global_args.force <= 1:
+                plugin.tinfoil_init(tinfoil)
             if hasattr(plugin, 'register_commands'):
                 registered = True
                 plugin.register_commands(subparsers)
-            if hasattr(plugin, 'tinfoil_init'):
-                plugin.tinfoil_init(tinfoil)
 
         if not registered:
             logger.error("No commands registered - missing plugins?")
@@ -84,8 +92,6 @@ def main():
             tinfoil.config_data.enableTracking()
 
         return args.func(args)
-    finally:
-        tinfoil.shutdown()
 
 
 if __name__ == "__main__":
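
A structural note on the bitbake-layers change: the script now keys its behaviour off its own basename, which is what makes the new bitbake-config-build symlink work. The same file loads plugins from lib/bblayers or lib/bbconfigbuild depending on the name it was invoked as, and -FF skips layer parsing entirely by leaving the BBPATH-derived plugin paths empty. The dispatch reduced to its core (identifiers taken verbatim from the diff):

    toolname = os.path.basename(__file__).split(".")[0]
    pluginbasepath = {"bitbake-layers": 'bblayers',
                      'bitbake-config-build': 'bbconfigbuild'}[toolname]
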
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
index 1e9b6cbc1b..3992e84eab 100755
--- a/bitbake/bin/bitbake-prserv
+++ b/bitbake/bin/bitbake-prserv
@@ -1,49 +1,103 @@
 #!/usr/bin/env python3
 #
+# Copyright BitBake Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 import os
 import sys,logging
-import optparse
+import argparse
+import warnings
+warnings.simplefilter("default")
 
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
 
 import prserv
 import prserv.serv
 
-__version__="1.0.0"
+VERSION = "2.0.0"
 
-PRHOST_DEFAULT='0.0.0.0'
+PRHOST_DEFAULT="0.0.0.0"
 PRPORT_DEFAULT=8585
 
+def init_logger(logfile, loglevel):
+    numeric_level = getattr(logging, loglevel.upper(), None)
+    if not isinstance(numeric_level, int):
+        raise ValueError("Invalid log level: %s" % loglevel)
+    FORMAT = "%(asctime)-15s %(message)s"
+    logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
+
 def main():
-    parser = optparse.OptionParser(
-        version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
-        usage = "%prog < --start | --stop > [options]")
-
-    parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
-                      dest="dbfile", type="string", default="prserv.sqlite3")
-    parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
-                      dest="logfile", type="string", default="prserv.log")
-    parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
-                      action = "store", type="string", dest="loglevel", default = "INFO")
-    parser.add_option("--start", help="start daemon",
-                      action="store_true", dest="start")
-    parser.add_option("--stop", help="stop daemon",
-                      action="store_true", dest="stop")
-    parser.add_option("--host", help="ip address to bind", action="store",
-                      dest="host", type="string", default=PRHOST_DEFAULT)
-    parser.add_option("--port", help="port number(default: 8585)", action="store",
-                      dest="port", type="int", default=PRPORT_DEFAULT)
-
-    options, args = parser.parse_args(sys.argv)
-    prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)
-
-    if options.start:
-        ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
-    elif options.stop:
-        ret=prserv.serv.stop_daemon(options.host, options.port)
+    parser = argparse.ArgumentParser(
+        description="BitBake PR Server. Version=%s" % VERSION,
+        formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument(
+        "-f",
+        "--file",
+        default="prserv.sqlite3",
+        help="database filename (default: prserv.sqlite3)",
+    )
+    parser.add_argument(
+        "-l",
+        "--log",
+        default="prserv.log",
+        help="log filename(default: prserv.log)",
+    )
+    parser.add_argument(
+        "--loglevel",
+        default="INFO",
+        help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
+    )
+    parser.add_argument(
+        "--start",
+        action="store_true",
+        help="start daemon",
+    )
+    parser.add_argument(
+        "--stop",
+        action="store_true",
+        help="stop daemon",
+    )
+    parser.add_argument(
+        "--host",
+        help="ip address to bind",
+        default=PRHOST_DEFAULT,
+    )
+    parser.add_argument(
+        "--port",
+        type=int,
+        default=PRPORT_DEFAULT,
+        help="port number (default: 8585)",
+    )
+    parser.add_argument(
+        "-r",
+        "--read-only",
+        action="store_true",
+        help="open database in read-only mode",
+    )
+    parser.add_argument(
+        "-u",
+        "--upstream",
+        default=os.environ.get("PRSERV_UPSTREAM", None),
+        help="Upstream PR service (host:port)",
+    )
+
+    args = parser.parse_args()
+    init_logger(os.path.abspath(args.log), args.loglevel)
+
+    if args.start:
+        ret=prserv.serv.start_daemon(
+            args.file,
+            args.host,
+            args.port,
+            os.path.abspath(args.log),
+            args.read_only,
+            args.upstream
+        )
+    elif args.stop:
+        ret=prserv.serv.stop_daemon(args.host, args.port)
     else:
         ret=parser.print_help()
     return ret
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest
index 6c0737416b..1b7a783fdc 100755
--- a/bitbake/bin/bitbake-selftest
+++ b/bitbake/bin/bitbake-selftest
@@ -7,12 +7,15 @@
 
 import os
 import sys, logging
+import warnings
+warnings.simplefilter("default")
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
 
 import unittest
 try:
     import bb
     import hashserv
+    import prserv
     import layerindexlib
 except RuntimeError as exc:
     sys.exit(str(exc))
@@ -25,11 +28,12 @@ tests = ["bb.tests.codeparser",
25 "bb.tests.event", 28 "bb.tests.event",
26 "bb.tests.fetch", 29 "bb.tests.fetch",
27 "bb.tests.parse", 30 "bb.tests.parse",
28 "bb.tests.persist_data",
29 "bb.tests.runqueue", 31 "bb.tests.runqueue",
30 "bb.tests.siggen", 32 "bb.tests.siggen",
31 "bb.tests.utils", 33 "bb.tests.utils",
34 "bb.tests.compression",
32 "hashserv.tests", 35 "hashserv.tests",
36 "prserv.tests",
33 "layerindexlib.tests.layerindexobj", 37 "layerindexlib.tests.layerindexobj",
34 "layerindexlib.tests.restapi", 38 "layerindexlib.tests.restapi",
35 "layerindexlib.tests.cooker"] 39 "layerindexlib.tests.cooker"]
diff --git a/bitbake/bin/bitbake-server b/bitbake/bin/bitbake-server
index ffbc7894ef..a559109e3f 100755
--- a/bitbake/bin/bitbake-server
+++ b/bitbake/bin/bitbake-server
@@ -8,14 +8,17 @@
 import os
 import sys
 import warnings
+warnings.simplefilter("default")
+warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
 import logging
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 
-if sys.getfilesystemencoding() != "utf-8":
-    sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+import bb
+
+bb.utils.check_system_locale()
 
 # Users shouldn't be running this code directly
-if len(sys.argv) != 10 or not sys.argv[1].startswith("decafbad"):
+if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
     print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
     sys.exit(1)
 
@@ -26,20 +29,19 @@ readypipeinfd = int(sys.argv[3])
 logfile = sys.argv[4]
 lockname = sys.argv[5]
 sockname = sys.argv[6]
-timeout = sys.argv[7]
-xmlrpcinterface = (sys.argv[8], int(sys.argv[9]))
+timeout = float(sys.argv[7])
+profile = bool(int(sys.argv[8]))
+xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
 if xmlrpcinterface[0] == "None":
     xmlrpcinterface = (None, xmlrpcinterface[1])
-if timeout == "None":
-    timeout = None
 
 # Replace standard fds with our own
 with open('/dev/null', 'r') as si:
     os.dup2(si.fileno(), sys.stdin.fileno())
 
-so = open(logfile, 'a+')
-os.dup2(so.fileno(), sys.stdout.fileno())
-os.dup2(so.fileno(), sys.stderr.fileno())
+with open(logfile, 'a+') as so:
+    os.dup2(so.fileno(), sys.stdout.fileno())
+    os.dup2(so.fileno(), sys.stderr.fileno())
 
 # Have stdout and stderr be the same so log output matches chronologically
 # and there aren't two seperate buffers
@@ -50,5 +52,5 @@ logger = logging.getLogger("BitBake")
 handler = bb.event.LogHandler()
 logger.addHandler(handler)
 
-bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface)
+bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
 
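
The server invocation grew one positional argument (a profiling flag), which is why the argv length check moved from 10 to 11 and the timeout is now always parsed as a float rather than the string "None". The resulting layout, reconstructed from the indices used above (argv[2] as the lock fd is inferred from the execServer() call rather than shown directly in this hunk):

    # sys.argv[1]  "decafbad..." magic token    sys.argv[6]   sockname
    # sys.argv[2]  lockfd (inferred)            sys.argv[7]   timeout (float)
    # sys.argv[3]  readypipeinfd                sys.argv[8]   profile (0/1)
    # sys.argv[4]  logfile                      sys.argv[9]   xmlrpc host
    # sys.argv[5]  lockname                     sys.argv[10]  xmlrpc port
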
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 7765b9368b..d2b146a6a9 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -1,11 +1,15 @@
 #!/usr/bin/env python3
 #
+# Copyright BitBake Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 import os
 import sys
 import warnings
+warnings.simplefilter("default")
+warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
 from bb import fetch2
 import logging
@@ -16,11 +20,17 @@ import signal
 import pickle
 import traceback
 import queue
+import shlex
+import subprocess
+import fcntl
 from multiprocessing import Lock
 from threading import Thread
 
-if sys.getfilesystemencoding() != "utf-8":
-    sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+# Remove when we have a minimum of python 3.10
+if not hasattr(fcntl, 'F_SETPIPE_SZ'):
+    fcntl.F_SETPIPE_SZ = 1031
+
+bb.utils.check_system_locale()
 
 # Users shouldn't be running this code directly
 if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -40,7 +50,6 @@ if sys.argv[1].startswith("decafbadbad"):
 # updates to log files for use with tail
 try:
     if sys.stdout.name == '<stdout>':
-        import fcntl
         fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
         fl |= os.O_SYNC
         fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
@@ -52,6 +61,12 @@ logger = logging.getLogger("BitBake")
 
 worker_pipe = sys.stdout.fileno()
 bb.utils.nonblockingfd(worker_pipe)
+# Try to make the pipe buffers larger as it is much more efficient. If we can't
+# e.g. out of buffer space (/proc/sys/fs/pipe-user-pages-soft) then just pass over.
+try:
+    fcntl.fcntl(worker_pipe, fcntl.F_SETPIPE_SZ, 512 * 1024)
+except:
+    pass
 # Need to guard against multiprocessing being used in child processes
 # and multiple processes trying to write to the parent at the same time
 worker_pipe_lock = None
@@ -87,21 +102,21 @@ def worker_fire_prepickled(event):
87worker_thread_exit = False 102worker_thread_exit = False
88 103
89def worker_flush(worker_queue): 104def worker_flush(worker_queue):
90 worker_queue_int = b"" 105 worker_queue_int = bytearray()
91 global worker_pipe, worker_thread_exit 106 global worker_pipe, worker_thread_exit
92 107
93 while True: 108 while True:
94 try: 109 try:
95 worker_queue_int = worker_queue_int + worker_queue.get(True, 1) 110 worker_queue_int.extend(worker_queue.get(True, 1))
96 except queue.Empty: 111 except queue.Empty:
97 pass 112 pass
98 while (worker_queue_int or not worker_queue.empty()): 113 while (worker_queue_int or not worker_queue.empty()):
99 try: 114 try:
100 (_, ready, _) = select.select([], [worker_pipe], [], 1) 115 (_, ready, _) = select.select([], [worker_pipe], [], 1)
101 if not worker_queue.empty(): 116 if not worker_queue.empty():
102 worker_queue_int = worker_queue_int + worker_queue.get() 117 worker_queue_int.extend(worker_queue.get())
103 written = os.write(worker_pipe, worker_queue_int) 118 written = os.write(worker_pipe, worker_queue_int)
104 worker_queue_int = worker_queue_int[written:] 119 del worker_queue_int[0:written]
105 except (IOError, OSError) as e: 120 except (IOError, OSError) as e:
106 if e.errno != errno.EAGAIN and e.errno != errno.EPIPE: 121 if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
107 raise 122 raise
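The switch from bytes to bytearray in worker_flush() avoids quadratic behaviour: buf = buf + chunk and buf = buf[written:] each copy the whole bytes object, while extend() and del buf[0:written] mutate in place. The drain step in isolation, with an illustrative function name:

    import os

    def drain_once(buf: bytearray, fd: int) -> None:
        """Write as much of buf as the non-blocking fd accepts, in place."""
        while buf:
            try:
                written = os.write(fd, buf)
            except BlockingIOError:
                return  # pipe full; the caller retries after select()
            del buf[0:written]  # consume without reallocating a new object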
@@ -117,11 +132,10 @@ def worker_child_fire(event, d):
117 132
118 data = b"<event>" + pickle.dumps(event) + b"</event>" 133 data = b"<event>" + pickle.dumps(event) + b"</event>"
119 try: 134 try:
120 worker_pipe_lock.acquire() 135 with bb.utils.lock_timeout(worker_pipe_lock):
121 while(len(data)): 136 while(len(data)):
122 written = worker_pipe.write(data) 137 written = worker_pipe.write(data)
123 data = data[written:] 138 data = data[written:]
124 worker_pipe_lock.release()
125 except IOError: 139 except IOError:
126 sigterm_handler(None, None) 140 sigterm_handler(None, None)
127 raise 141 raise
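bb.utils.lock_timeout() is used above as a context manager, which also guarantees the lock is released when worker_pipe.write() raises; the old acquire()/release() pair skipped the release on an exception. A hypothetical equivalent, assuming it simply wraps acquire-with-timeout (the timeout value and failure behaviour here are guesses, not bitbake's actual code):

    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=60):
        # Fail loudly rather than hang forever, and always release.
        if not lock.acquire(timeout=timeout):
            raise RuntimeError("Unable to acquire lock within %ss" % timeout)
        try:
            yield
        finally:
            lock.release()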
@@ -140,44 +154,56 @@ def sigterm_handler(signum, frame):
140 os.killpg(0, signal.SIGTERM) 154 os.killpg(0, signal.SIGTERM)
141 sys.exit() 155 sys.exit()
142 156
143def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False): 157def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
158
159 fn = runtask['fn']
160 task = runtask['task']
161 taskname = runtask['taskname']
162 taskhash = runtask['taskhash']
163 unihash = runtask['unihash']
164 appends = runtask['appends']
165 layername = runtask['layername']
166 taskdepdata = runtask['taskdepdata']
167 quieterrors = runtask['quieterrors']
144 # We need to setup the environment BEFORE the fork, since 168 # We need to setup the environment BEFORE the fork, since
145 # a fork() or exec*() activates PSEUDO... 169 # a fork() or exec*() activates PSEUDO...
146 170
147 envbackup = {} 171 envbackup = {}
172 fakeroot = False
148 fakeenv = {} 173 fakeenv = {}
149 umask = None 174 umask = None
150 175
151 taskdep = workerdata["taskdeps"][fn] 176 uid = os.getuid()
177 gid = os.getgid()
178
179 taskdep = runtask['taskdep']
152 if 'umask' in taskdep and taskname in taskdep['umask']: 180 if 'umask' in taskdep and taskname in taskdep['umask']:
153 umask = taskdep['umask'][taskname] 181 umask = taskdep['umask'][taskname]
154 elif workerdata["umask"]: 182 elif workerdata["umask"]:
155 umask = workerdata["umask"] 183 umask = workerdata["umask"]
156 if umask: 184 if umask:
157 # umask might come in as a number or text string.. 185 # Convert to a python numeric value as it could be a string
158 try: 186 umask = bb.utils.to_filemode(umask)
159 umask = int(umask, 8)
160 except TypeError:
161 pass
162 187
163 dry_run = cfg.dry_run or dry_run_exec 188 dry_run = cfg.dry_run or runtask['dry_run']
164 189
165 # We can't use the fakeroot environment in a dry run as it possibly hasn't been built 190 # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
166 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run: 191 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
167 envvars = (workerdata["fakerootenv"][fn] or "").split() 192 fakeroot = True
168 for key, value in (var.split('=') for var in envvars): 193 envvars = (runtask['fakerootenv'] or "").split()
194 for key, value in (var.split('=',1) for var in envvars):
169 envbackup[key] = os.environ.get(key) 195 envbackup[key] = os.environ.get(key)
170 os.environ[key] = value 196 os.environ[key] = value
171 fakeenv[key] = value 197 fakeenv[key] = value
172 198
173 fakedirs = (workerdata["fakerootdirs"][fn] or "").split() 199 fakedirs = (runtask['fakerootdirs'] or "").split()
174 for p in fakedirs: 200 for p in fakedirs:
175 bb.utils.mkdirhier(p) 201 bb.utils.mkdirhier(p)
176 logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' % 202 logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
177 (fn, taskname, ', '.join(fakedirs))) 203 (fn, taskname, ', '.join(fakedirs)))
178 else: 204 else:
179 envvars = (workerdata["fakerootnoenv"][fn] or "").split() 205 envvars = (runtask['fakerootnoenv'] or "").split()
180 for key, value in (var.split('=') for var in envvars): 206 for key, value in (var.split('=',1) for var in envvars):
181 envbackup[key] = os.environ.get(key) 207 envbackup[key] = os.environ.get(key)
182 os.environ[key] = value 208 os.environ[key] = value
183 fakeenv[key] = value 209 fakeenv[key] = value
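The refactored fork_off_task() receives one runtask dict instead of a long argument list, so new per-task fields no longer change the function signature. The keys unpacked in this hunk imply a payload shaped roughly as follows; every value below is an illustrative placeholder:

    runtask = {
        'fn': '/path/to/example_1.0.bb',   # recipe filename
        'task': 7,                         # task id within the runqueue
        'taskname': 'do_compile',
        'taskhash': '0123abcd...',         # placeholder hashes
        'unihash': '0123abcd...',
        'appends': [],                     # bbappend files, if any
        'layername': 'meta-example',
        'taskdepdata': {},
        'taskdep': {'umask': {}, 'fakeroot': {}},
        'fakerootenv': '',
        'fakerootnoenv': '',
        'fakerootdirs': '',
        'quieterrors': False,
        'dry_run': False,
    }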
@@ -219,19 +245,21 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
219 # Let SIGHUP exit as SIGTERM 245 # Let SIGHUP exit as SIGTERM
220 signal.signal(signal.SIGHUP, sigterm_handler) 246 signal.signal(signal.SIGHUP, sigterm_handler)
221 247
222 # No stdin 248 # No stdin & stdout
223 newsi = os.open(os.devnull, os.O_RDWR) 249 # stdout is used as a status report channel and must not be used by child processes.
224 os.dup2(newsi, sys.stdin.fileno()) 250 dumbio = os.open(os.devnull, os.O_RDWR)
251 os.dup2(dumbio, sys.stdin.fileno())
252 os.dup2(dumbio, sys.stdout.fileno())
225 253
226 if umask: 254 if umask is not None:
227 os.umask(umask) 255 os.umask(umask)
228 256
229 try: 257 try:
230 bb_cache = bb.cache.NoCache(databuilder)
231 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn) 258 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
232 the_data = databuilder.mcdata[mc] 259 the_data = databuilder.mcdata[mc]
233 the_data.setVar("BB_WORKERCONTEXT", "1") 260 the_data.setVar("BB_WORKERCONTEXT", "1")
234 the_data.setVar("BB_TASKDEPDATA", taskdepdata) 261 the_data.setVar("BB_TASKDEPDATA", taskdepdata)
262 the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
235 if cfg.limited_deps: 263 if cfg.limited_deps:
236 the_data.setVar("BB_LIMITEDDEPS", "1") 264 the_data.setVar("BB_LIMITEDDEPS", "1")
237 the_data.setVar("BUILDNAME", workerdata["buildname"]) 265 the_data.setVar("BUILDNAME", workerdata["buildname"])
@@ -245,12 +273,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
245 bb.parse.siggen.set_taskhashes(workerdata["newhashes"]) 273 bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
246 ret = 0 274 ret = 0
247 275
248 the_data = bb_cache.loadDataFull(fn, appends) 276 the_data = databuilder.parseRecipe(fn, appends, layername)
249 the_data.setVar('BB_TASKHASH', taskhash) 277 the_data.setVar('BB_TASKHASH', taskhash)
250 the_data.setVar('BB_UNIHASH', unihash) 278 the_data.setVar('BB_UNIHASH', unihash)
279 bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
251 280
252 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", ""))) 281 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
253 282
283 if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
284 if bb.utils.is_local_uid(uid):
285 logger.debug("Attempting to disable network for %s" % taskname)
286 bb.utils.disable_network(uid, gid)
287 else:
288 logger.debug("Skipping disable network for %s since %s is not a local uid." % (taskname, uid))
289
254 # exported_vars() returns a generator which *cannot* be passed to os.environ.update() 290 # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
255 # successfully. We also need to unset anything from the environment which shouldn't be there 291 # successfully. We also need to unset anything from the environment which shouldn't be there
256 exports = bb.data.exported_vars(the_data) 292 exports = bb.data.exported_vars(the_data)
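bb.utils.disable_network() cuts the task off from the network unless its 'network' varflag is set. The usual mechanism on Linux is to unshare into fresh user and network namespaces, which is presumably why a locally-known uid is required for the uid/gid mapping step. A rough sketch of that approach, offered as an assumption rather than bitbake's verbatim implementation:

    import ctypes

    CLONE_NEWUSER = 0x10000000
    CLONE_NEWNET = 0x40000000

    def disable_network(uid, gid):
        """Sketch: move into new user+network namespaces (Linux only)."""
        libc = ctypes.CDLL(None, use_errno=True)
        if libc.unshare(CLONE_NEWNET | CLONE_NEWUSER) != 0:
            raise OSError(ctypes.get_errno(), "unshare failed")
        # Map the original uid/gid into the new user namespace so file
        # ownership still looks sane to the task; setgroups must be
        # denied before an unprivileged process may write gid_map.
        with open("/proc/self/uid_map", "w") as f:
            f.write("%s %s 1" % (uid, uid))
        with open("/proc/self/setgroups", "w") as f:
            f.write("deny")
        with open("/proc/self/gid_map", "w") as f:
            f.write("%s %s 1" % (gid, gid))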
@@ -279,10 +315,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
279 if not quieterrors: 315 if not quieterrors:
280 logger.critical(traceback.format_exc()) 316 logger.critical(traceback.format_exc())
281 os._exit(1) 317 os._exit(1)
318
319 sys.stdout.flush()
320 sys.stderr.flush()
321
282 try: 322 try:
283 if dry_run: 323 if dry_run:
284 return 0 324 return 0
285 return bb.build.exec_task(fn, taskname, the_data, cfg.profile) 325 try:
326 ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
327 finally:
328 if fakeroot:
329 fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
330 subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
331 return ret
286 except: 332 except:
287 os._exit(1) 333 os._exit(1)
288 if not profiling: 334 if not profiling:
@@ -314,12 +360,12 @@ class runQueueWorkerPipe():
314 if pipeout: 360 if pipeout:
315 pipeout.close() 361 pipeout.close()
316 bb.utils.nonblockingfd(self.input) 362 bb.utils.nonblockingfd(self.input)
317 self.queue = b"" 363 self.queue = bytearray()
318 364
319 def read(self): 365 def read(self):
320 start = len(self.queue) 366 start = len(self.queue)
321 try: 367 try:
322 self.queue = self.queue + (self.input.read(102400) or b"") 368 self.queue.extend(self.input.read(512*1024) or b"")
323 except (OSError, IOError) as e: 369 except (OSError, IOError) as e:
324 if e.errno != errno.EAGAIN: 370 if e.errno != errno.EAGAIN:
325 raise 371 raise
@@ -347,7 +393,7 @@ class BitbakeWorker(object):
347 def __init__(self, din): 393 def __init__(self, din):
348 self.input = din 394 self.input = din
349 bb.utils.nonblockingfd(self.input) 395 bb.utils.nonblockingfd(self.input)
350 self.queue = b"" 396 self.queue = bytearray()
351 self.cookercfg = None 397 self.cookercfg = None
352 self.databuilder = None 398 self.databuilder = None
353 self.data = None 399 self.data = None
@@ -381,7 +427,7 @@ class BitbakeWorker(object):
381 if len(r) == 0: 427 if len(r) == 0:
382 # EOF on pipe, server must have terminated 428 # EOF on pipe, server must have terminated
383 self.sigterm_exception(signal.SIGTERM, None) 429 self.sigterm_exception(signal.SIGTERM, None)
384 self.queue = self.queue + r 430 self.queue.extend(r)
385 except (OSError, IOError): 431 except (OSError, IOError):
386 pass 432 pass
387 if len(self.queue): 433 if len(self.queue):
@@ -401,19 +447,35 @@ class BitbakeWorker(object):
401 while self.process_waitpid(): 447 while self.process_waitpid():
402 continue 448 continue
403 449
404
405 def handle_item(self, item, func): 450 def handle_item(self, item, func):
406 if self.queue.startswith(b"<" + item + b">"): 451 opening_tag = b"<" + item + b">"
407 index = self.queue.find(b"</" + item + b">") 452 if not self.queue.startswith(opening_tag):
408 while index != -1: 453 return
409 func(self.queue[(len(item) + 2):index]) 454
410 self.queue = self.queue[(index + len(item) + 3):] 455 tag_len = len(opening_tag)
411 index = self.queue.find(b"</" + item + b">") 456 if len(self.queue) < tag_len + 4:
457 # we need to receive more data
458 return
459 header = self.queue[tag_len:tag_len + 4]
460 payload_len = int.from_bytes(header, 'big')
461 # closing tag has length (tag_len + 1)
462 if len(self.queue) < tag_len * 2 + 1 + payload_len:
463 # we need to receive more data
464 return
465
466 index = self.queue.find(b"</" + item + b">")
467 if index != -1:
468 try:
469 func(self.queue[(tag_len + 4):index])
470 except pickle.UnpicklingError:
471 workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
472 raise
473 self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]
412 474
413 def handle_cookercfg(self, data): 475 def handle_cookercfg(self, data):
414 self.cookercfg = pickle.loads(data) 476 self.cookercfg = pickle.loads(data)
415 self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True) 477 self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
416 self.databuilder.parseBaseConfiguration() 478 self.databuilder.parseBaseConfiguration(worker=True)
417 self.data = self.databuilder.data 479 self.data = self.databuilder.data
418 480
419 def handle_extraconfigdata(self, data): 481 def handle_extraconfigdata(self, data):
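The reworked handle_item() expects a length header after the opening tag, so one complete frame on the pipe is: <item>, a 4-byte big-endian payload length, the pickled payload, then </item>. The header lets the reader wait until the whole frame has arrived before searching for the closing tag and unpickling. A sketch of the matching sender side, inferred from the parser (bitbake's real serialisation helper may differ):

    import pickle

    def frame(item: bytes, payload: bytes) -> bytes:
        """Encode one frame the way the parser above decodes it."""
        return (b"<" + item + b">"
                + len(payload).to_bytes(4, "big")
                + payload
                + b"</" + item + b">")

    msg = frame(b"runtask", pickle.dumps({"taskname": "do_compile"}))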
@@ -428,6 +490,7 @@ class BitbakeWorker(object):
428 for mc in self.databuilder.mcdata: 490 for mc in self.databuilder.mcdata:
429 self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"]) 491 self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
430 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"]) 492 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
493 self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")
431 494
432 def handle_newtaskhashes(self, data): 495 def handle_newtaskhashes(self, data):
433 self.workerdata["newhashes"] = pickle.loads(data) 496 self.workerdata["newhashes"] = pickle.loads(data)
@@ -445,11 +508,15 @@ class BitbakeWorker(object):
445 sys.exit(0) 508 sys.exit(0)
446 509
447 def handle_runtask(self, data): 510 def handle_runtask(self, data):
448 fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data) 511 runtask = pickle.loads(data)
449 workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname)) 512
513 fn = runtask['fn']
514 task = runtask['task']
515 taskname = runtask['taskname']
450 516
451 pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec) 517 workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
452 518
519 pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
453 self.build_pids[pid] = task 520 self.build_pids[pid] = task
454 self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout) 521 self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
455 522
@@ -513,9 +580,11 @@ except BaseException as e:
513 import traceback 580 import traceback
514 sys.stderr.write(traceback.format_exc()) 581 sys.stderr.write(traceback.format_exc())
515 sys.stderr.write(str(e)) 582 sys.stderr.write(str(e))
583finally:
584 worker_thread_exit = True
585 worker_thread.join()
516 586
517worker_thread_exit = True 587workerlog_write("exiting")
518worker_thread.join() 588if not normalexit:
519 589 sys.exit(1)
520workerlog_write("exitting")
521sys.exit(0) 590sys.exit(0)
diff --git a/bitbake/bin/git-make-shallow b/bitbake/bin/git-make-shallow
index 57069f7edf..e6c180b4d6 100755
--- a/bitbake/bin/git-make-shallow
+++ b/bitbake/bin/git-make-shallow
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright BitBake Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
@@ -16,19 +18,23 @@ import itertools
16import os 18import os
17import subprocess 19import subprocess
18import sys 20import sys
21import warnings
22warnings.simplefilter("default")
19 23
20version = 1.0 24version = 1.0
21 25
22 26
27git_cmd = ['git', '-c', 'safe.bareRepository=all']
28
23def main(): 29def main():
24 if sys.version_info < (3, 4, 0): 30 if sys.version_info < (3, 4, 0):
25 sys.exit('Python 3.4 or greater is required') 31 sys.exit('Python 3.4 or greater is required')
26 32
27 git_dir = check_output(['git', 'rev-parse', '--git-dir']).rstrip() 33 git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
28 shallow_file = os.path.join(git_dir, 'shallow') 34 shallow_file = os.path.join(git_dir, 'shallow')
29 if os.path.exists(shallow_file): 35 if os.path.exists(shallow_file):
30 try: 36 try:
31 check_output(['git', 'fetch', '--unshallow']) 37 check_output(git_cmd + ['fetch', '--unshallow'])
32 except subprocess.CalledProcessError: 38 except subprocess.CalledProcessError:
33 try: 39 try:
34 os.unlink(shallow_file) 40 os.unlink(shallow_file)
@@ -37,21 +43,21 @@ def main():
37 raise 43 raise
38 44
39 args = process_args() 45 args = process_args()
40 revs = check_output(['git', 'rev-list'] + args.revisions).splitlines() 46 revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()
41 47
42 make_shallow(shallow_file, args.revisions, args.refs) 48 make_shallow(shallow_file, args.revisions, args.refs)
43 49
44 ref_revs = check_output(['git', 'rev-list'] + args.refs).splitlines() 50 ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
45 remaining_history = set(revs) & set(ref_revs) 51 remaining_history = set(revs) & set(ref_revs)
46 for rev in remaining_history: 52 for rev in remaining_history:
47 if check_output(['git', 'rev-parse', '{}^@'.format(rev)]): 53 if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
48 sys.exit('Error: %s was not made shallow' % rev) 54 sys.exit('Error: %s was not made shallow' % rev)
49 55
50 filter_refs(args.refs) 56 filter_refs(args.refs)
51 57
52 if args.shrink: 58 if args.shrink:
53 shrink_repo(git_dir) 59 shrink_repo(git_dir)
54 subprocess.check_call(['git', 'fsck', '--unreachable']) 60 subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
55 61
56 62
57def process_args(): 63def process_args():
@@ -68,12 +74,12 @@ def process_args():
68 args = parser.parse_args() 74 args = parser.parse_args()
69 75
70 if args.refs: 76 if args.refs:
71 args.refs = check_output(['git', 'rev-parse', '--symbolic-full-name'] + args.refs).splitlines() 77 args.refs = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
72 else: 78 else:
73 args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit') 79 args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')
74 80
75 args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs)) 81 args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs))
76 args.revisions = check_output(['git', 'rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines() 82 args.revisions = check_output(git_cmd + ['rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
77 return args 83 return args
78 84
79 85
@@ -91,7 +97,7 @@ def make_shallow(shallow_file, revisions, refs):
91 97
92def get_all_refs(ref_filter=None): 98def get_all_refs(ref_filter=None):
93 """Return all the existing refs in this repository, optionally filtering the refs.""" 99 """Return all the existing refs in this repository, optionally filtering the refs."""
94 ref_output = check_output(['git', 'for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)']) 100 ref_output = check_output(git_cmd + ['for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
95 ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()] 101 ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()]
96 if ref_filter: 102 if ref_filter:
97 ref_split = (e for e in ref_split if ref_filter(*e)) 103 ref_split = (e for e in ref_split if ref_filter(*e))
@@ -109,8 +115,8 @@ def filter_refs(refs):
109 all_refs = get_all_refs() 115 all_refs = get_all_refs()
110 to_remove = set(all_refs) - set(refs) 116 to_remove = set(all_refs) - set(refs)
111 if to_remove: 117 if to_remove:
112 check_output(['xargs', '-0', '-n', '1', 'git', 'update-ref', '-d', '--no-deref'], 118 check_output(git_cmd + ['update-ref', '--no-deref', '--stdin', '-z'],
113 input=''.join(l + '\0' for l in to_remove)) 119 input=''.join('delete ' + l + '\0\0' for l in to_remove))
114 120
115 121
116def follow_history_intersections(revisions, refs): 122def follow_history_intersections(revisions, refs):
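The xargs pipeline is replaced with git's own batch interface: update-ref --stdin -z reads NUL-terminated commands, and in the -z form a delete command is the word 'delete', the ref, then an (empty) old-value field, hence the two NULs per ref. Building that input for two placeholder refs:

    to_remove = ['refs/heads/example-a', 'refs/heads/example-b']
    batch = ''.join('delete ' + ref + '\0\0' for ref in to_remove)
    # i.e. 'delete refs/heads/example-a\0\0delete refs/heads/example-b\0\0'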
@@ -122,7 +128,7 @@ def follow_history_intersections(revisions, refs):
122 if rev in seen: 128 if rev in seen:
123 continue 129 continue
124 130
125 parents = check_output(['git', 'rev-parse', '%s^@' % rev]).splitlines() 131 parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()
126 132
127 yield rev 133 yield rev
128 seen.add(rev) 134 seen.add(rev)
@@ -130,12 +136,12 @@ def follow_history_intersections(revisions, refs):
130 if not parents: 136 if not parents:
131 continue 137 continue
132 138
133 check_refs = check_output(['git', 'merge-base', '--independent'] + sorted(refs)).splitlines() 139 check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
134 for parent in parents: 140 for parent in parents:
135 for ref in check_refs: 141 for ref in check_refs:
136 print("Checking %s vs %s" % (parent, ref)) 142 print("Checking %s vs %s" % (parent, ref))
137 try: 143 try:
138 merge_base = check_output(['git', 'merge-base', parent, ref]).rstrip() 144 merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
139 except subprocess.CalledProcessError: 145 except subprocess.CalledProcessError:
140 continue 146 continue
141 else: 147 else:
@@ -155,14 +161,14 @@ def iter_except(func, exception, start=None):
155 161
156def shrink_repo(git_dir): 162def shrink_repo(git_dir):
157 """Shrink the newly shallow repository, removing the unreachable objects.""" 163 """Shrink the newly shallow repository, removing the unreachable objects."""
158 subprocess.check_call(['git', 'reflog', 'expire', '--expire-unreachable=now', '--all']) 164 subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
159 subprocess.check_call(['git', 'repack', '-ad']) 165 subprocess.check_call(git_cmd + ['repack', '-ad'])
160 try: 166 try:
161 os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates')) 167 os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates'))
162 except OSError as exc: 168 except OSError as exc:
163 if exc.errno != errno.ENOENT: 169 if exc.errno != errno.ENOENT:
164 raise 170 raise
165 subprocess.check_call(['git', 'prune', '--expire', 'now']) 171 subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
166 172
167 173
168if __name__ == '__main__': 174if __name__ == '__main__':
diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster
index 6b90ee187e..f002c8c159 100755
--- a/bitbake/bin/toaster
+++ b/bitbake/bin/toaster
@@ -33,7 +33,7 @@ databaseCheck()
33 $MANAGE migrate --noinput || retval=1 33 $MANAGE migrate --noinput || retval=1
34 34
35 if [ $retval -eq 1 ]; then 35 if [ $retval -eq 1 ]; then
36 echo "Failed migrations, aborting system start" 1>&2 36 echo "Failed migrations, halting system start" 1>&2
37 return $retval 37 return $retval
38 fi 38 fi
39 # Make sure that checksettings can pick up any value for TEMPLATECONF 39 # Make sure that checksettings can pick up any value for TEMPLATECONF
@@ -41,7 +41,7 @@ databaseCheck()
41 $MANAGE checksettings --traceback || retval=1 41 $MANAGE checksettings --traceback || retval=1
42 42
43 if [ $retval -eq 1 ]; then 43 if [ $retval -eq 1 ]; then
44 printf "\nError while checking settings; aborting\n" 44 printf "\nError while checking settings; exiting\n"
45 return $retval 45 return $retval
46 fi 46 fi
47 47
@@ -84,7 +84,7 @@ webserverStartAll()
84 echo "Starting webserver..." 84 echo "Starting webserver..."
85 85
86 $MANAGE runserver --noreload "$ADDR_PORT" \ 86 $MANAGE runserver --noreload "$ADDR_PORT" \
87 </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \ 87 </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
88 & echo $! >${BUILDDIR}/.toastermain.pid 88 & echo $! >${BUILDDIR}/.toastermain.pid
89 89
90 sleep 1 90 sleep 1
@@ -181,6 +181,14 @@ WEBSERVER=1
181export TOASTER_BUILDSERVER=1 181export TOASTER_BUILDSERVER=1
182ADDR_PORT="localhost:8000" 182ADDR_PORT="localhost:8000"
183TOASTERDIR=`dirname $BUILDDIR` 183TOASTERDIR=`dirname $BUILDDIR`
184# ${BUILDDIR}/toaster_logs/ is the default location for toaster logs.
185# It is needed by the django-log-viewer integration: https://pypi.org/project/django-log-viewer/
186# If the directory does not exist, create it.
187TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
188if [ ! -d $TOASTER_LOGS_DIR ]
189then
190 mkdir $TOASTER_LOGS_DIR
191fi
184unset CMD 192unset CMD
185for param in $*; do 193for param in $*; do
186 case $param in 194 case $param in
@@ -248,7 +256,7 @@ fi
248# 3) the sqlite db if that is being used. 256# 3) the sqlite db if that is being used.
249# 4) pid's we need to clean up on exit/shutdown 257# 4) pid's we need to clean up on exit/shutdown
250export TOASTER_DIR=$TOASTERDIR 258export TOASTER_DIR=$TOASTERDIR
251export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR" 259export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS TOASTER_DIR"
252 260
253# Determine the action. If specified by arguments, fine, if not, toggle it 261# Determine the action. If specified by arguments, fine, if not, toggle it
254if [ "$CMD" = "start" ] ; then 262if [ "$CMD" = "start" ] ; then
@@ -299,7 +307,7 @@ case $CMD in
299 export BITBAKE_UI='toasterui' 307 export BITBAKE_UI='toasterui'
300 if [ $TOASTER_BUILDSERVER -eq 1 ] ; then 308 if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
301 $MANAGE runbuilds \ 309 $MANAGE runbuilds \
302 </dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \ 310 </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
303 & echo $! >${BUILDDIR}/.runbuilds.pid 311 & echo $! >${BUILDDIR}/.runbuilds.pid
304 else 312 else
305 echo "Toaster build server not started." 313 echo "Toaster build server not started."
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
index 8fa4ab7116..74a319320e 100755
--- a/bitbake/bin/toaster-eventreplay
+++ b/bitbake/bin/toaster-eventreplay
@@ -19,6 +19,8 @@ import sys
19import json 19import json
20import pickle 20import pickle
21import codecs 21import codecs
22import warnings
23warnings.simplefilter("default")
22 24
23from collections import namedtuple 25from collections import namedtuple
24 26
@@ -28,79 +30,23 @@ sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
28 30
29import bb.cooker 31import bb.cooker
30from bb.ui import toasterui 32from bb.ui import toasterui
31 33from bb.ui import eventreplay
32class EventPlayer:
33 """Emulate a connection to a bitbake server."""
34
35 def __init__(self, eventfile, variables):
36 self.eventfile = eventfile
37 self.variables = variables
38 self.eventmask = []
39
40 def waitEvent(self, _timeout):
41 """Read event from the file."""
42 line = self.eventfile.readline().strip()
43 if not line:
44 return
45 try:
46 event_str = json.loads(line)['vars'].encode('utf-8')
47 event = pickle.loads(codecs.decode(event_str, 'base64'))
48 event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
49 if event_name not in self.eventmask:
50 return
51 return event
52 except ValueError as err:
53 print("Failed loading ", line)
54 raise err
55
56 def runCommand(self, command_line):
57 """Emulate running a command on the server."""
58 name = command_line[0]
59
60 if name == "getVariable":
61 var_name = command_line[1]
62 variable = self.variables.get(var_name)
63 if variable:
64 return variable['v'], None
65 return None, "Missing variable %s" % var_name
66
67 elif name == "getAllKeysWithFlags":
68 dump = {}
69 flaglist = command_line[1]
70 for key, val in self.variables.items():
71 try:
72 if not key.startswith("__"):
73 dump[key] = {
74 'v': val['v'],
75 'history' : val['history'],
76 }
77 for flag in flaglist:
78 dump[key][flag] = val[flag]
79 except Exception as err:
80 print(err)
81 return (dump, None)
82
83 elif name == 'setEventMask':
84 self.eventmask = command_line[-1]
85 return True, None
86
87 else:
88 raise Exception("Command %s not implemented" % command_line[0])
89
90 def getEventHandle(self):
91 """
92 This method is called by toasterui.
93 The return value is passed to self.runCommand but not used there.
94 """
95 pass
96 34
97def main(argv): 35def main(argv):
98 with open(argv[-1]) as eventfile: 36 with open(argv[-1]) as eventfile:
99 # load variables from the first line 37 # load variables from the first line
100 variables = json.loads(eventfile.readline().strip())['allvariables'] 38 variables = None
101 39 while line := eventfile.readline().strip():
40 try:
41 variables = json.loads(line)['allvariables']
42 break
43 except (KeyError, json.JSONDecodeError):
44 continue
45 if not variables:
46 sys.exit("Cannot find allvariables entry in event log file %s" % argv[-1])
47 eventfile.seek(0)
102 params = namedtuple('ConfigParams', ['observe_only'])(True) 48 params = namedtuple('ConfigParams', ['observe_only'])(True)
103 player = EventPlayer(eventfile, variables) 49 player = eventreplay.EventPlayer(eventfile, variables)
104 50
105 return toasterui.main(player, player, params) 51 return toasterui.main(player, player, params)
106 52
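For reference, the removed EventPlayer documents the event log format being replayed here: after the header line carrying allvariables, each line is a JSON object whose 'vars' field holds a base64-encoded pickle of one event. Decoding a single line, per the deleted waitEvent():

    import codecs
    import json
    import pickle

    def decode_event(line: str):
        event_str = json.loads(line)['vars'].encode('utf-8')
        return pickle.loads(codecs.decode(event_str, 'base64'))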