summary | refs | log | tree | commit | diff | stats
path: root/bitbake/bin/bitbake-hashclient
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/bin/bitbake-hashclient')
-rwxr-xr-x  bitbake/bin/bitbake-hashclient | 107
1 file changed, 98 insertions(+), 9 deletions(-)
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
index 610787ed2b..b8755c5797 100755
--- a/bitbake/bin/bitbake-hashclient
+++ b/bitbake/bin/bitbake-hashclient
@@ -16,6 +16,8 @@ import time
 import warnings
 import netrc
 import json
+import statistics
+import textwrap
 warnings.simplefilter("default")
 
 try:
@@ -81,6 +83,7 @@ def main():
             nonlocal found_hashes
             nonlocal missed_hashes
             nonlocal max_time
+            nonlocal times
 
             with hashserv.create_client(args.address) as client:
                 for i in range(args.requests):
@@ -98,29 +101,41 @@ def main():
                         else:
                             missed_hashes += 1
 
-                        max_time = max(elapsed, max_time)
+                        times.append(elapsed)
                         pbar.update()
 
         max_time = 0
         found_hashes = 0
         missed_hashes = 0
         lock = threading.Lock()
-        total_requests = args.clients * args.requests
+        times = []
         start_time = time.perf_counter()
-        with ProgressBar(total=total_requests) as pbar:
+        with ProgressBar(total=args.clients * args.requests) as pbar:
             threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
             for t in threads:
                 t.start()
 
             for t in threads:
                 t.join()
+            total_elapsed = time.perf_counter() - start_time
 
-        elapsed = time.perf_counter() - start_time
         with lock:
-            print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
-            print("Average request time %.8fs" % (elapsed / total_requests))
-            print("Max request time was %.8fs" % max_time)
-            print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+            mean = statistics.mean(times)
+            median = statistics.median(times)
+            stddev = statistics.pstdev(times)
+
+            print(f"Number of clients: {args.clients}")
+            print(f"Requests per client: {args.requests}")
+            print(f"Number of requests: {len(times)}")
+            print(f"Total elapsed time: {total_elapsed:.3f}s")
+            print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
+            print(f"Average request time: {mean:.3f}s")
+            print(f"Median request time: {median:.3f}s")
+            print(f"Request time std dev: {stddev:.3f}s")
+            print(f"Maximum request time: {max(times):.3f}s")
+            print(f"Minimum request time: {min(times):.3f}s")
+            print(f"Hashes found: {found_hashes}")
+            print(f"Hashes missed: {missed_hashes}")
 
         if args.report:
             with ProgressBar(total=args.requests) as pbar:
@@ -212,6 +227,27 @@ def main():
         print("New hashes marked: %d" % result["count"])
         return 0
 
+    def handle_gc_mark_stream(args, client):
+        stdin = (l.strip() for l in sys.stdin)
+        marked_hashes = 0
+
+        try:
+            result = client.gc_mark_stream(args.mark, stdin)
+            marked_hashes = result["count"]
+        except ConnectionError:
+            logger.warning(
+                "Server doesn't seem to support `gc-mark-stream`. Sending "
+                "hashes sequentially using `gc-mark` API."
+            )
+            for line in stdin:
+                pairs = line.split()
+                condition = dict(zip(pairs[::2], pairs[1::2]))
+                result = client.gc_mark(args.mark, condition)
+                marked_hashes += result["count"]
+
+        print("New hashes marked: %d" % marked_hashes)
+        return 0
+
     def handle_gc_sweep(args, client):
         result = client.gc_sweep(args.mark)
         print("Removed %d rows" % result["count"])
@@ -225,7 +261,45 @@ def main():
         print("true" if result else "false")
         return 0
 
-    parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+    def handle_ping(args, client):
+        times = []
+        for i in range(1, args.count + 1):
+            if not args.quiet:
+                print(f"Ping {i} of {args.count}... ", end="")
+            start_time = time.perf_counter()
+            client.ping()
+            elapsed = time.perf_counter() - start_time
+            times.append(elapsed)
+            if not args.quiet:
+                print(f"{elapsed:.3f}s")
+
+        mean = statistics.mean(times)
+        median = statistics.median(times)
+        std_dev = statistics.pstdev(times)
+
+        if not args.quiet:
+            print("------------------------")
+        print(f"Number of pings: {len(times)}")
+        print(f"Average round trip time: {mean:.3f}s")
+        print(f"Median round trip time: {median:.3f}s")
+        print(f"Round trip time std dev: {std_dev:.3f}s")
+        print(f"Min time is: {min(times):.3f}s")
+        print(f"Max time is: {max(times):.3f}s")
+        return 0
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        description='Hash Equivalence Client',
+        epilog=textwrap.dedent(
+            """
+            Possible ADDRESS options are:
+                unix://PATH         Connect to UNIX domain socket at PATH
+                ws://HOST[:PORT]    Connect to websocket at HOST:PORT (default port is 80)
+                wss://HOST[:PORT]   Connect to secure websocket at HOST:PORT (default port is 443)
+                HOST:PORT           Connect to TCP server at HOST:PORT
+            """
+        ),
+    )
     parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
     parser.add_argument('--log', default='WARNING', help='Set logging level')
     parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
@@ -313,6 +387,16 @@ def main():
                                 help="Keep entries in table where KEY == VALUE")
     gc_mark_parser.set_defaults(func=handle_gc_mark)
 
+    gc_mark_parser_stream = subparsers.add_parser(
+        'gc-mark-stream',
+        help=(
+            "Mark multiple hashes to be retained for garbage collection. Input should be provided via stdin, "
+            "with each line formatted as key-value pairs separated by spaces, for example 'column1 foo column2 bar'."
+        )
+    )
+    gc_mark_parser_stream.add_argument("mark", help="Mark for this garbage collection operation")
+    gc_mark_parser_stream.set_defaults(func=handle_gc_mark_stream)
+
     gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
     gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
     gc_sweep_parser.set_defaults(func=handle_gc_sweep)
@@ -322,6 +406,11 @@ def main():
     unihash_exists_parser.add_argument("unihash", help="Unihash to check")
     unihash_exists_parser.set_defaults(func=handle_unihash_exists)
 
+    ping_parser = subparsers.add_parser('ping', help="Ping server")
+    ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
+    ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
+    ping_parser.set_defaults(func=handle_ping)
+
     args = parser.parse_args()
 
     logger = logging.getLogger('hashserv')