diff options
Diffstat (limited to 'bitbake/lib')
199 files changed, 18950 insertions, 4875 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py index 23c22b65ef..76bc08a3ea 100644 --- a/bitbake/lib/bb/COW.py +++ b/bitbake/lib/bb/COW.py | |||
@@ -3,6 +3,8 @@ | |||
3 | # | 3 | # |
4 | # Copyright (C) 2006 Tim Ansell | 4 | # Copyright (C) 2006 Tim Ansell |
5 | # | 5 | # |
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
6 | # Please Note: | 8 | # Please Note: |
7 | # Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW. | 9 | # Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW. |
8 | # Assign a file to __warn__ to get warnings about slow operations. | 10 | # Assign a file to __warn__ to get warnings about slow operations. |
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py index 84a9051c13..15013540c2 100644 --- a/bitbake/lib/bb/__init__.py +++ b/bitbake/lib/bb/__init__.py | |||
@@ -9,12 +9,19 @@ | |||
9 | # SPDX-License-Identifier: GPL-2.0-only | 9 | # SPDX-License-Identifier: GPL-2.0-only |
10 | # | 10 | # |
11 | 11 | ||
12 | __version__ = "1.49.2" | 12 | __version__ = "2.9.0" |
13 | 13 | ||
14 | import sys | 14 | import sys |
15 | if sys.version_info < (3, 5, 0): | 15 | if sys.version_info < (3, 8, 0): |
16 | raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake") | 16 | raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake") |
17 | 17 | ||
18 | if sys.version_info < (3, 10, 0): | ||
19 | # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work" | ||
20 | # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work | ||
21 | # https://bugs.ams1.psf.io/issue42888 | ||
22 | # so ensure libgcc_s is loaded early on | ||
23 | import ctypes | ||
24 | libgcc_s = ctypes.CDLL('libgcc_s.so.1') | ||
18 | 25 | ||
19 | class BBHandledException(Exception): | 26 | class BBHandledException(Exception): |
20 | """ | 27 | """ |
@@ -58,8 +65,12 @@ class BBLoggerMixin(object): | |||
58 | if not bb.event.worker_pid: | 65 | if not bb.event.worker_pid: |
59 | if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]): | 66 | if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]): |
60 | return | 67 | return |
61 | if loglevel > bb.msg.loggerDefaultLogLevel: | 68 | if loglevel < bb.msg.loggerDefaultLogLevel: |
62 | return | 69 | return |
70 | |||
71 | if not isinstance(level, int) or not isinstance(msg, str): | ||
72 | mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args)) | ||
73 | |||
63 | return self.log(loglevel, msg, *args, **kwargs) | 74 | return self.log(loglevel, msg, *args, **kwargs) |
64 | 75 | ||
65 | def plain(self, msg, *args, **kwargs): | 76 | def plain(self, msg, *args, **kwargs): |
@@ -71,6 +82,13 @@ class BBLoggerMixin(object): | |||
71 | def verbnote(self, msg, *args, **kwargs): | 82 | def verbnote(self, msg, *args, **kwargs): |
72 | return self.log(logging.INFO + 2, msg, *args, **kwargs) | 83 | return self.log(logging.INFO + 2, msg, *args, **kwargs) |
73 | 84 | ||
85 | def warnonce(self, msg, *args, **kwargs): | ||
86 | return self.log(logging.WARNING - 1, msg, *args, **kwargs) | ||
87 | |||
88 | def erroronce(self, msg, *args, **kwargs): | ||
89 | return self.log(logging.ERROR - 1, msg, *args, **kwargs) | ||
90 | |||
91 | |||
74 | Logger = logging.getLoggerClass() | 92 | Logger = logging.getLoggerClass() |
75 | class BBLogger(Logger, BBLoggerMixin): | 93 | class BBLogger(Logger, BBLoggerMixin): |
76 | def __init__(self, name, *args, **kwargs): | 94 | def __init__(self, name, *args, **kwargs): |
@@ -157,9 +175,15 @@ def verbnote(*args): | |||
157 | def warn(*args): | 175 | def warn(*args): |
158 | mainlogger.warning(''.join(args)) | 176 | mainlogger.warning(''.join(args)) |
159 | 177 | ||
178 | def warnonce(*args): | ||
179 | mainlogger.warnonce(''.join(args)) | ||
180 | |||
160 | def error(*args, **kwargs): | 181 | def error(*args, **kwargs): |
161 | mainlogger.error(''.join(args), extra=kwargs) | 182 | mainlogger.error(''.join(args), extra=kwargs) |
162 | 183 | ||
184 | def erroronce(*args): | ||
185 | mainlogger.erroronce(''.join(args)) | ||
186 | |||
163 | def fatal(*args, **kwargs): | 187 | def fatal(*args, **kwargs): |
164 | mainlogger.critical(''.join(args), extra=kwargs) | 188 | mainlogger.critical(''.join(args), extra=kwargs) |
165 | raise BBHandledException() | 189 | raise BBHandledException() |
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py new file mode 100755 index 0000000000..0f41b275cf --- /dev/null +++ b/bitbake/lib/bb/acl.py | |||
@@ -0,0 +1,215 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright 2023 by Garmin Ltd. or its subsidiaries | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | |||
7 | |||
8 | import sys | ||
9 | import ctypes | ||
10 | import os | ||
11 | import errno | ||
12 | import pwd | ||
13 | import grp | ||
14 | |||
15 | libacl = ctypes.CDLL("libacl.so.1", use_errno=True) | ||
16 | |||
17 | |||
18 | ACL_TYPE_ACCESS = 0x8000 | ||
19 | ACL_TYPE_DEFAULT = 0x4000 | ||
20 | |||
21 | ACL_FIRST_ENTRY = 0 | ||
22 | ACL_NEXT_ENTRY = 1 | ||
23 | |||
24 | ACL_UNDEFINED_TAG = 0x00 | ||
25 | ACL_USER_OBJ = 0x01 | ||
26 | ACL_USER = 0x02 | ||
27 | ACL_GROUP_OBJ = 0x04 | ||
28 | ACL_GROUP = 0x08 | ||
29 | ACL_MASK = 0x10 | ||
30 | ACL_OTHER = 0x20 | ||
31 | |||
32 | ACL_READ = 0x04 | ||
33 | ACL_WRITE = 0x02 | ||
34 | ACL_EXECUTE = 0x01 | ||
35 | |||
36 | acl_t = ctypes.c_void_p | ||
37 | acl_entry_t = ctypes.c_void_p | ||
38 | acl_permset_t = ctypes.c_void_p | ||
39 | acl_perm_t = ctypes.c_uint | ||
40 | |||
41 | acl_tag_t = ctypes.c_int | ||
42 | |||
43 | libacl.acl_free.argtypes = [acl_t] | ||
44 | |||
45 | |||
46 | def acl_free(acl): | ||
47 | libacl.acl_free(acl) | ||
48 | |||
49 | |||
50 | libacl.acl_get_file.restype = acl_t | ||
51 | libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint] | ||
52 | |||
53 | |||
54 | def acl_get_file(path, typ): | ||
55 | acl = libacl.acl_get_file(os.fsencode(path), typ) | ||
56 | if acl is None: | ||
57 | err = ctypes.get_errno() | ||
58 | raise OSError(err, os.strerror(err), str(path)) | ||
59 | |||
60 | return acl | ||
61 | |||
62 | |||
63 | libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p] | ||
64 | |||
65 | |||
66 | def acl_get_entry(acl, entry_id): | ||
67 | entry = acl_entry_t() | ||
68 | ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry)) | ||
69 | if ret < 0: | ||
70 | err = ctypes.get_errno() | ||
71 | raise OSError(err, os.strerror(err)) | ||
72 | |||
73 | if ret == 0: | ||
74 | return None | ||
75 | |||
76 | return entry | ||
77 | |||
78 | |||
79 | libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p] | ||
80 | |||
81 | |||
82 | def acl_get_tag_type(entry_d): | ||
83 | tag = acl_tag_t() | ||
84 | ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag)) | ||
85 | if ret < 0: | ||
86 | err = ctypes.get_errno() | ||
87 | raise OSError(err, os.strerror(err)) | ||
88 | return tag.value | ||
89 | |||
90 | |||
91 | libacl.acl_get_qualifier.restype = ctypes.c_void_p | ||
92 | libacl.acl_get_qualifier.argtypes = [acl_entry_t] | ||
93 | |||
94 | |||
95 | def acl_get_qualifier(entry_d): | ||
96 | ret = libacl.acl_get_qualifier(entry_d) | ||
97 | if ret is None: | ||
98 | err = ctypes.get_errno() | ||
99 | raise OSError(err, os.strerror(err)) | ||
100 | return ctypes.c_void_p(ret) | ||
101 | |||
102 | |||
103 | libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p] | ||
104 | |||
105 | |||
106 | def acl_get_permset(entry_d): | ||
107 | permset = acl_permset_t() | ||
108 | ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset)) | ||
109 | if ret < 0: | ||
110 | err = ctypes.get_errno() | ||
111 | raise OSError(err, os.strerror(err)) | ||
112 | |||
113 | return permset | ||
114 | |||
115 | |||
116 | libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t] | ||
117 | |||
118 | |||
119 | def acl_get_perm(permset_d, perm): | ||
120 | ret = libacl.acl_get_perm(permset_d, perm) | ||
121 | if ret < 0: | ||
122 | err = ctypes.get_errno() | ||
123 | raise OSError(err, os.strerror(err)) | ||
124 | return bool(ret) | ||
125 | |||
126 | |||
127 | class Entry(object): | ||
128 | def __init__(self, tag, qualifier, mode): | ||
129 | self.tag = tag | ||
130 | self.qualifier = qualifier | ||
131 | self.mode = mode | ||
132 | |||
133 | def __str__(self): | ||
134 | typ = "" | ||
135 | qual = "" | ||
136 | if self.tag == ACL_USER: | ||
137 | typ = "user" | ||
138 | qual = pwd.getpwuid(self.qualifier).pw_name | ||
139 | elif self.tag == ACL_GROUP: | ||
140 | typ = "group" | ||
141 | qual = grp.getgrgid(self.qualifier).gr_name | ||
142 | elif self.tag == ACL_USER_OBJ: | ||
143 | typ = "user" | ||
144 | elif self.tag == ACL_GROUP_OBJ: | ||
145 | typ = "group" | ||
146 | elif self.tag == ACL_MASK: | ||
147 | typ = "mask" | ||
148 | elif self.tag == ACL_OTHER: | ||
149 | typ = "other" | ||
150 | |||
151 | r = "r" if self.mode & ACL_READ else "-" | ||
152 | w = "w" if self.mode & ACL_WRITE else "-" | ||
153 | x = "x" if self.mode & ACL_EXECUTE else "-" | ||
154 | |||
155 | return f"{typ}:{qual}:{r}{w}{x}" | ||
156 | |||
157 | |||
158 | class ACL(object): | ||
159 | def __init__(self, acl): | ||
160 | self.acl = acl | ||
161 | |||
162 | def __del__(self): | ||
163 | acl_free(self.acl) | ||
164 | |||
165 | def entries(self): | ||
166 | entry_id = ACL_FIRST_ENTRY | ||
167 | while True: | ||
168 | entry = acl_get_entry(self.acl, entry_id) | ||
169 | if entry is None: | ||
170 | break | ||
171 | |||
172 | permset = acl_get_permset(entry) | ||
173 | |||
174 | mode = 0 | ||
175 | for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE): | ||
176 | if acl_get_perm(permset, m): | ||
177 | mode |= m | ||
178 | |||
179 | qualifier = None | ||
180 | tag = acl_get_tag_type(entry) | ||
181 | |||
182 | if tag == ACL_USER or tag == ACL_GROUP: | ||
183 | qual = acl_get_qualifier(entry) | ||
184 | qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0] | ||
185 | |||
186 | yield Entry(tag, qualifier, mode) | ||
187 | |||
188 | entry_id = ACL_NEXT_ENTRY | ||
189 | |||
190 | @classmethod | ||
191 | def from_path(cls, path, typ): | ||
192 | acl = acl_get_file(path, typ) | ||
193 | return cls(acl) | ||
194 | |||
195 | |||
196 | def main(): | ||
197 | import argparse | ||
198 | import pwd | ||
199 | import grp | ||
200 | from pathlib import Path | ||
201 | |||
202 | parser = argparse.ArgumentParser() | ||
203 | parser.add_argument("path", help="File Path", type=Path) | ||
204 | |||
205 | args = parser.parse_args() | ||
206 | |||
207 | acl = ACL.from_path(args.path, ACL_TYPE_ACCESS) | ||
208 | for entry in acl.entries(): | ||
209 | print(str(entry)) | ||
210 | |||
211 | return 0 | ||
212 | |||
213 | |||
214 | if __name__ == "__main__": | ||
215 | sys.exit(main()) | ||
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py new file mode 100644 index 0000000000..639e1607f8 --- /dev/null +++ b/bitbake/lib/bb/asyncrpc/__init__.py | |||
@@ -0,0 +1,16 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | |||
8 | from .client import AsyncClient, Client, ClientPool | ||
9 | from .serv import AsyncServer, AsyncServerConnection | ||
10 | from .connection import DEFAULT_MAX_CHUNK | ||
11 | from .exceptions import ( | ||
12 | ClientError, | ||
13 | ServerError, | ||
14 | ConnectionClosedError, | ||
15 | InvokeError, | ||
16 | ) | ||
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py new file mode 100644 index 0000000000..a350b4fb12 --- /dev/null +++ b/bitbake/lib/bb/asyncrpc/client.py | |||
@@ -0,0 +1,313 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import abc | ||
8 | import asyncio | ||
9 | import json | ||
10 | import os | ||
11 | import socket | ||
12 | import sys | ||
13 | import re | ||
14 | import contextlib | ||
15 | from threading import Thread | ||
16 | from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK | ||
17 | from .exceptions import ConnectionClosedError, InvokeError | ||
18 | |||
19 | UNIX_PREFIX = "unix://" | ||
20 | WS_PREFIX = "ws://" | ||
21 | WSS_PREFIX = "wss://" | ||
22 | |||
23 | ADDR_TYPE_UNIX = 0 | ||
24 | ADDR_TYPE_TCP = 1 | ||
25 | ADDR_TYPE_WS = 2 | ||
26 | |||
27 | def parse_address(addr): | ||
28 | if addr.startswith(UNIX_PREFIX): | ||
29 | return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],)) | ||
30 | elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX): | ||
31 | return (ADDR_TYPE_WS, (addr,)) | ||
32 | else: | ||
33 | m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr) | ||
34 | if m is not None: | ||
35 | host = m.group("host") | ||
36 | port = m.group("port") | ||
37 | else: | ||
38 | host, port = addr.split(":") | ||
39 | |||
40 | return (ADDR_TYPE_TCP, (host, int(port))) | ||
41 | |||
42 | class AsyncClient(object): | ||
43 | def __init__( | ||
44 | self, | ||
45 | proto_name, | ||
46 | proto_version, | ||
47 | logger, | ||
48 | timeout=30, | ||
49 | server_headers=False, | ||
50 | headers={}, | ||
51 | ): | ||
52 | self.socket = None | ||
53 | self.max_chunk = DEFAULT_MAX_CHUNK | ||
54 | self.proto_name = proto_name | ||
55 | self.proto_version = proto_version | ||
56 | self.logger = logger | ||
57 | self.timeout = timeout | ||
58 | self.needs_server_headers = server_headers | ||
59 | self.server_headers = {} | ||
60 | self.headers = headers | ||
61 | |||
62 | async def connect_tcp(self, address, port): | ||
63 | async def connect_sock(): | ||
64 | reader, writer = await asyncio.open_connection(address, port) | ||
65 | return StreamConnection(reader, writer, self.timeout, self.max_chunk) | ||
66 | |||
67 | self._connect_sock = connect_sock | ||
68 | |||
69 | async def connect_unix(self, path): | ||
70 | async def connect_sock(): | ||
71 | # AF_UNIX has path length issues so chdir here to workaround | ||
72 | cwd = os.getcwd() | ||
73 | try: | ||
74 | os.chdir(os.path.dirname(path)) | ||
75 | # The socket must be opened synchronously so that CWD doesn't get | ||
76 | # changed out from underneath us so we pass as a sock into asyncio | ||
77 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) | ||
78 | sock.connect(os.path.basename(path)) | ||
79 | finally: | ||
80 | os.chdir(cwd) | ||
81 | reader, writer = await asyncio.open_unix_connection(sock=sock) | ||
82 | return StreamConnection(reader, writer, self.timeout, self.max_chunk) | ||
83 | |||
84 | self._connect_sock = connect_sock | ||
85 | |||
86 | async def connect_websocket(self, uri): | ||
87 | import websockets | ||
88 | |||
89 | async def connect_sock(): | ||
90 | websocket = await websockets.connect(uri, ping_interval=None) | ||
91 | return WebsocketConnection(websocket, self.timeout) | ||
92 | |||
93 | self._connect_sock = connect_sock | ||
94 | |||
95 | async def setup_connection(self): | ||
96 | # Send headers | ||
97 | await self.socket.send("%s %s" % (self.proto_name, self.proto_version)) | ||
98 | await self.socket.send( | ||
99 | "needs-headers: %s" % ("true" if self.needs_server_headers else "false") | ||
100 | ) | ||
101 | for k, v in self.headers.items(): | ||
102 | await self.socket.send("%s: %s" % (k, v)) | ||
103 | |||
104 | # End of headers | ||
105 | await self.socket.send("") | ||
106 | |||
107 | self.server_headers = {} | ||
108 | if self.needs_server_headers: | ||
109 | while True: | ||
110 | line = await self.socket.recv() | ||
111 | if not line: | ||
112 | # End headers | ||
113 | break | ||
114 | tag, value = line.split(":", 1) | ||
115 | self.server_headers[tag.lower()] = value.strip() | ||
116 | |||
117 | async def get_header(self, tag, default): | ||
118 | await self.connect() | ||
119 | return self.server_headers.get(tag, default) | ||
120 | |||
121 | async def connect(self): | ||
122 | if self.socket is None: | ||
123 | self.socket = await self._connect_sock() | ||
124 | await self.setup_connection() | ||
125 | |||
126 | async def disconnect(self): | ||
127 | if self.socket is not None: | ||
128 | await self.socket.close() | ||
129 | self.socket = None | ||
130 | |||
131 | async def close(self): | ||
132 | await self.disconnect() | ||
133 | |||
134 | async def _send_wrapper(self, proc): | ||
135 | count = 0 | ||
136 | while True: | ||
137 | try: | ||
138 | await self.connect() | ||
139 | return await proc() | ||
140 | except ( | ||
141 | OSError, | ||
142 | ConnectionError, | ||
143 | ConnectionClosedError, | ||
144 | json.JSONDecodeError, | ||
145 | UnicodeDecodeError, | ||
146 | ) as e: | ||
147 | self.logger.warning("Error talking to server: %s" % e) | ||
148 | if count >= 3: | ||
149 | if not isinstance(e, ConnectionError): | ||
150 | raise ConnectionError(str(e)) | ||
151 | raise e | ||
152 | await self.close() | ||
153 | count += 1 | ||
154 | |||
155 | def check_invoke_error(self, msg): | ||
156 | if isinstance(msg, dict) and "invoke-error" in msg: | ||
157 | raise InvokeError(msg["invoke-error"]["message"]) | ||
158 | |||
159 | async def invoke(self, msg): | ||
160 | async def proc(): | ||
161 | await self.socket.send_message(msg) | ||
162 | return await self.socket.recv_message() | ||
163 | |||
164 | result = await self._send_wrapper(proc) | ||
165 | self.check_invoke_error(result) | ||
166 | return result | ||
167 | |||
168 | async def ping(self): | ||
169 | return await self.invoke({"ping": {}}) | ||
170 | |||
171 | async def __aenter__(self): | ||
172 | return self | ||
173 | |||
174 | async def __aexit__(self, exc_type, exc_value, traceback): | ||
175 | await self.close() | ||
176 | |||
177 | |||
178 | class Client(object): | ||
179 | def __init__(self): | ||
180 | self.client = self._get_async_client() | ||
181 | self.loop = asyncio.new_event_loop() | ||
182 | |||
183 | # Override any pre-existing loop. | ||
184 | # Without this, the PR server export selftest triggers a hang | ||
185 | # when running with Python 3.7. The drawback is that there is | ||
186 | # potential for issues if the PR and hash equiv (or some new) | ||
187 | # clients need to both be instantiated in the same process. | ||
188 | # This should be revisited if/when Python 3.9 becomes the | ||
189 | # minimum required version for BitBake, as it seems not | ||
190 | # required (but harmless) with it. | ||
191 | asyncio.set_event_loop(self.loop) | ||
192 | |||
193 | self._add_methods("connect_tcp", "ping") | ||
194 | |||
195 | @abc.abstractmethod | ||
196 | def _get_async_client(self): | ||
197 | pass | ||
198 | |||
199 | def _get_downcall_wrapper(self, downcall): | ||
200 | def wrapper(*args, **kwargs): | ||
201 | return self.loop.run_until_complete(downcall(*args, **kwargs)) | ||
202 | |||
203 | return wrapper | ||
204 | |||
205 | def _add_methods(self, *methods): | ||
206 | for m in methods: | ||
207 | downcall = getattr(self.client, m) | ||
208 | setattr(self, m, self._get_downcall_wrapper(downcall)) | ||
209 | |||
210 | def connect_unix(self, path): | ||
211 | self.loop.run_until_complete(self.client.connect_unix(path)) | ||
212 | self.loop.run_until_complete(self.client.connect()) | ||
213 | |||
214 | @property | ||
215 | def max_chunk(self): | ||
216 | return self.client.max_chunk | ||
217 | |||
218 | @max_chunk.setter | ||
219 | def max_chunk(self, value): | ||
220 | self.client.max_chunk = value | ||
221 | |||
222 | def disconnect(self): | ||
223 | self.loop.run_until_complete(self.client.close()) | ||
224 | |||
225 | def close(self): | ||
226 | if self.loop: | ||
227 | self.loop.run_until_complete(self.client.close()) | ||
228 | if sys.version_info >= (3, 6): | ||
229 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | ||
230 | self.loop.close() | ||
231 | self.loop = None | ||
232 | |||
233 | def __enter__(self): | ||
234 | return self | ||
235 | |||
236 | def __exit__(self, exc_type, exc_value, traceback): | ||
237 | self.close() | ||
238 | return False | ||
239 | |||
240 | |||
241 | class ClientPool(object): | ||
242 | def __init__(self, max_clients): | ||
243 | self.avail_clients = [] | ||
244 | self.num_clients = 0 | ||
245 | self.max_clients = max_clients | ||
246 | self.loop = None | ||
247 | self.client_condition = None | ||
248 | |||
249 | @abc.abstractmethod | ||
250 | async def _new_client(self): | ||
251 | raise NotImplementedError("Must be implemented in derived class") | ||
252 | |||
253 | def close(self): | ||
254 | if self.client_condition: | ||
255 | self.client_condition = None | ||
256 | |||
257 | if self.loop: | ||
258 | self.loop.run_until_complete(self.__close_clients()) | ||
259 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | ||
260 | self.loop.close() | ||
261 | self.loop = None | ||
262 | |||
263 | def run_tasks(self, tasks): | ||
264 | if not self.loop: | ||
265 | self.loop = asyncio.new_event_loop() | ||
266 | |||
267 | thread = Thread(target=self.__thread_main, args=(tasks,)) | ||
268 | thread.start() | ||
269 | thread.join() | ||
270 | |||
271 | @contextlib.asynccontextmanager | ||
272 | async def get_client(self): | ||
273 | async with self.client_condition: | ||
274 | if self.avail_clients: | ||
275 | client = self.avail_clients.pop() | ||
276 | elif self.num_clients < self.max_clients: | ||
277 | self.num_clients += 1 | ||
278 | client = await self._new_client() | ||
279 | else: | ||
280 | while not self.avail_clients: | ||
281 | await self.client_condition.wait() | ||
282 | client = self.avail_clients.pop() | ||
283 | |||
284 | try: | ||
285 | yield client | ||
286 | finally: | ||
287 | async with self.client_condition: | ||
288 | self.avail_clients.append(client) | ||
289 | self.client_condition.notify() | ||
290 | |||
291 | def __thread_main(self, tasks): | ||
292 | async def process_task(task): | ||
293 | async with self.get_client() as client: | ||
294 | await task(client) | ||
295 | |||
296 | asyncio.set_event_loop(self.loop) | ||
297 | if not self.client_condition: | ||
298 | self.client_condition = asyncio.Condition() | ||
299 | tasks = [process_task(t) for t in tasks] | ||
300 | self.loop.run_until_complete(asyncio.gather(*tasks)) | ||
301 | |||
302 | async def __close_clients(self): | ||
303 | for c in self.avail_clients: | ||
304 | await c.close() | ||
305 | self.avail_clients = [] | ||
306 | self.num_clients = 0 | ||
307 | |||
308 | def __enter__(self): | ||
309 | return self | ||
310 | |||
311 | def __exit__(self, exc_type, exc_value, traceback): | ||
312 | self.close() | ||
313 | return False | ||
diff --git a/bitbake/lib/bb/asyncrpc/connection.py b/bitbake/lib/bb/asyncrpc/connection.py new file mode 100644 index 0000000000..7f0cf6ba96 --- /dev/null +++ b/bitbake/lib/bb/asyncrpc/connection.py | |||
@@ -0,0 +1,146 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import asyncio | ||
8 | import itertools | ||
9 | import json | ||
10 | from datetime import datetime | ||
11 | from .exceptions import ClientError, ConnectionClosedError | ||
12 | |||
13 | |||
14 | # The Python async server defaults to a 64K receive buffer, so we hardcode our | ||
15 | # maximum chunk size. It would be better if the client and server reported to | ||
16 | # each other what the maximum chunk sizes were, but that will slow down the | ||
17 | # connection setup with a round trip delay so I'd rather not do that unless it | ||
18 | # is necessary | ||
19 | DEFAULT_MAX_CHUNK = 32 * 1024 | ||
20 | |||
21 | |||
22 | def chunkify(msg, max_chunk): | ||
23 | if len(msg) < max_chunk - 1: | ||
24 | yield "".join((msg, "\n")) | ||
25 | else: | ||
26 | yield "".join((json.dumps({"chunk-stream": None}), "\n")) | ||
27 | |||
28 | args = [iter(msg)] * (max_chunk - 1) | ||
29 | for m in map("".join, itertools.zip_longest(*args, fillvalue="")): | ||
30 | yield "".join(itertools.chain(m, "\n")) | ||
31 | yield "\n" | ||
32 | |||
33 | |||
34 | def json_serialize(obj): | ||
35 | if isinstance(obj, datetime): | ||
36 | return obj.isoformat() | ||
37 | raise TypeError("Type %s not serializeable" % type(obj)) | ||
38 | |||
39 | |||
40 | class StreamConnection(object): | ||
41 | def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK): | ||
42 | self.reader = reader | ||
43 | self.writer = writer | ||
44 | self.timeout = timeout | ||
45 | self.max_chunk = max_chunk | ||
46 | |||
47 | @property | ||
48 | def address(self): | ||
49 | return self.writer.get_extra_info("peername") | ||
50 | |||
51 | async def send_message(self, msg): | ||
52 | for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk): | ||
53 | self.writer.write(c.encode("utf-8")) | ||
54 | await self.writer.drain() | ||
55 | |||
56 | async def recv_message(self): | ||
57 | l = await self.recv() | ||
58 | |||
59 | m = json.loads(l) | ||
60 | if not m: | ||
61 | return m | ||
62 | |||
63 | if "chunk-stream" in m: | ||
64 | lines = [] | ||
65 | while True: | ||
66 | l = await self.recv() | ||
67 | if not l: | ||
68 | break | ||
69 | lines.append(l) | ||
70 | |||
71 | m = json.loads("".join(lines)) | ||
72 | |||
73 | return m | ||
74 | |||
75 | async def send(self, msg): | ||
76 | self.writer.write(("%s\n" % msg).encode("utf-8")) | ||
77 | await self.writer.drain() | ||
78 | |||
79 | async def recv(self): | ||
80 | if self.timeout < 0: | ||
81 | line = await self.reader.readline() | ||
82 | else: | ||
83 | try: | ||
84 | line = await asyncio.wait_for(self.reader.readline(), self.timeout) | ||
85 | except asyncio.TimeoutError: | ||
86 | raise ConnectionError("Timed out waiting for data") | ||
87 | |||
88 | if not line: | ||
89 | raise ConnectionClosedError("Connection closed") | ||
90 | |||
91 | line = line.decode("utf-8") | ||
92 | |||
93 | if not line.endswith("\n"): | ||
94 | raise ConnectionError("Bad message %r" % (line)) | ||
95 | |||
96 | return line.rstrip() | ||
97 | |||
98 | async def close(self): | ||
99 | self.reader = None | ||
100 | if self.writer is not None: | ||
101 | self.writer.close() | ||
102 | self.writer = None | ||
103 | |||
104 | |||
105 | class WebsocketConnection(object): | ||
106 | def __init__(self, socket, timeout): | ||
107 | self.socket = socket | ||
108 | self.timeout = timeout | ||
109 | |||
110 | @property | ||
111 | def address(self): | ||
112 | return ":".join(str(s) for s in self.socket.remote_address) | ||
113 | |||
114 | async def send_message(self, msg): | ||
115 | await self.send(json.dumps(msg, default=json_serialize)) | ||
116 | |||
117 | async def recv_message(self): | ||
118 | m = await self.recv() | ||
119 | return json.loads(m) | ||
120 | |||
121 | async def send(self, msg): | ||
122 | import websockets.exceptions | ||
123 | |||
124 | try: | ||
125 | await self.socket.send(msg) | ||
126 | except websockets.exceptions.ConnectionClosed: | ||
127 | raise ConnectionClosedError("Connection closed") | ||
128 | |||
129 | async def recv(self): | ||
130 | import websockets.exceptions | ||
131 | |||
132 | try: | ||
133 | if self.timeout < 0: | ||
134 | return await self.socket.recv() | ||
135 | |||
136 | try: | ||
137 | return await asyncio.wait_for(self.socket.recv(), self.timeout) | ||
138 | except asyncio.TimeoutError: | ||
139 | raise ConnectionError("Timed out waiting for data") | ||
140 | except websockets.exceptions.ConnectionClosed: | ||
141 | raise ConnectionClosedError("Connection closed") | ||
142 | |||
143 | async def close(self): | ||
144 | if self.socket is not None: | ||
145 | await self.socket.close() | ||
146 | self.socket = None | ||
diff --git a/bitbake/lib/bb/asyncrpc/exceptions.py b/bitbake/lib/bb/asyncrpc/exceptions.py new file mode 100644 index 0000000000..ae1043a38b --- /dev/null +++ b/bitbake/lib/bb/asyncrpc/exceptions.py | |||
@@ -0,0 +1,21 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | |||
8 | class ClientError(Exception): | ||
9 | pass | ||
10 | |||
11 | |||
12 | class InvokeError(Exception): | ||
13 | pass | ||
14 | |||
15 | |||
16 | class ServerError(Exception): | ||
17 | pass | ||
18 | |||
19 | |||
20 | class ConnectionClosedError(Exception): | ||
21 | pass | ||
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py new file mode 100644 index 0000000000..a66117acad --- /dev/null +++ b/bitbake/lib/bb/asyncrpc/serv.py | |||
@@ -0,0 +1,391 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import abc | ||
8 | import asyncio | ||
9 | import json | ||
10 | import os | ||
11 | import signal | ||
12 | import socket | ||
13 | import sys | ||
14 | import multiprocessing | ||
15 | import logging | ||
16 | from .connection import StreamConnection, WebsocketConnection | ||
17 | from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError | ||
18 | |||
19 | |||
20 | class ClientLoggerAdapter(logging.LoggerAdapter): | ||
21 | def process(self, msg, kwargs): | ||
22 | return f"[Client {self.extra['address']}] {msg}", kwargs | ||
23 | |||
24 | |||
25 | class AsyncServerConnection(object): | ||
26 | # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no | ||
27 | # return message will be automatically be sent back to the client | ||
28 | NO_RESPONSE = object() | ||
29 | |||
30 | def __init__(self, socket, proto_name, logger): | ||
31 | self.socket = socket | ||
32 | self.proto_name = proto_name | ||
33 | self.handlers = { | ||
34 | "ping": self.handle_ping, | ||
35 | } | ||
36 | self.logger = ClientLoggerAdapter( | ||
37 | logger, | ||
38 | { | ||
39 | "address": socket.address, | ||
40 | }, | ||
41 | ) | ||
42 | self.client_headers = {} | ||
43 | |||
44 | async def close(self): | ||
45 | await self.socket.close() | ||
46 | |||
47 | async def handle_headers(self, headers): | ||
48 | return {} | ||
49 | |||
50 | async def process_requests(self): | ||
51 | try: | ||
52 | self.logger.info("Client %r connected" % (self.socket.address,)) | ||
53 | |||
54 | # Read protocol and version | ||
55 | client_protocol = await self.socket.recv() | ||
56 | if not client_protocol: | ||
57 | return | ||
58 | |||
59 | (client_proto_name, client_proto_version) = client_protocol.split() | ||
60 | if client_proto_name != self.proto_name: | ||
61 | self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name)) | ||
62 | return | ||
63 | |||
64 | self.proto_version = tuple(int(v) for v in client_proto_version.split(".")) | ||
65 | if not self.validate_proto_version(): | ||
66 | self.logger.debug( | ||
67 | "Rejecting invalid protocol version %s" % (client_proto_version) | ||
68 | ) | ||
69 | return | ||
70 | |||
71 | # Read headers | ||
72 | self.client_headers = {} | ||
73 | while True: | ||
74 | header = await self.socket.recv() | ||
75 | if not header: | ||
76 | # Empty line. End of headers | ||
77 | break | ||
78 | tag, value = header.split(":", 1) | ||
79 | self.client_headers[tag.lower()] = value.strip() | ||
80 | |||
81 | if self.client_headers.get("needs-headers", "false") == "true": | ||
82 | for k, v in (await self.handle_headers(self.client_headers)).items(): | ||
83 | await self.socket.send("%s: %s" % (k, v)) | ||
84 | await self.socket.send("") | ||
85 | |||
86 | # Handle messages | ||
87 | while True: | ||
88 | d = await self.socket.recv_message() | ||
89 | if d is None: | ||
90 | break | ||
91 | try: | ||
92 | response = await self.dispatch_message(d) | ||
93 | except InvokeError as e: | ||
94 | await self.socket.send_message( | ||
95 | {"invoke-error": {"message": str(e)}} | ||
96 | ) | ||
97 | break | ||
98 | |||
99 | if response is not self.NO_RESPONSE: | ||
100 | await self.socket.send_message(response) | ||
101 | |||
102 | except ConnectionClosedError as e: | ||
103 | self.logger.info(str(e)) | ||
104 | except (ClientError, ConnectionError) as e: | ||
105 | self.logger.error(str(e)) | ||
106 | finally: | ||
107 | await self.close() | ||
108 | |||
109 | async def dispatch_message(self, msg): | ||
110 | for k in self.handlers.keys(): | ||
111 | if k in msg: | ||
112 | self.logger.debug("Handling %s" % k) | ||
113 | return await self.handlers[k](msg[k]) | ||
114 | |||
115 | raise ClientError("Unrecognized command %r" % msg) | ||
116 | |||
117 | async def handle_ping(self, request): | ||
118 | return {"alive": True} | ||
119 | |||
120 | |||
class StreamServer(object):
    """
    Base for servers built on asyncio stream (reader, writer) pairs.
    Subclasses bind the listening endpoint; this class wraps each accepted
    stream in a StreamConnection and hands it to the configured handler.
    """

    def __init__(self, handler, logger):
        self.handler = handler
        self.logger = logger
        # Set once stop() is called; late-arriving connections are refused
        self.closed = False

    async def handle_stream_client(self, reader, writer):
        # writer.transport.set_write_buffer_limits(0)
        conn = StreamConnection(reader, writer, -1)
        if self.closed:
            # Connection raced with shutdown; drop it immediately
            await conn.close()
            return

        await self.handler(conn)

    async def stop(self):
        self.closed = True
138 | |||
139 | |||
class TCPStreamServer(StreamServer):
    """StreamServer bound to a TCP host/port pair."""

    def __init__(self, host, port, handler, logger):
        super().__init__(handler, logger)
        self.host = host
        self.port = port

    def start(self, loop):
        """Bind the listening socket(s) and return the tasks to await."""
        self.server = loop.run_until_complete(
            asyncio.start_server(self.handle_stream_client, self.host, self.port)
        )

        for sock in self.server.sockets:
            self.logger.debug("Listening on %r" % (sock.getsockname(),))
            # Newer python does this automatically. Do it manually here for
            # maximum compatibility
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
            sock.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)

            # Enable keep alives. This prevents broken client connections
            # from persisting on the server for long periods of time.
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)

        name = self.server.sockets[0].getsockname()
        # IPv6 addresses need bracketing in host:port notation
        if self.server.sockets[0].family == socket.AF_INET6:
            fmt = "[%s]:%d"
        else:
            fmt = "%s:%d"
        self.address = fmt % (name[0], name[1])

        return [self.server.wait_closed()]

    async def stop(self):
        await super().stop()
        self.server.close()

    def cleanup(self):
        pass
179 | |||
180 | |||
class UnixStreamServer(StreamServer):
    """StreamServer bound to a unix domain socket path."""

    def __init__(self, path, handler, logger):
        super().__init__(handler, logger)
        self.path = path

    def start(self, loop):
        """Bind the unix socket and return the tasks to await."""
        saved_cwd = os.getcwd()
        try:
            # Work around path length limits in AF_UNIX
            os.chdir(os.path.dirname(self.path))
            self.server = loop.run_until_complete(
                asyncio.start_unix_server(
                    self.handle_stream_client, os.path.basename(self.path)
                )
            )
        finally:
            os.chdir(saved_cwd)

        self.logger.debug("Listening on %r" % self.path)
        self.address = "unix://%s" % os.path.abspath(self.path)
        return [self.server.wait_closed()]

    async def stop(self):
        await super().stop()
        self.server.close()

    def cleanup(self):
        # Socket files are not removed by close(); unlink explicitly
        os.unlink(self.path)
209 | |||
210 | |||
class WebsocketsServer(object):
    """Server accepting connections over websockets."""

    def __init__(self, host, port, handler, logger):
        self.host = host
        self.port = port
        self.handler = handler
        self.logger = logger

    def start(self, loop):
        """Bind the websocket listener and return the tasks to await."""
        # Imported lazily so the module loads without the websockets package
        import websockets.server

        self.server = loop.run_until_complete(
            websockets.server.serve(
                self.client_handler,
                self.host,
                self.port,
                ping_interval=None,
            )
        )

        for sock in self.server.sockets:
            self.logger.debug("Listening on %r" % (sock.getsockname(),))

            # Enable keep alives. This prevents broken client connections
            # from persisting on the server for long periods of time.
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)

        name = self.server.sockets[0].getsockname()
        # IPv6 addresses need bracketing in host:port notation
        if self.server.sockets[0].family == socket.AF_INET6:
            self.address = "ws://[%s]:%d" % (name[0], name[1])
        else:
            self.address = "ws://%s:%d" % (name[0], name[1])

        return [self.server.wait_closed()]

    async def stop(self):
        self.server.close()

    def cleanup(self):
        pass

    async def client_handler(self, websocket):
        conn = WebsocketConnection(websocket, -1)
        await self.handler(conn)
257 | |||
258 | |||
class AsyncServer(object):
    """
    Base class for asyncio-based RPC servers. Pick a transport with one of
    the start_*_server() methods, then call serve_forever() (current
    process) or serve_as_process() (child process). Subclasses must
    implement accept_client().
    """

    def __init__(self, logger):
        self.logger = logger
        self.loop = None
        self.run_tasks = []

    def start_tcp_server(self, host, port):
        """Configure the server to listen on a TCP host/port."""
        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)

    def start_unix_server(self, path):
        """Configure the server to listen on a unix domain socket."""
        self.server = UnixStreamServer(path, self._client_handler, self.logger)

    def start_websocket_server(self, host, port):
        """Configure the server to listen for websocket connections."""
        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)

    async def _client_handler(self, socket):
        """Wrap an accepted connection and run its request loop to completion."""
        address = socket.address
        try:
            client = self.accept_client(socket)
            await client.process_requests()
        except Exception as e:
            import traceback

            self.logger.error(
                "Error from client %s: %s" % (address, str(e)), exc_info=True
            )
            traceback.print_exc()
        finally:
            # Always release the connection, even on handler errors
            self.logger.debug("Client %s disconnected", address)
            await socket.close()

    @abc.abstractmethod
    def accept_client(self, socket):
        """Return a client object exposing a process_requests() coroutine."""
        pass

    async def stop(self):
        """Ask the underlying transport server to shut down."""
        self.logger.debug("Stopping server")
        await self.server.stop()

    def start(self):
        """Start listening; returns the awaitables that complete at shutdown."""
        tasks = self.server.start(self.loop)
        self.address = self.server.address
        return tasks

    def signal_handler(self):
        """Schedule a graceful stop from inside the event loop."""
        self.logger.debug("Got exit signal")
        self.loop.create_task(self.stop())

    def _serve_forever(self, tasks):
        """Run the event loop until the given shutdown tasks complete."""
        try:
            self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
            self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
            self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
            # SIGTERM may have been blocked by serve_as_process(); accept it
            # now that the handler is installed
            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])

            self.loop.run_until_complete(asyncio.gather(*tasks))

            self.logger.debug("Server shutting down")
        finally:
            self.server.cleanup()

    def serve_forever(self):
        """
        Serve requests in the current process
        """
        self._create_loop()
        tasks = self.start()
        self._serve_forever(tasks)
        self.loop.close()

    def _create_loop(self):
        # Create loop and override any loop that may have existed in
        # a parent process. It is possible that the usecases of
        # serve_forever might be constrained enough to allow using
        # get_event_loop here, but better safe than sorry for now.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
        """
        Serve requests in a child process

        prefunc(self, *args) runs in the child before serving begins;
        log_level, if given, overrides the child's logger level. Returns
        the started multiprocessing.Process; self.address is set from the
        child's bound address (None if binding failed).
        """

        def run(queue):
            # Create loop and override any loop that may have existed
            # in a parent process. Without doing this and instead
            # using get_event_loop, at the very minimum the hashserv
            # unit tests will hang when running the second test.
            # This happens since get_event_loop in the spawned server
            # process for the second testcase ends up with the loop
            # from the hashserv client created in the unit test process
            # when running the first testcase. The problem is somewhat
            # more general, though, as any potential use of asyncio in
            # Cooker could create a loop that needs to replaced in this
            # new process.
            self._create_loop()
            try:
                self.address = None
                tasks = self.start()
            finally:
                # Always put the server address to wake up the parent task
                queue.put(self.address)
                queue.close()

            if prefunc is not None:
                prefunc(self, *args)

            if log_level is not None:
                self.logger.setLevel(log_level)

            self._serve_forever(tasks)

            # bitbake requires Python 3.8+, so the historical
            # "sys.version_info >= (3, 6)" guard here was always true
            # and has been dropped
            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
            self.loop.close()

        queue = multiprocessing.Queue()

        # Temporarily block SIGTERM. The server process will inherit this
        # block which will ensure it doesn't receive the SIGTERM until the
        # handler is ready for it
        mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
        try:
            self.process = multiprocessing.Process(target=run, args=(queue,))
            self.process.start()

            self.address = queue.get()
            queue.close()
            queue.join_thread()

            return self.process
        finally:
            signal.pthread_sigmask(signal.SIG_SETMASK, mask)
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py index f4f897e41a..44d08f5c55 100644 --- a/bitbake/lib/bb/build.py +++ b/bitbake/lib/bb/build.py | |||
@@ -20,10 +20,12 @@ import itertools | |||
20 | import time | 20 | import time |
21 | import re | 21 | import re |
22 | import stat | 22 | import stat |
23 | import datetime | ||
23 | import bb | 24 | import bb |
24 | import bb.msg | 25 | import bb.msg |
25 | import bb.process | 26 | import bb.process |
26 | import bb.progress | 27 | import bb.progress |
28 | from io import StringIO | ||
27 | from bb import data, event, utils | 29 | from bb import data, event, utils |
28 | 30 | ||
29 | bblogger = logging.getLogger('BitBake') | 31 | bblogger = logging.getLogger('BitBake') |
@@ -176,7 +178,9 @@ class StdoutNoopContextManager: | |||
176 | 178 | ||
177 | @property | 179 | @property |
178 | def name(self): | 180 | def name(self): |
179 | return sys.stdout.name | 181 | if "name" in dir(sys.stdout): |
182 | return sys.stdout.name | ||
183 | return "<mem>" | ||
180 | 184 | ||
181 | 185 | ||
182 | def exec_func(func, d, dirs = None): | 186 | def exec_func(func, d, dirs = None): |
@@ -295,9 +299,25 @@ def exec_func_python(func, d, runfile, cwd=None): | |||
295 | lineno = int(d.getVarFlag(func, "lineno", False)) | 299 | lineno = int(d.getVarFlag(func, "lineno", False)) |
296 | bb.methodpool.insert_method(func, text, fn, lineno - 1) | 300 | bb.methodpool.insert_method(func, text, fn, lineno - 1) |
297 | 301 | ||
298 | comp = utils.better_compile(code, func, "exec_python_func() autogenerated") | 302 | if verboseStdoutLogging: |
299 | utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated") | 303 | sys.stdout.flush() |
304 | sys.stderr.flush() | ||
305 | currout = sys.stdout | ||
306 | currerr = sys.stderr | ||
307 | sys.stderr = sys.stdout = execio = StringIO() | ||
308 | comp = utils.better_compile(code, func, "exec_func_python() autogenerated") | ||
309 | utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated") | ||
300 | finally: | 310 | finally: |
311 | if verboseStdoutLogging: | ||
312 | execio.flush() | ||
313 | logger.plain("%s" % execio.getvalue()) | ||
314 | sys.stdout = currout | ||
315 | sys.stderr = currerr | ||
316 | execio.close() | ||
317 | # We want any stdout/stderr to be printed before any other log messages to make debugging | ||
318 | # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this. | ||
319 | sys.stdout.flush() | ||
320 | sys.stderr.flush() | ||
301 | bb.debug(2, "Python function %s finished" % func) | 321 | bb.debug(2, "Python function %s finished" % func) |
302 | 322 | ||
303 | if cwd and olddir: | 323 | if cwd and olddir: |
@@ -436,7 +456,11 @@ exit $ret | |||
436 | if fakerootcmd: | 456 | if fakerootcmd: |
437 | cmd = [fakerootcmd, runfile] | 457 | cmd = [fakerootcmd, runfile] |
438 | 458 | ||
439 | if verboseStdoutLogging: | 459 | # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either |
460 | # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively", | ||
461 | # ie. inside another function, in which case stdout is already being captured so we don't | ||
462 | # want to Tee here as output would be printed twice, and out of order. | ||
463 | if verboseStdoutLogging and sys.stdout == sys.__stdout__: | ||
440 | logfile = LogTee(logger, StdoutNoopContextManager()) | 464 | logfile = LogTee(logger, StdoutNoopContextManager()) |
441 | else: | 465 | else: |
442 | logfile = StdoutNoopContextManager() | 466 | logfile = StdoutNoopContextManager() |
@@ -565,10 +589,8 @@ exit $ret | |||
565 | def _task_data(fn, task, d): | 589 | def _task_data(fn, task, d): |
566 | localdata = bb.data.createCopy(d) | 590 | localdata = bb.data.createCopy(d) |
567 | localdata.setVar('BB_FILENAME', fn) | 591 | localdata.setVar('BB_FILENAME', fn) |
568 | localdata.setVar('BB_CURRENTTASK', task[3:]) | ||
569 | localdata.setVar('OVERRIDES', 'task-%s:%s' % | 592 | localdata.setVar('OVERRIDES', 'task-%s:%s' % |
570 | (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False))) | 593 | (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False))) |
571 | localdata.finalize() | ||
572 | bb.data.expandKeys(localdata) | 594 | bb.data.expandKeys(localdata) |
573 | return localdata | 595 | return localdata |
574 | 596 | ||
@@ -579,7 +601,7 @@ def _exec_task(fn, task, d, quieterr): | |||
579 | running it with its own local metadata, and with some useful variables set. | 601 | running it with its own local metadata, and with some useful variables set. |
580 | """ | 602 | """ |
581 | if not d.getVarFlag(task, 'task', False): | 603 | if not d.getVarFlag(task, 'task', False): |
582 | event.fire(TaskInvalid(task, d), d) | 604 | event.fire(TaskInvalid(task, fn, d), d) |
583 | logger.error("No such task: %s" % task) | 605 | logger.error("No such task: %s" % task) |
584 | return 1 | 606 | return 1 |
585 | 607 | ||
@@ -615,7 +637,8 @@ def _exec_task(fn, task, d, quieterr): | |||
615 | logorder = os.path.join(tempdir, 'log.task_order') | 637 | logorder = os.path.join(tempdir, 'log.task_order') |
616 | try: | 638 | try: |
617 | with open(logorder, 'a') as logorderfile: | 639 | with open(logorder, 'a') as logorderfile: |
618 | logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase)) | 640 | timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f") |
641 | logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase)) | ||
619 | except OSError: | 642 | except OSError: |
620 | logger.exception("Opening log file '%s'", logorder) | 643 | logger.exception("Opening log file '%s'", logorder) |
621 | pass | 644 | pass |
@@ -682,47 +705,55 @@ def _exec_task(fn, task, d, quieterr): | |||
682 | try: | 705 | try: |
683 | try: | 706 | try: |
684 | event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata) | 707 | event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata) |
685 | except (bb.BBHandledException, SystemExit): | ||
686 | return 1 | ||
687 | 708 | ||
688 | try: | ||
689 | for func in (prefuncs or '').split(): | 709 | for func in (prefuncs or '').split(): |
690 | exec_func(func, localdata) | 710 | exec_func(func, localdata) |
691 | exec_func(task, localdata) | 711 | exec_func(task, localdata) |
692 | for func in (postfuncs or '').split(): | 712 | for func in (postfuncs or '').split(): |
693 | exec_func(func, localdata) | 713 | exec_func(func, localdata) |
694 | except bb.BBHandledException: | 714 | finally: |
695 | event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata) | 715 | # Need to flush and close the logs before sending events where the |
696 | return 1 | 716 | # UI may try to look at the logs. |
697 | except Exception as exc: | 717 | sys.stdout.flush() |
698 | if quieterr: | 718 | sys.stderr.flush() |
699 | event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata) | 719 | |
700 | else: | 720 | bblogger.removeHandler(handler) |
701 | errprinted = errchk.triggered | 721 | |
702 | logger.error(str(exc)) | 722 | # Restore the backup fds |
703 | event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata) | 723 | os.dup2(osi[0], osi[1]) |
704 | return 1 | 724 | os.dup2(oso[0], oso[1]) |
705 | finally: | 725 | os.dup2(ose[0], ose[1]) |
706 | sys.stdout.flush() | 726 | |
707 | sys.stderr.flush() | 727 | # Close the backup fds |
708 | 728 | os.close(osi[0]) | |
709 | bblogger.removeHandler(handler) | 729 | os.close(oso[0]) |
710 | 730 | os.close(ose[0]) | |
711 | # Restore the backup fds | 731 | |
712 | os.dup2(osi[0], osi[1]) | 732 | logfile.close() |
713 | os.dup2(oso[0], oso[1]) | 733 | if os.path.exists(logfn) and os.path.getsize(logfn) == 0: |
714 | os.dup2(ose[0], ose[1]) | 734 | logger.debug2("Zero size logfn %s, removing", logfn) |
715 | 735 | bb.utils.remove(logfn) | |
716 | # Close the backup fds | 736 | bb.utils.remove(loglink) |
717 | os.close(osi[0]) | 737 | except (Exception, SystemExit) as exc: |
718 | os.close(oso[0]) | 738 | handled = False |
719 | os.close(ose[0]) | 739 | if isinstance(exc, bb.BBHandledException): |
740 | handled = True | ||
741 | |||
742 | if quieterr: | ||
743 | if not handled: | ||
744 | logger.warning(repr(exc)) | ||
745 | event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata) | ||
746 | else: | ||
747 | errprinted = errchk.triggered | ||
748 | # If the output is already on stdout, we've printed the information in the | ||
749 | # logs once already so don't duplicate | ||
750 | if verboseStdoutLogging or handled: | ||
751 | errprinted = True | ||
752 | if not handled: | ||
753 | logger.error(repr(exc)) | ||
754 | event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata) | ||
755 | return 1 | ||
720 | 756 | ||
721 | logfile.close() | ||
722 | if os.path.exists(logfn) and os.path.getsize(logfn) == 0: | ||
723 | logger.debug2("Zero size logfn %s, removing", logfn) | ||
724 | bb.utils.remove(logfn) | ||
725 | bb.utils.remove(loglink) | ||
726 | event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata) | 757 | event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata) |
727 | 758 | ||
728 | if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False): | 759 | if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False): |
@@ -760,132 +791,92 @@ def exec_task(fn, task, d, profile = False): | |||
760 | event.fire(failedevent, d) | 791 | event.fire(failedevent, d) |
761 | return 1 | 792 | return 1 |
762 | 793 | ||
763 | def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False): | 794 | def _get_cleanmask(taskname, mcfn): |
764 | """ | 795 | """ |
765 | Internal stamp helper function | 796 | Internal stamp helper function to generate stamp cleaning mask |
766 | Makes sure the stamp directory exists | ||
767 | Returns the stamp path+filename | 797 | Returns the stamp path+filename |
768 | 798 | ||
769 | In the bitbake core, d can be a CacheData and file_name will be set. | 799 | In the bitbake core, d can be a CacheData and file_name will be set. |
770 | When called in task context, d will be a data store, file_name will not be set | 800 | When called in task context, d will be a data store, file_name will not be set |
771 | """ | 801 | """ |
772 | taskflagname = taskname | 802 | cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn) |
773 | if taskname.endswith("_setscene") and taskname != "do_setscene": | 803 | taskflagname = taskname.replace("_setscene", "") |
774 | taskflagname = taskname.replace("_setscene", "") | 804 | if cleanmask: |
775 | 805 | return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")] | |
776 | if file_name: | 806 | return [] |
777 | stamp = d.stamp[file_name] | 807 | |
778 | extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" | 808 | def clean_stamp_mcfn(task, mcfn): |
779 | else: | 809 | cleanmask = _get_cleanmask(task, mcfn) |
780 | stamp = d.getVar('STAMP') | 810 | for mask in cleanmask: |
781 | file_name = d.getVar('BB_FILENAME') | 811 | for name in glob.glob(mask): |
782 | extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or "" | 812 | # Preserve sigdata files in the stamps directory |
813 | if "sigdata" in name or "sigbasedata" in name: | ||
814 | continue | ||
815 | # Preserve taint files in the stamps directory | ||
816 | if name.endswith('.taint'): | ||
817 | continue | ||
818 | os.unlink(name) | ||
783 | 819 | ||
784 | if baseonly: | 820 | def clean_stamp(task, d): |
785 | return stamp | 821 | mcfn = d.getVar('BB_FILENAME') |
786 | if noextra: | 822 | clean_stamp_mcfn(task, mcfn) |
787 | extrainfo = "" | ||
788 | 823 | ||
789 | if not stamp: | 824 | def make_stamp_mcfn(task, mcfn): |
790 | return | ||
791 | 825 | ||
792 | stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo) | 826 | basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn) |
793 | 827 | ||
794 | stampdir = os.path.dirname(stamp) | 828 | stampdir = os.path.dirname(basestamp) |
795 | if cached_mtime_noerror(stampdir) == 0: | 829 | if cached_mtime_noerror(stampdir) == 0: |
796 | bb.utils.mkdirhier(stampdir) | 830 | bb.utils.mkdirhier(stampdir) |
797 | 831 | ||
798 | return stamp | 832 | clean_stamp_mcfn(task, mcfn) |
799 | 833 | ||
800 | def stamp_cleanmask_internal(taskname, d, file_name): | 834 | # Remove the file and recreate to force timestamp |
801 | """ | 835 | # change on broken NFS filesystems |
802 | Internal stamp helper function to generate stamp cleaning mask | 836 | if basestamp: |
803 | Returns the stamp path+filename | 837 | bb.utils.remove(basestamp) |
838 | open(basestamp, "w").close() | ||
804 | 839 | ||
805 | In the bitbake core, d can be a CacheData and file_name will be set. | 840 | def make_stamp(task, d): |
806 | When called in task context, d will be a data store, file_name will not be set | ||
807 | """ | 841 | """ |
808 | taskflagname = taskname | 842 | Creates/updates a stamp for a given task |
809 | if taskname.endswith("_setscene") and taskname != "do_setscene": | 843 | """ |
810 | taskflagname = taskname.replace("_setscene", "") | 844 | mcfn = d.getVar('BB_FILENAME') |
811 | |||
812 | if file_name: | ||
813 | stamp = d.stampclean[file_name] | ||
814 | extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" | ||
815 | else: | ||
816 | stamp = d.getVar('STAMPCLEAN') | ||
817 | file_name = d.getVar('BB_FILENAME') | ||
818 | extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or "" | ||
819 | 845 | ||
820 | if not stamp: | 846 | make_stamp_mcfn(task, mcfn) |
821 | return [] | ||
822 | 847 | ||
823 | cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo) | 848 | # If we're in task context, write out a signature file for each task |
849 | # as it completes | ||
850 | if not task.endswith("_setscene"): | ||
851 | stampbase = bb.parse.siggen.stampfile_base(mcfn) | ||
852 | bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True) | ||
824 | 853 | ||
825 | return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")] | ||
826 | 854 | ||
827 | def make_stamp(task, d, file_name = None): | 855 | def find_stale_stamps(task, mcfn): |
828 | """ | 856 | current = bb.parse.siggen.stampfile_mcfn(task, mcfn) |
829 | Creates/updates a stamp for a given task | 857 | current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn) |
830 | (d can be a data dict or dataCache) | 858 | cleanmask = _get_cleanmask(task, mcfn) |
831 | """ | 859 | found = [] |
832 | cleanmask = stamp_cleanmask_internal(task, d, file_name) | ||
833 | for mask in cleanmask: | 860 | for mask in cleanmask: |
834 | for name in glob.glob(mask): | 861 | for name in glob.glob(mask): |
835 | # Preserve sigdata files in the stamps directory | ||
836 | if "sigdata" in name or "sigbasedata" in name: | 862 | if "sigdata" in name or "sigbasedata" in name: |
837 | continue | 863 | continue |
838 | # Preserve taint files in the stamps directory | ||
839 | if name.endswith('.taint'): | 864 | if name.endswith('.taint'): |
840 | continue | 865 | continue |
841 | os.unlink(name) | 866 | if name == current or name == current2: |
842 | 867 | continue | |
843 | stamp = stamp_internal(task, d, file_name) | 868 | logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2)) |
844 | # Remove the file and recreate to force timestamp | 869 | found.append(name) |
845 | # change on broken NFS filesystems | 870 | return found |
846 | if stamp: | ||
847 | bb.utils.remove(stamp) | ||
848 | open(stamp, "w").close() | ||
849 | |||
850 | # If we're in task context, write out a signature file for each task | ||
851 | # as it completes | ||
852 | if not task.endswith("_setscene") and task != "do_setscene" and not file_name: | ||
853 | stampbase = stamp_internal(task, d, None, True) | ||
854 | file_name = d.getVar('BB_FILENAME') | ||
855 | bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True) | ||
856 | |||
857 | def del_stamp(task, d, file_name = None): | ||
858 | """ | ||
859 | Removes a stamp for a given task | ||
860 | (d can be a data dict or dataCache) | ||
861 | """ | ||
862 | stamp = stamp_internal(task, d, file_name) | ||
863 | bb.utils.remove(stamp) | ||
864 | 871 | ||
865 | def write_taint(task, d, file_name = None): | 872 | def write_taint(task, d): |
866 | """ | 873 | """ |
867 | Creates a "taint" file which will force the specified task and its | 874 | Creates a "taint" file which will force the specified task and its |
868 | dependents to be re-run the next time by influencing the value of its | 875 | dependents to be re-run the next time by influencing the value of its |
869 | taskhash. | 876 | taskhash. |
870 | (d can be a data dict or dataCache) | ||
871 | """ | 877 | """ |
872 | import uuid | 878 | mcfn = d.getVar('BB_FILENAME') |
873 | if file_name: | 879 | bb.parse.siggen.invalidate_task(task, mcfn) |
874 | taintfn = d.stamp[file_name] + '.' + task + '.taint' | ||
875 | else: | ||
876 | taintfn = d.getVar('STAMP') + '.' + task + '.taint' | ||
877 | bb.utils.mkdirhier(os.path.dirname(taintfn)) | ||
878 | # The specific content of the taint file is not really important, | ||
879 | # we just need it to be random, so a random UUID is used | ||
880 | with open(taintfn, 'w') as taintf: | ||
881 | taintf.write(str(uuid.uuid4())) | ||
882 | |||
883 | def stampfile(taskname, d, file_name = None, noextra=False): | ||
884 | """ | ||
885 | Return the stamp for a given task | ||
886 | (d can be a data dict or dataCache) | ||
887 | """ | ||
888 | return stamp_internal(taskname, d, file_name, noextra=noextra) | ||
889 | 880 | ||
890 | def add_tasks(tasklist, d): | 881 | def add_tasks(tasklist, d): |
891 | task_deps = d.getVar('_task_deps', False) | 882 | task_deps = d.getVar('_task_deps', False) |
@@ -910,6 +901,11 @@ def add_tasks(tasklist, d): | |||
910 | task_deps[name] = {} | 901 | task_deps[name] = {} |
911 | if name in flags: | 902 | if name in flags: |
912 | deptask = d.expand(flags[name]) | 903 | deptask = d.expand(flags[name]) |
904 | if name in ['noexec', 'fakeroot', 'nostamp']: | ||
905 | if deptask != '1': | ||
906 | bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' " | ||
907 | "will result in the flag not being set. See YP bug #13808.".format(name)) | ||
908 | |||
913 | task_deps[name][task] = deptask | 909 | task_deps[name][task] = deptask |
914 | getTask('mcdepends') | 910 | getTask('mcdepends') |
915 | getTask('depends') | 911 | getTask('depends') |
@@ -1008,6 +1004,8 @@ def tasksbetween(task_start, task_end, d): | |||
1008 | def follow_chain(task, endtask, chain=None): | 1004 | def follow_chain(task, endtask, chain=None): |
1009 | if not chain: | 1005 | if not chain: |
1010 | chain = [] | 1006 | chain = [] |
1007 | if task in chain: | ||
1008 | bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain))) | ||
1011 | chain.append(task) | 1009 | chain.append(task) |
1012 | for othertask in tasks: | 1010 | for othertask in tasks: |
1013 | if othertask == task: | 1011 | if othertask == task: |
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index aea2b8bc11..18d5574a31 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -19,14 +19,16 @@ | |||
19 | import os | 19 | import os |
20 | import logging | 20 | import logging |
21 | import pickle | 21 | import pickle |
22 | from collections import defaultdict, Mapping | 22 | from collections import defaultdict |
23 | from collections.abc import Mapping | ||
23 | import bb.utils | 24 | import bb.utils |
24 | from bb import PrefixLoggerAdapter | 25 | from bb import PrefixLoggerAdapter |
25 | import re | 26 | import re |
27 | import shutil | ||
26 | 28 | ||
27 | logger = logging.getLogger("BitBake.Cache") | 29 | logger = logging.getLogger("BitBake.Cache") |
28 | 30 | ||
29 | __cache_version__ = "154" | 31 | __cache_version__ = "155" |
30 | 32 | ||
31 | def getCacheFile(path, filename, mc, data_hash): | 33 | def getCacheFile(path, filename, mc, data_hash): |
32 | mcspec = '' | 34 | mcspec = '' |
@@ -53,12 +55,12 @@ class RecipeInfoCommon(object): | |||
53 | 55 | ||
54 | @classmethod | 56 | @classmethod |
55 | def pkgvar(cls, var, packages, metadata): | 57 | def pkgvar(cls, var, packages, metadata): |
56 | return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata)) | 58 | return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata)) |
57 | for pkg in packages) | 59 | for pkg in packages) |
58 | 60 | ||
59 | @classmethod | 61 | @classmethod |
60 | def taskvar(cls, var, tasks, metadata): | 62 | def taskvar(cls, var, tasks, metadata): |
61 | return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata)) | 63 | return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata)) |
62 | for task in tasks) | 64 | for task in tasks) |
63 | 65 | ||
64 | @classmethod | 66 | @classmethod |
@@ -103,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon): | |||
103 | 105 | ||
104 | self.tasks = metadata.getVar('__BBTASKS', False) | 106 | self.tasks = metadata.getVar('__BBTASKS', False) |
105 | 107 | ||
106 | self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata) | 108 | self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {} |
107 | self.hashfilename = self.getvar('BB_HASHFILENAME', metadata) | 109 | self.hashfilename = self.getvar('BB_HASHFILENAME', metadata) |
108 | 110 | ||
109 | self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}} | 111 | self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}} |
@@ -126,6 +128,7 @@ class CoreRecipeInfo(RecipeInfoCommon): | |||
126 | self.inherits = self.getvar('__inherit_cache', metadata, expand=False) | 128 | self.inherits = self.getvar('__inherit_cache', metadata, expand=False) |
127 | self.fakerootenv = self.getvar('FAKEROOTENV', metadata) | 129 | self.fakerootenv = self.getvar('FAKEROOTENV', metadata) |
128 | self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) | 130 | self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) |
131 | self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata) | ||
129 | self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) | 132 | self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) |
130 | self.extradepsfunc = self.getvar('calculate_extra_depends', metadata) | 133 | self.extradepsfunc = self.getvar('calculate_extra_depends', metadata) |
131 | 134 | ||
@@ -163,6 +166,7 @@ class CoreRecipeInfo(RecipeInfoCommon): | |||
163 | cachedata.fakerootenv = {} | 166 | cachedata.fakerootenv = {} |
164 | cachedata.fakerootnoenv = {} | 167 | cachedata.fakerootnoenv = {} |
165 | cachedata.fakerootdirs = {} | 168 | cachedata.fakerootdirs = {} |
169 | cachedata.fakerootlogs = {} | ||
166 | cachedata.extradepsfunc = {} | 170 | cachedata.extradepsfunc = {} |
167 | 171 | ||
168 | def add_cacheData(self, cachedata, fn): | 172 | def add_cacheData(self, cachedata, fn): |
@@ -212,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon): | |||
212 | 216 | ||
213 | # Collect files we may need for possible world-dep | 217 | # Collect files we may need for possible world-dep |
214 | # calculations | 218 | # calculations |
215 | if not self.not_world: | 219 | if not bb.utils.to_boolean(self.not_world): |
216 | cachedata.possible_world.append(fn) | 220 | cachedata.possible_world.append(fn) |
217 | #else: | 221 | #else: |
218 | # logger.debug2("EXCLUDE FROM WORLD: %s", fn) | 222 | # logger.debug2("EXCLUDE FROM WORLD: %s", fn) |
@@ -231,17 +235,116 @@ class CoreRecipeInfo(RecipeInfoCommon): | |||
231 | cachedata.fakerootenv[fn] = self.fakerootenv | 235 | cachedata.fakerootenv[fn] = self.fakerootenv |
232 | cachedata.fakerootnoenv[fn] = self.fakerootnoenv | 236 | cachedata.fakerootnoenv[fn] = self.fakerootnoenv |
233 | cachedata.fakerootdirs[fn] = self.fakerootdirs | 237 | cachedata.fakerootdirs[fn] = self.fakerootdirs |
238 | cachedata.fakerootlogs[fn] = self.fakerootlogs | ||
234 | cachedata.extradepsfunc[fn] = self.extradepsfunc | 239 | cachedata.extradepsfunc[fn] = self.extradepsfunc |
235 | 240 | ||
241 | |||
242 | class SiggenRecipeInfo(RecipeInfoCommon): | ||
243 | __slots__ = () | ||
244 | |||
245 | classname = "SiggenRecipeInfo" | ||
246 | cachefile = "bb_cache_" + classname +".dat" | ||
247 | # we don't want to show this information in graph files so don't set cachefields | ||
248 | #cachefields = [] | ||
249 | |||
250 | def __init__(self, filename, metadata): | ||
251 | self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False) | ||
252 | self.siggen_varvals = metadata.getVar("__siggen_varvals", False) | ||
253 | self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False) | ||
254 | |||
255 | @classmethod | ||
256 | def init_cacheData(cls, cachedata): | ||
257 | cachedata.siggen_taskdeps = {} | ||
258 | cachedata.siggen_gendeps = {} | ||
259 | cachedata.siggen_varvals = {} | ||
260 | |||
261 | def add_cacheData(self, cachedata, fn): | ||
262 | cachedata.siggen_gendeps[fn] = self.siggen_gendeps | ||
263 | cachedata.siggen_varvals[fn] = self.siggen_varvals | ||
264 | cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps | ||
265 | |||
266 | # The siggen variable data is large and impacts: | ||
267 | # - bitbake's overall memory usage | ||
268 | # - the amount of data sent over IPC between parsing processes and the server | ||
269 | # - the size of the cache files on disk | ||
270 | # - the size of "sigdata" hash information files on disk | ||
271 | # The data consists of strings (some large) or frozenset lists of variables | ||
272 | # As such, we a) deplicate the data here and b) pass references to the object at second | ||
273 | # access (e.g. over IPC or saving into pickle). | ||
274 | |||
275 | store = {} | ||
276 | save_map = {} | ||
277 | save_count = 1 | ||
278 | restore_map = {} | ||
279 | restore_count = {} | ||
280 | |||
281 | @classmethod | ||
282 | def reset(cls): | ||
283 | # Needs to be called before starting new streamed data in a given process | ||
284 | # (e.g. writing out the cache again) | ||
285 | cls.save_map = {} | ||
286 | cls.save_count = 1 | ||
287 | cls.restore_map = {} | ||
288 | |||
289 | @classmethod | ||
290 | def _save(cls, deps): | ||
291 | ret = [] | ||
292 | if not deps: | ||
293 | return deps | ||
294 | for dep in deps: | ||
295 | fs = deps[dep] | ||
296 | if fs is None: | ||
297 | ret.append((dep, None, None)) | ||
298 | elif fs in cls.save_map: | ||
299 | ret.append((dep, None, cls.save_map[fs])) | ||
300 | else: | ||
301 | cls.save_map[fs] = cls.save_count | ||
302 | ret.append((dep, fs, cls.save_count)) | ||
303 | cls.save_count = cls.save_count + 1 | ||
304 | return ret | ||
305 | |||
306 | @classmethod | ||
307 | def _restore(cls, deps, pid): | ||
308 | ret = {} | ||
309 | if not deps: | ||
310 | return deps | ||
311 | if pid not in cls.restore_map: | ||
312 | cls.restore_map[pid] = {} | ||
313 | map = cls.restore_map[pid] | ||
314 | for dep, fs, mapnum in deps: | ||
315 | if fs is None and mapnum is None: | ||
316 | ret[dep] = None | ||
317 | elif fs is None: | ||
318 | ret[dep] = map[mapnum] | ||
319 | else: | ||
320 | try: | ||
321 | fs = cls.store[fs] | ||
322 | except KeyError: | ||
323 | cls.store[fs] = fs | ||
324 | map[mapnum] = fs | ||
325 | ret[dep] = fs | ||
326 | return ret | ||
327 | |||
328 | def __getstate__(self): | ||
329 | ret = {} | ||
330 | for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]: | ||
331 | ret[key] = self._save(self.__dict__[key]) | ||
332 | ret['pid'] = os.getpid() | ||
333 | return ret | ||
334 | |||
335 | def __setstate__(self, state): | ||
336 | pid = state['pid'] | ||
337 | for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]: | ||
338 | setattr(self, key, self._restore(state[key], pid)) | ||
339 | |||
340 | |||
236 | def virtualfn2realfn(virtualfn): | 341 | def virtualfn2realfn(virtualfn): |
237 | """ | 342 | """ |
238 | Convert a virtual file name to a real one + the associated subclass keyword | 343 | Convert a virtual file name to a real one + the associated subclass keyword |
239 | """ | 344 | """ |
240 | mc = "" | 345 | mc = "" |
241 | if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: | 346 | if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: |
242 | elems = virtualfn.split(':') | 347 | (_, mc, virtualfn) = virtualfn.split(':', 2) |
243 | mc = elems[1] | ||
244 | virtualfn = ":".join(elems[2:]) | ||
245 | 348 | ||
246 | fn = virtualfn | 349 | fn = virtualfn |
247 | cls = "" | 350 | cls = "" |
@@ -264,7 +367,7 @@ def realfn2virtual(realfn, cls, mc): | |||
264 | 367 | ||
265 | def variant2virtual(realfn, variant): | 368 | def variant2virtual(realfn, variant): |
266 | """ | 369 | """ |
267 | Convert a real filename + the associated subclass keyword to a virtual filename | 370 | Convert a real filename + a variant to a virtual filename |
268 | """ | 371 | """ |
269 | if variant == "": | 372 | if variant == "": |
270 | return realfn | 373 | return realfn |
@@ -275,96 +378,18 @@ def variant2virtual(realfn, variant): | |||
275 | return "mc:" + elems[1] + ":" + realfn | 378 | return "mc:" + elems[1] + ":" + realfn |
276 | return "virtual:" + variant + ":" + realfn | 379 | return "virtual:" + variant + ":" + realfn |
277 | 380 | ||
278 | def parse_recipe(bb_data, bbfile, appends, mc=''): | 381 | # |
279 | """ | 382 | # Cooker calls cacheValid on its recipe list, then either calls loadCached |
280 | Parse a recipe | 383 | # from it's main thread or parse from separate processes to generate an up to |
281 | """ | 384 | # date cache |
282 | 385 | # | |
283 | chdir_back = False | 386 | class Cache(object): |
284 | |||
285 | bb_data.setVar("__BBMULTICONFIG", mc) | ||
286 | |||
287 | # expand tmpdir to include this topdir | ||
288 | bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "") | ||
289 | bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) | ||
290 | oldpath = os.path.abspath(os.getcwd()) | ||
291 | bb.parse.cached_mtime_noerror(bbfile_loc) | ||
292 | |||
293 | # The ConfHandler first looks if there is a TOPDIR and if not | ||
294 | # then it would call getcwd(). | ||
295 | # Previously, we chdir()ed to bbfile_loc, called the handler | ||
296 | # and finally chdir()ed back, a couple of thousand times. We now | ||
297 | # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet. | ||
298 | if not bb_data.getVar('TOPDIR', False): | ||
299 | chdir_back = True | ||
300 | bb_data.setVar('TOPDIR', bbfile_loc) | ||
301 | try: | ||
302 | if appends: | ||
303 | bb_data.setVar('__BBAPPEND', " ".join(appends)) | ||
304 | bb_data = bb.parse.handle(bbfile, bb_data) | ||
305 | if chdir_back: | ||
306 | os.chdir(oldpath) | ||
307 | return bb_data | ||
308 | except: | ||
309 | if chdir_back: | ||
310 | os.chdir(oldpath) | ||
311 | raise | ||
312 | |||
313 | |||
314 | |||
315 | class NoCache(object): | ||
316 | |||
317 | def __init__(self, databuilder): | ||
318 | self.databuilder = databuilder | ||
319 | self.data = databuilder.data | ||
320 | |||
321 | def loadDataFull(self, virtualfn, appends): | ||
322 | """ | ||
323 | Return a complete set of data for fn. | ||
324 | To do this, we need to parse the file. | ||
325 | """ | ||
326 | logger.debug("Parsing %s (full)" % virtualfn) | ||
327 | (fn, virtual, mc) = virtualfn2realfn(virtualfn) | ||
328 | bb_data = self.load_bbfile(virtualfn, appends, virtonly=True) | ||
329 | return bb_data[virtual] | ||
330 | |||
331 | def load_bbfile(self, bbfile, appends, virtonly = False, mc=None): | ||
332 | """ | ||
333 | Load and parse one .bb build file | ||
334 | Return the data and whether parsing resulted in the file being skipped | ||
335 | """ | ||
336 | |||
337 | if virtonly: | ||
338 | (bbfile, virtual, mc) = virtualfn2realfn(bbfile) | ||
339 | bb_data = self.databuilder.mcdata[mc].createCopy() | ||
340 | bb_data.setVar("__ONLYFINALISE", virtual or "default") | ||
341 | datastores = parse_recipe(bb_data, bbfile, appends, mc) | ||
342 | return datastores | ||
343 | |||
344 | if mc is not None: | ||
345 | bb_data = self.databuilder.mcdata[mc].createCopy() | ||
346 | return parse_recipe(bb_data, bbfile, appends, mc) | ||
347 | |||
348 | bb_data = self.data.createCopy() | ||
349 | datastores = parse_recipe(bb_data, bbfile, appends) | ||
350 | |||
351 | for mc in self.databuilder.mcdata: | ||
352 | if not mc: | ||
353 | continue | ||
354 | bb_data = self.databuilder.mcdata[mc].createCopy() | ||
355 | newstores = parse_recipe(bb_data, bbfile, appends, mc) | ||
356 | for ns in newstores: | ||
357 | datastores["mc:%s:%s" % (mc, ns)] = newstores[ns] | ||
358 | |||
359 | return datastores | ||
360 | |||
361 | class Cache(NoCache): | ||
362 | """ | 387 | """ |
363 | BitBake Cache implementation | 388 | BitBake Cache implementation |
364 | """ | 389 | """ |
365 | def __init__(self, databuilder, mc, data_hash, caches_array): | 390 | def __init__(self, databuilder, mc, data_hash, caches_array): |
366 | super().__init__(databuilder) | 391 | self.databuilder = databuilder |
367 | data = databuilder.data | 392 | self.data = databuilder.data |
368 | 393 | ||
369 | # Pass caches_array information into Cache Constructor | 394 | # Pass caches_array information into Cache Constructor |
370 | # It will be used later for deciding whether we | 395 | # It will be used later for deciding whether we |
@@ -372,7 +397,7 @@ class Cache(NoCache): | |||
372 | self.mc = mc | 397 | self.mc = mc |
373 | self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) | 398 | self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) |
374 | self.caches_array = caches_array | 399 | self.caches_array = caches_array |
375 | self.cachedir = data.getVar("CACHE") | 400 | self.cachedir = self.data.getVar("CACHE") |
376 | self.clean = set() | 401 | self.clean = set() |
377 | self.checked = set() | 402 | self.checked = set() |
378 | self.depends_cache = {} | 403 | self.depends_cache = {} |
@@ -382,20 +407,12 @@ class Cache(NoCache): | |||
382 | self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') | 407 | self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') |
383 | 408 | ||
384 | if self.cachedir in [None, '']: | 409 | if self.cachedir in [None, '']: |
385 | self.has_cache = False | 410 | bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use") |
386 | self.logger.info("Not using a cache. " | ||
387 | "Set CACHE = <directory> to enable.") | ||
388 | return | ||
389 | |||
390 | self.has_cache = True | ||
391 | 411 | ||
392 | def getCacheFile(self, cachefile): | 412 | def getCacheFile(self, cachefile): |
393 | return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash) | 413 | return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash) |
394 | 414 | ||
395 | def prepare_cache(self, progress): | 415 | def prepare_cache(self, progress): |
396 | if not self.has_cache: | ||
397 | return 0 | ||
398 | |||
399 | loaded = 0 | 416 | loaded = 0 |
400 | 417 | ||
401 | self.cachefile = self.getCacheFile("bb_cache.dat") | 418 | self.cachefile = self.getCacheFile("bb_cache.dat") |
@@ -434,9 +451,6 @@ class Cache(NoCache): | |||
434 | return loaded | 451 | return loaded |
435 | 452 | ||
436 | def cachesize(self): | 453 | def cachesize(self): |
437 | if not self.has_cache: | ||
438 | return 0 | ||
439 | |||
440 | cachesize = 0 | 454 | cachesize = 0 |
441 | for cache_class in self.caches_array: | 455 | for cache_class in self.caches_array: |
442 | cachefile = self.getCacheFile(cache_class.cachefile) | 456 | cachefile = self.getCacheFile(cache_class.cachefile) |
@@ -498,11 +512,11 @@ class Cache(NoCache): | |||
498 | 512 | ||
499 | return len(self.depends_cache) | 513 | return len(self.depends_cache) |
500 | 514 | ||
501 | def parse(self, filename, appends): | 515 | def parse(self, filename, appends, layername): |
502 | """Parse the specified filename, returning the recipe information""" | 516 | """Parse the specified filename, returning the recipe information""" |
503 | self.logger.debug("Parsing %s", filename) | 517 | self.logger.debug("Parsing %s", filename) |
504 | infos = [] | 518 | infos = [] |
505 | datastores = self.load_bbfile(filename, appends, mc=self.mc) | 519 | datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername) |
506 | depends = [] | 520 | depends = [] |
507 | variants = [] | 521 | variants = [] |
508 | # Process the "real" fn last so we can store variants list | 522 | # Process the "real" fn last so we can store variants list |
@@ -524,43 +538,19 @@ class Cache(NoCache): | |||
524 | 538 | ||
525 | return infos | 539 | return infos |
526 | 540 | ||
527 | def load(self, filename, appends): | 541 | def loadCached(self, filename, appends): |
528 | """Obtain the recipe information for the specified filename, | 542 | """Obtain the recipe information for the specified filename, |
529 | using cached values if available, otherwise parsing. | 543 | using cached values. |
530 | 544 | """ | |
531 | Note that if it does parse to obtain the info, it will not | ||
532 | automatically add the information to the cache or to your | ||
533 | CacheData. Use the add or add_info method to do so after | ||
534 | running this, or use loadData instead.""" | ||
535 | cached = self.cacheValid(filename, appends) | ||
536 | if cached: | ||
537 | infos = [] | ||
538 | # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo] | ||
539 | info_array = self.depends_cache[filename] | ||
540 | for variant in info_array[0].variants: | ||
541 | virtualfn = variant2virtual(filename, variant) | ||
542 | infos.append((virtualfn, self.depends_cache[virtualfn])) | ||
543 | else: | ||
544 | return self.parse(filename, appends, configdata, self.caches_array) | ||
545 | |||
546 | return cached, infos | ||
547 | |||
548 | def loadData(self, fn, appends, cacheData): | ||
549 | """Load the recipe info for the specified filename, | ||
550 | parsing and adding to the cache if necessary, and adding | ||
551 | the recipe information to the supplied CacheData instance.""" | ||
552 | skipped, virtuals = 0, 0 | ||
553 | 545 | ||
554 | cached, infos = self.load(fn, appends) | 546 | infos = [] |
555 | for virtualfn, info_array in infos: | 547 | # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo] |
556 | if info_array[0].skipped: | 548 | info_array = self.depends_cache[filename] |
557 | self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason) | 549 | for variant in info_array[0].variants: |
558 | skipped += 1 | 550 | virtualfn = variant2virtual(filename, variant) |
559 | else: | 551 | infos.append((virtualfn, self.depends_cache[virtualfn])) |
560 | self.add_info(virtualfn, info_array, cacheData, not cached) | ||
561 | virtuals += 1 | ||
562 | 552 | ||
563 | return cached, skipped, virtuals | 553 | return infos |
564 | 554 | ||
565 | def cacheValid(self, fn, appends): | 555 | def cacheValid(self, fn, appends): |
566 | """ | 556 | """ |
@@ -569,10 +559,6 @@ class Cache(NoCache): | |||
569 | """ | 559 | """ |
570 | if fn not in self.checked: | 560 | if fn not in self.checked: |
571 | self.cacheValidUpdate(fn, appends) | 561 | self.cacheValidUpdate(fn, appends) |
572 | |||
573 | # Is cache enabled? | ||
574 | if not self.has_cache: | ||
575 | return False | ||
576 | if fn in self.clean: | 562 | if fn in self.clean: |
577 | return True | 563 | return True |
578 | return False | 564 | return False |
@@ -582,10 +568,6 @@ class Cache(NoCache): | |||
582 | Is the cache valid for fn? | 568 | Is the cache valid for fn? |
583 | Make thorough (slower) checks including timestamps. | 569 | Make thorough (slower) checks including timestamps. |
584 | """ | 570 | """ |
585 | # Is cache enabled? | ||
586 | if not self.has_cache: | ||
587 | return False | ||
588 | |||
589 | self.checked.add(fn) | 571 | self.checked.add(fn) |
590 | 572 | ||
591 | # File isn't in depends_cache | 573 | # File isn't in depends_cache |
@@ -636,7 +618,7 @@ class Cache(NoCache): | |||
636 | for f in flist: | 618 | for f in flist: |
637 | if not f: | 619 | if not f: |
638 | continue | 620 | continue |
639 | f, exist = f.split(":") | 621 | f, exist = f.rsplit(":", 1) |
640 | if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): | 622 | if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): |
641 | self.logger.debug2("%s's file checksum list file %s changed", | 623 | self.logger.debug2("%s's file checksum list file %s changed", |
642 | fn, f) | 624 | fn, f) |
@@ -692,10 +674,6 @@ class Cache(NoCache): | |||
692 | Save the cache | 674 | Save the cache |
693 | Called from the parser when complete (or exiting) | 675 | Called from the parser when complete (or exiting) |
694 | """ | 676 | """ |
695 | |||
696 | if not self.has_cache: | ||
697 | return | ||
698 | |||
699 | if self.cacheclean: | 677 | if self.cacheclean: |
700 | self.logger.debug2("Cache is clean, not saving.") | 678 | self.logger.debug2("Cache is clean, not saving.") |
701 | return | 679 | return |
@@ -716,6 +694,7 @@ class Cache(NoCache): | |||
716 | p.dump(info) | 694 | p.dump(info) |
717 | 695 | ||
718 | del self.depends_cache | 696 | del self.depends_cache |
697 | SiggenRecipeInfo.reset() | ||
719 | 698 | ||
720 | @staticmethod | 699 | @staticmethod |
721 | def mtime(cachefile): | 700 | def mtime(cachefile): |
@@ -738,26 +717,11 @@ class Cache(NoCache): | |||
738 | if watcher: | 717 | if watcher: |
739 | watcher(info_array[0].file_depends) | 718 | watcher(info_array[0].file_depends) |
740 | 719 | ||
741 | if not self.has_cache: | ||
742 | return | ||
743 | |||
744 | if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: | 720 | if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: |
745 | if parsed: | 721 | if parsed: |
746 | self.cacheclean = False | 722 | self.cacheclean = False |
747 | self.depends_cache[filename] = info_array | 723 | self.depends_cache[filename] = info_array |
748 | 724 | ||
749 | def add(self, file_name, data, cacheData, parsed=None): | ||
750 | """ | ||
751 | Save data we need into the cache | ||
752 | """ | ||
753 | |||
754 | realfn = virtualfn2realfn(file_name)[0] | ||
755 | |||
756 | info_array = [] | ||
757 | for cache_class in self.caches_array: | ||
758 | info_array.append(cache_class(realfn, data)) | ||
759 | self.add_info(file_name, info_array, cacheData, parsed) | ||
760 | |||
761 | class MulticonfigCache(Mapping): | 725 | class MulticonfigCache(Mapping): |
762 | def __init__(self, databuilder, data_hash, caches_array): | 726 | def __init__(self, databuilder, data_hash, caches_array): |
763 | def progress(p): | 727 | def progress(p): |
@@ -794,6 +758,7 @@ class MulticonfigCache(Mapping): | |||
794 | loaded = 0 | 758 | loaded = 0 |
795 | 759 | ||
796 | for c in self.__caches.values(): | 760 | for c in self.__caches.values(): |
761 | SiggenRecipeInfo.reset() | ||
797 | loaded += c.prepare_cache(progress) | 762 | loaded += c.prepare_cache(progress) |
798 | previous_progress = current_progress | 763 | previous_progress = current_progress |
799 | 764 | ||
@@ -871,11 +836,10 @@ class MultiProcessCache(object): | |||
871 | self.cachedata = self.create_cachedata() | 836 | self.cachedata = self.create_cachedata() |
872 | self.cachedata_extras = self.create_cachedata() | 837 | self.cachedata_extras = self.create_cachedata() |
873 | 838 | ||
874 | def init_cache(self, d, cache_file_name=None): | 839 | def init_cache(self, cachedir, cache_file_name=None): |
875 | cachedir = (d.getVar("PERSISTENT_DIR") or | 840 | if not cachedir: |
876 | d.getVar("CACHE")) | ||
877 | if cachedir in [None, '']: | ||
878 | return | 841 | return |
842 | |||
879 | bb.utils.mkdirhier(cachedir) | 843 | bb.utils.mkdirhier(cachedir) |
880 | self.cachefile = os.path.join(cachedir, | 844 | self.cachefile = os.path.join(cachedir, |
881 | cache_file_name or self.__class__.cache_file_name) | 845 | cache_file_name or self.__class__.cache_file_name) |
@@ -906,6 +870,10 @@ class MultiProcessCache(object): | |||
906 | if not self.cachefile: | 870 | if not self.cachefile: |
907 | return | 871 | return |
908 | 872 | ||
873 | have_data = any(self.cachedata_extras) | ||
874 | if not have_data: | ||
875 | return | ||
876 | |||
909 | glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) | 877 | glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) |
910 | 878 | ||
911 | i = os.getpid() | 879 | i = os.getpid() |
@@ -940,6 +908,8 @@ class MultiProcessCache(object): | |||
940 | 908 | ||
941 | data = self.cachedata | 909 | data = self.cachedata |
942 | 910 | ||
911 | have_data = False | ||
912 | |||
943 | for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: | 913 | for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: |
944 | f = os.path.join(os.path.dirname(self.cachefile), f) | 914 | f = os.path.join(os.path.dirname(self.cachefile), f) |
945 | try: | 915 | try: |
@@ -954,12 +924,14 @@ class MultiProcessCache(object): | |||
954 | os.unlink(f) | 924 | os.unlink(f) |
955 | continue | 925 | continue |
956 | 926 | ||
927 | have_data = True | ||
957 | self.merge_data(extradata, data) | 928 | self.merge_data(extradata, data) |
958 | os.unlink(f) | 929 | os.unlink(f) |
959 | 930 | ||
960 | with open(self.cachefile, "wb") as f: | 931 | if have_data: |
961 | p = pickle.Pickler(f, -1) | 932 | with open(self.cachefile, "wb") as f: |
962 | p.dump([data, self.__class__.CACHE_VERSION]) | 933 | p = pickle.Pickler(f, -1) |
934 | p.dump([data, self.__class__.CACHE_VERSION]) | ||
963 | 935 | ||
964 | bb.utils.unlockfile(glf) | 936 | bb.utils.unlockfile(glf) |
965 | 937 | ||
@@ -1015,3 +987,11 @@ class SimpleCache(object): | |||
1015 | p.dump([data, self.cacheversion]) | 987 | p.dump([data, self.cacheversion]) |
1016 | 988 | ||
1017 | bb.utils.unlockfile(glf) | 989 | bb.utils.unlockfile(glf) |
990 | |||
991 | def copyfile(self, target): | ||
992 | if not self.cachefile: | ||
993 | return | ||
994 | |||
995 | glf = bb.utils.lockfile(self.cachefile + ".lock") | ||
996 | shutil.copy(self.cachefile, target) | ||
997 | bb.utils.unlockfile(glf) | ||
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py index 1d50a26426..557793d366 100644 --- a/bitbake/lib/bb/checksum.py +++ b/bitbake/lib/bb/checksum.py | |||
@@ -11,10 +11,13 @@ import os | |||
11 | import stat | 11 | import stat |
12 | import bb.utils | 12 | import bb.utils |
13 | import logging | 13 | import logging |
14 | import re | ||
14 | from bb.cache import MultiProcessCache | 15 | from bb.cache import MultiProcessCache |
15 | 16 | ||
16 | logger = logging.getLogger("BitBake.Cache") | 17 | logger = logging.getLogger("BitBake.Cache") |
17 | 18 | ||
19 | filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') | ||
20 | |||
18 | # mtime cache (non-persistent) | 21 | # mtime cache (non-persistent) |
19 | # based upon the assumption that files do not change during bitbake run | 22 | # based upon the assumption that files do not change during bitbake run |
20 | class FileMtimeCache(object): | 23 | class FileMtimeCache(object): |
@@ -50,6 +53,7 @@ class FileChecksumCache(MultiProcessCache): | |||
50 | MultiProcessCache.__init__(self) | 53 | MultiProcessCache.__init__(self) |
51 | 54 | ||
52 | def get_checksum(self, f): | 55 | def get_checksum(self, f): |
56 | f = os.path.normpath(f) | ||
53 | entry = self.cachedata[0].get(f) | 57 | entry = self.cachedata[0].get(f) |
54 | cmtime = self.mtime_cache.cached_mtime(f) | 58 | cmtime = self.mtime_cache.cached_mtime(f) |
55 | if entry: | 59 | if entry: |
@@ -84,22 +88,36 @@ class FileChecksumCache(MultiProcessCache): | |||
84 | return None | 88 | return None |
85 | return checksum | 89 | return checksum |
86 | 90 | ||
91 | # | ||
92 | # Changing the format of file-checksums is problematic as both OE and Bitbake have | ||
93 | # knowledge of them. We need to encode a new piece of data, the portion of the path | ||
94 | # we care about from a checksum perspective. This means that files that change subdirectory | ||
95 | # are tracked by the task hashes. To do this, we do something horrible and put a "/./" into | ||
96 | # the path. The filesystem handles it but it gives us a marker to know which subsection | ||
97 | # of the path to cache. | ||
98 | # | ||
87 | def checksum_dir(pth): | 99 | def checksum_dir(pth): |
88 | # Handle directories recursively | 100 | # Handle directories recursively |
89 | if pth == "/": | 101 | if pth == "/": |
90 | bb.fatal("Refusing to checksum /") | 102 | bb.fatal("Refusing to checksum /") |
103 | pth = pth.rstrip("/") | ||
91 | dirchecksums = [] | 104 | dirchecksums = [] |
92 | for root, dirs, files in os.walk(pth, topdown=True): | 105 | for root, dirs, files in os.walk(pth, topdown=True): |
93 | [dirs.remove(d) for d in list(dirs) if d in localdirsexclude] | 106 | [dirs.remove(d) for d in list(dirs) if d in localdirsexclude] |
94 | for name in files: | 107 | for name in files: |
95 | fullpth = os.path.join(root, name) | 108 | fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, ".")) |
96 | checksum = checksum_file(fullpth) | 109 | checksum = checksum_file(fullpth) |
97 | if checksum: | 110 | if checksum: |
98 | dirchecksums.append((fullpth, checksum)) | 111 | dirchecksums.append((fullpth, checksum)) |
99 | return dirchecksums | 112 | return dirchecksums |
100 | 113 | ||
101 | checksums = [] | 114 | checksums = [] |
102 | for pth in filelist.split(): | 115 | for pth in filelist_regex.split(filelist): |
116 | if not pth: | ||
117 | continue | ||
118 | pth = pth.strip() | ||
119 | if not pth: | ||
120 | continue | ||
103 | exist = pth.split(":")[1] | 121 | exist = pth.split(":")[1] |
104 | if exist == "False": | 122 | if exist == "False": |
105 | continue | 123 | continue |
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py index 25a7ac69d3..2e8b7ced3c 100644 --- a/bitbake/lib/bb/codeparser.py +++ b/bitbake/lib/bb/codeparser.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -25,6 +27,7 @@ import ast | |||
25 | import sys | 27 | import sys |
26 | import codegen | 28 | import codegen |
27 | import logging | 29 | import logging |
30 | import inspect | ||
28 | import bb.pysh as pysh | 31 | import bb.pysh as pysh |
29 | import bb.utils, bb.data | 32 | import bb.utils, bb.data |
30 | import hashlib | 33 | import hashlib |
@@ -56,10 +59,40 @@ def check_indent(codestr): | |||
56 | 59 | ||
57 | return codestr | 60 | return codestr |
58 | 61 | ||
59 | # A custom getstate/setstate using tuples is actually worth 15% cachesize by | 62 | modulecode_deps = {} |
60 | # avoiding duplication of the attribute names! | ||
61 | 63 | ||
64 | def add_module_functions(fn, functions, namespace): | ||
65 | import os | ||
66 | fstat = os.stat(fn) | ||
67 | fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime) | ||
68 | for f in functions: | ||
69 | name = "%s.%s" % (namespace, f) | ||
70 | parser = PythonParser(name, logger) | ||
71 | try: | ||
72 | parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f) | ||
73 | #bb.warn("Cached %s" % f) | ||
74 | except KeyError: | ||
75 | lines, lineno = inspect.getsourcelines(functions[f]) | ||
76 | src = "".join(lines) | ||
77 | parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f) | ||
78 | #bb.warn("Not cached %s" % f) | ||
79 | execs = parser.execs.copy() | ||
80 | # Expand internal module exec references | ||
81 | for e in parser.execs: | ||
82 | if e in functions: | ||
83 | execs.remove(e) | ||
84 | execs.add(namespace + "." + e) | ||
85 | modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()] | ||
86 | #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains)) | ||
87 | |||
88 | def update_module_dependencies(d): | ||
89 | for mod in modulecode_deps: | ||
90 | excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split()) | ||
91 | if excludes: | ||
92 | modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]] | ||
62 | 93 | ||
94 | # A custom getstate/setstate using tuples is actually worth 15% cachesize by | ||
95 | # avoiding duplication of the attribute names! | ||
63 | class SetCache(object): | 96 | class SetCache(object): |
64 | def __init__(self): | 97 | def __init__(self): |
65 | self.setcache = {} | 98 | self.setcache = {} |
@@ -152,12 +185,12 @@ class CodeParserCache(MultiProcessCache): | |||
152 | self.shellcachelines[h] = cacheline | 185 | self.shellcachelines[h] = cacheline |
153 | return cacheline | 186 | return cacheline |
154 | 187 | ||
155 | def init_cache(self, d): | 188 | def init_cache(self, cachedir): |
156 | # Check if we already have the caches | 189 | # Check if we already have the caches |
157 | if self.pythoncache: | 190 | if self.pythoncache: |
158 | return | 191 | return |
159 | 192 | ||
160 | MultiProcessCache.init_cache(self, d) | 193 | MultiProcessCache.init_cache(self, cachedir) |
161 | 194 | ||
162 | # cachedata gets re-assigned in the parent | 195 | # cachedata gets re-assigned in the parent |
163 | self.pythoncache = self.cachedata[0] | 196 | self.pythoncache = self.cachedata[0] |
@@ -169,8 +202,8 @@ class CodeParserCache(MultiProcessCache): | |||
169 | 202 | ||
170 | codeparsercache = CodeParserCache() | 203 | codeparsercache = CodeParserCache() |
171 | 204 | ||
172 | def parser_cache_init(d): | 205 | def parser_cache_init(cachedir): |
173 | codeparsercache.init_cache(d) | 206 | codeparsercache.init_cache(cachedir) |
174 | 207 | ||
175 | def parser_cache_save(): | 208 | def parser_cache_save(): |
176 | codeparsercache.save_extras() | 209 | codeparsercache.save_extras() |
@@ -195,6 +228,10 @@ class BufferedLogger(Logger): | |||
195 | self.target.handle(record) | 228 | self.target.handle(record) |
196 | self.buffer = [] | 229 | self.buffer = [] |
197 | 230 | ||
231 | class DummyLogger(): | ||
232 | def flush(self): | ||
233 | return | ||
234 | |||
198 | class PythonParser(): | 235 | class PythonParser(): |
199 | getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional") | 236 | getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional") |
200 | getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") | 237 | getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") |
@@ -212,26 +249,26 @@ class PythonParser(): | |||
212 | funcstr = codegen.to_source(func) | 249 | funcstr = codegen.to_source(func) |
213 | argstr = codegen.to_source(arg) | 250 | argstr = codegen.to_source(arg) |
214 | except TypeError: | 251 | except TypeError: |
215 | self.log.debug(2, 'Failed to convert function and argument to source form') | 252 | self.log.debug2('Failed to convert function and argument to source form') |
216 | else: | 253 | else: |
217 | self.log.debug(1, self.unhandled_message % (funcstr, argstr)) | 254 | self.log.debug(self.unhandled_message % (funcstr, argstr)) |
218 | 255 | ||
219 | def visit_Call(self, node): | 256 | def visit_Call(self, node): |
220 | name = self.called_node_name(node.func) | 257 | name = self.called_node_name(node.func) |
221 | if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): | 258 | if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): |
222 | if isinstance(node.args[0], ast.Str): | 259 | if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str): |
223 | varname = node.args[0].s | 260 | varname = node.args[0].value |
224 | if name in self.containsfuncs and isinstance(node.args[1], ast.Str): | 261 | if name in self.containsfuncs and isinstance(node.args[1], ast.Constant): |
225 | if varname not in self.contains: | 262 | if varname not in self.contains: |
226 | self.contains[varname] = set() | 263 | self.contains[varname] = set() |
227 | self.contains[varname].add(node.args[1].s) | 264 | self.contains[varname].add(node.args[1].value) |
228 | elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str): | 265 | elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant): |
229 | if varname not in self.contains: | 266 | if varname not in self.contains: |
230 | self.contains[varname] = set() | 267 | self.contains[varname] = set() |
231 | self.contains[varname].update(node.args[1].s.split()) | 268 | self.contains[varname].update(node.args[1].value.split()) |
232 | elif name.endswith(self.getvarflags): | 269 | elif name.endswith(self.getvarflags): |
233 | if isinstance(node.args[1], ast.Str): | 270 | if isinstance(node.args[1], ast.Constant): |
234 | self.references.add('%s[%s]' % (varname, node.args[1].s)) | 271 | self.references.add('%s[%s]' % (varname, node.args[1].value)) |
235 | else: | 272 | else: |
236 | self.warn(node.func, node.args[1]) | 273 | self.warn(node.func, node.args[1]) |
237 | else: | 274 | else: |
@@ -239,8 +276,8 @@ class PythonParser(): | |||
239 | else: | 276 | else: |
240 | self.warn(node.func, node.args[0]) | 277 | self.warn(node.func, node.args[0]) |
241 | elif name and name.endswith(".expand"): | 278 | elif name and name.endswith(".expand"): |
242 | if isinstance(node.args[0], ast.Str): | 279 | if isinstance(node.args[0], ast.Constant): |
243 | value = node.args[0].s | 280 | value = node.args[0].value |
244 | d = bb.data.init() | 281 | d = bb.data.init() |
245 | parser = d.expandWithRefs(value, self.name) | 282 | parser = d.expandWithRefs(value, self.name) |
246 | self.references |= parser.references | 283 | self.references |= parser.references |
@@ -250,8 +287,8 @@ class PythonParser(): | |||
250 | self.contains[varname] = set() | 287 | self.contains[varname] = set() |
251 | self.contains[varname] |= parser.contains[varname] | 288 | self.contains[varname] |= parser.contains[varname] |
252 | elif name in self.execfuncs: | 289 | elif name in self.execfuncs: |
253 | if isinstance(node.args[0], ast.Str): | 290 | if isinstance(node.args[0], ast.Constant): |
254 | self.var_execs.add(node.args[0].s) | 291 | self.var_execs.add(node.args[0].value) |
255 | else: | 292 | else: |
256 | self.warn(node.func, node.args[0]) | 293 | self.warn(node.func, node.args[0]) |
257 | elif name and isinstance(node.func, (ast.Name, ast.Attribute)): | 294 | elif name and isinstance(node.func, (ast.Name, ast.Attribute)): |
@@ -276,16 +313,24 @@ class PythonParser(): | |||
276 | self.contains = {} | 313 | self.contains = {} |
277 | self.execs = set() | 314 | self.execs = set() |
278 | self.references = set() | 315 | self.references = set() |
279 | self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) | 316 | self._log = log |
317 | # Defer init as expensive | ||
318 | self.log = DummyLogger() | ||
280 | 319 | ||
281 | self.unhandled_message = "in call of %s, argument '%s' is not a string literal" | 320 | self.unhandled_message = "in call of %s, argument '%s' is not a string literal" |
282 | self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) | 321 | self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) |
283 | 322 | ||
284 | def parse_python(self, node, lineno=0, filename="<string>"): | 323 | # For the python module code it is expensive to have the function text so it is |
285 | if not node or not node.strip(): | 324 | # uses a different fixedhash to cache against. We can take the hit on obtaining the |
325 | # text if it isn't in the cache. | ||
326 | def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None): | ||
327 | if not fixedhash and (not node or not node.strip()): | ||
286 | return | 328 | return |
287 | 329 | ||
288 | h = bbhash(str(node)) | 330 | if fixedhash: |
331 | h = fixedhash | ||
332 | else: | ||
333 | h = bbhash(str(node)) | ||
289 | 334 | ||
290 | if h in codeparsercache.pythoncache: | 335 | if h in codeparsercache.pythoncache: |
291 | self.references = set(codeparsercache.pythoncache[h].refs) | 336 | self.references = set(codeparsercache.pythoncache[h].refs) |
@@ -303,6 +348,12 @@ class PythonParser(): | |||
303 | self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) | 348 | self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) |
304 | return | 349 | return |
305 | 350 | ||
351 | if fixedhash and not node: | ||
352 | raise KeyError | ||
353 | |||
354 | # Need to parse so take the hit on the real log buffer | ||
355 | self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log) | ||
356 | |||
306 | # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though | 357 | # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though |
307 | node = "\n" * int(lineno) + node | 358 | node = "\n" * int(lineno) + node |
308 | code = compile(check_indent(str(node)), filename, "exec", | 359 | code = compile(check_indent(str(node)), filename, "exec", |
@@ -321,7 +372,11 @@ class ShellParser(): | |||
321 | self.funcdefs = set() | 372 | self.funcdefs = set() |
322 | self.allexecs = set() | 373 | self.allexecs = set() |
323 | self.execs = set() | 374 | self.execs = set() |
324 | self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) | 375 | self._name = name |
376 | self._log = log | ||
377 | # Defer init as expensive | ||
378 | self.log = DummyLogger() | ||
379 | |||
325 | self.unhandled_template = "unable to handle non-literal command '%s'" | 380 | self.unhandled_template = "unable to handle non-literal command '%s'" |
326 | self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) | 381 | self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) |
327 | 382 | ||
@@ -340,6 +395,9 @@ class ShellParser(): | |||
340 | self.execs = set(codeparsercache.shellcacheextras[h].execs) | 395 | self.execs = set(codeparsercache.shellcacheextras[h].execs) |
341 | return self.execs | 396 | return self.execs |
342 | 397 | ||
398 | # Need to parse so take the hit on the real log buffer | ||
399 | self.log = BufferedLogger('BitBake.Data.%s' % self._name, logging.DEBUG, self._log) | ||
400 | |||
343 | self._parse_shell(value) | 401 | self._parse_shell(value) |
344 | self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) | 402 | self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) |
345 | 403 | ||
@@ -450,7 +508,7 @@ class ShellParser(): | |||
450 | 508 | ||
451 | cmd = word[1] | 509 | cmd = word[1] |
452 | if cmd.startswith("$"): | 510 | if cmd.startswith("$"): |
453 | self.log.debug(1, self.unhandled_template % cmd) | 511 | self.log.debug(self.unhandled_template % cmd) |
454 | elif cmd == "eval": | 512 | elif cmd == "eval": |
455 | command = " ".join(word for _, word in words[1:]) | 513 | command = " ".join(word for _, word in words[1:]) |
456 | self._parse_shell(command) | 514 | self._parse_shell(command) |
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py index dd77cdd6e2..1fcb9bf14c 100644 --- a/bitbake/lib/bb/command.py +++ b/bitbake/lib/bb/command.py | |||
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue | |||
20 | 20 | ||
21 | from collections import OrderedDict, defaultdict | 21 | from collections import OrderedDict, defaultdict |
22 | 22 | ||
23 | import io | ||
23 | import bb.event | 24 | import bb.event |
24 | import bb.cooker | 25 | import bb.cooker |
25 | import bb.remotedata | 26 | import bb.remotedata |
@@ -50,23 +51,32 @@ class Command: | |||
50 | """ | 51 | """ |
51 | A queue of asynchronous commands for bitbake | 52 | A queue of asynchronous commands for bitbake |
52 | """ | 53 | """ |
53 | def __init__(self, cooker): | 54 | def __init__(self, cooker, process_server): |
54 | self.cooker = cooker | 55 | self.cooker = cooker |
55 | self.cmds_sync = CommandsSync() | 56 | self.cmds_sync = CommandsSync() |
56 | self.cmds_async = CommandsAsync() | 57 | self.cmds_async = CommandsAsync() |
57 | self.remotedatastores = None | 58 | self.remotedatastores = None |
58 | 59 | ||
59 | # FIXME Add lock for this | 60 | self.process_server = process_server |
61 | # Access with locking using process_server.{get/set/clear}_async_cmd() | ||
60 | self.currentAsyncCommand = None | 62 | self.currentAsyncCommand = None |
61 | 63 | ||
62 | def runCommand(self, commandline, ro_only = False): | 64 | def runCommand(self, commandline, process_server, ro_only=False): |
63 | command = commandline.pop(0) | 65 | command = commandline.pop(0) |
64 | 66 | ||
65 | # Ensure cooker is ready for commands | 67 | # Ensure cooker is ready for commands |
66 | if command != "updateConfig" and command != "setFeatures": | 68 | if command not in ["updateConfig", "setFeatures", "ping"]: |
67 | self.cooker.init_configdata() | 69 | try: |
68 | if not self.remotedatastores: | 70 | self.cooker.init_configdata() |
69 | self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker) | 71 | if not self.remotedatastores: |
72 | self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker) | ||
73 | except (Exception, SystemExit) as exc: | ||
74 | import traceback | ||
75 | if isinstance(exc, bb.BBHandledException): | ||
76 | # We need to start returning real exceptions here. Until we do, we can't | ||
77 | # tell if an exception is an instance of bb.BBHandledException | ||
78 | return None, "bb.BBHandledException()\n" + traceback.format_exc() | ||
79 | return None, traceback.format_exc() | ||
70 | 80 | ||
71 | if hasattr(CommandsSync, command): | 81 | if hasattr(CommandsSync, command): |
72 | # Can run synchronous commands straight away | 82 | # Can run synchronous commands straight away |
@@ -75,7 +85,6 @@ class Command: | |||
75 | if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'): | 85 | if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'): |
76 | return None, "Not able to execute not readonly commands in readonly mode" | 86 | return None, "Not able to execute not readonly commands in readonly mode" |
77 | try: | 87 | try: |
78 | self.cooker.process_inotify_updates() | ||
79 | if getattr(command_method, 'needconfig', True): | 88 | if getattr(command_method, 'needconfig', True): |
80 | self.cooker.updateCacheSync() | 89 | self.cooker.updateCacheSync() |
81 | result = command_method(self, commandline) | 90 | result = command_method(self, commandline) |
@@ -90,24 +99,23 @@ class Command: | |||
90 | return None, traceback.format_exc() | 99 | return None, traceback.format_exc() |
91 | else: | 100 | else: |
92 | return result, None | 101 | return result, None |
93 | if self.currentAsyncCommand is not None: | ||
94 | return None, "Busy (%s in progress)" % self.currentAsyncCommand[0] | ||
95 | if command not in CommandsAsync.__dict__: | 102 | if command not in CommandsAsync.__dict__: |
96 | return None, "No such command" | 103 | return None, "No such command" |
97 | self.currentAsyncCommand = (command, commandline) | 104 | if not process_server.set_async_cmd((command, commandline)): |
98 | self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker) | 105 | return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0] |
106 | self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server) | ||
99 | return True, None | 107 | return True, None |
100 | 108 | ||
101 | def runAsyncCommand(self): | 109 | def runAsyncCommand(self, _, process_server, halt): |
102 | try: | 110 | try: |
103 | self.cooker.process_inotify_updates() | ||
104 | if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown): | 111 | if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown): |
105 | # updateCache will trigger a shutdown of the parser | 112 | # updateCache will trigger a shutdown of the parser |
106 | # and then raise BBHandledException triggering an exit | 113 | # and then raise BBHandledException triggering an exit |
107 | self.cooker.updateCache() | 114 | self.cooker.updateCache() |
108 | return False | 115 | return bb.server.process.idleFinish("Cooker in error state") |
109 | if self.currentAsyncCommand is not None: | 116 | cmd = process_server.get_async_cmd() |
110 | (command, options) = self.currentAsyncCommand | 117 | if cmd is not None: |
118 | (command, options) = cmd | ||
111 | commandmethod = getattr(CommandsAsync, command) | 119 | commandmethod = getattr(CommandsAsync, command) |
112 | needcache = getattr( commandmethod, "needcache" ) | 120 | needcache = getattr( commandmethod, "needcache" ) |
113 | if needcache and self.cooker.state != bb.cooker.state.running: | 121 | if needcache and self.cooker.state != bb.cooker.state.running: |
@@ -117,24 +125,21 @@ class Command: | |||
117 | commandmethod(self.cmds_async, self, options) | 125 | commandmethod(self.cmds_async, self, options) |
118 | return False | 126 | return False |
119 | else: | 127 | else: |
120 | return False | 128 | return bb.server.process.idleFinish("Nothing to do, no async command?") |
121 | except KeyboardInterrupt as exc: | 129 | except KeyboardInterrupt as exc: |
122 | self.finishAsyncCommand("Interrupted") | 130 | return bb.server.process.idleFinish("Interrupted") |
123 | return False | ||
124 | except SystemExit as exc: | 131 | except SystemExit as exc: |
125 | arg = exc.args[0] | 132 | arg = exc.args[0] |
126 | if isinstance(arg, str): | 133 | if isinstance(arg, str): |
127 | self.finishAsyncCommand(arg) | 134 | return bb.server.process.idleFinish(arg) |
128 | else: | 135 | else: |
129 | self.finishAsyncCommand("Exited with %s" % arg) | 136 | return bb.server.process.idleFinish("Exited with %s" % arg) |
130 | return False | ||
131 | except Exception as exc: | 137 | except Exception as exc: |
132 | import traceback | 138 | import traceback |
133 | if isinstance(exc, bb.BBHandledException): | 139 | if isinstance(exc, bb.BBHandledException): |
134 | self.finishAsyncCommand("") | 140 | return bb.server.process.idleFinish("") |
135 | else: | 141 | else: |
136 | self.finishAsyncCommand(traceback.format_exc()) | 142 | return bb.server.process.idleFinish(traceback.format_exc()) |
137 | return False | ||
138 | 143 | ||
139 | def finishAsyncCommand(self, msg=None, code=None): | 144 | def finishAsyncCommand(self, msg=None, code=None): |
140 | if msg or msg == "": | 145 | if msg or msg == "": |
@@ -143,8 +148,8 @@ class Command: | |||
143 | bb.event.fire(CommandExit(code), self.cooker.data) | 148 | bb.event.fire(CommandExit(code), self.cooker.data) |
144 | else: | 149 | else: |
145 | bb.event.fire(CommandCompleted(), self.cooker.data) | 150 | bb.event.fire(CommandCompleted(), self.cooker.data) |
146 | self.currentAsyncCommand = None | ||
147 | self.cooker.finishcommand() | 151 | self.cooker.finishcommand() |
152 | self.process_server.clear_async_cmd() | ||
148 | 153 | ||
149 | def reset(self): | 154 | def reset(self): |
150 | if self.remotedatastores: | 155 | if self.remotedatastores: |
@@ -157,6 +162,14 @@ class CommandsSync: | |||
157 | These must not influence any running synchronous command. | 162 | These must not influence any running synchronous command. |
158 | """ | 163 | """ |
159 | 164 | ||
165 | def ping(self, command, params): | ||
166 | """ | ||
167 | Allow a UI to check the server is still alive | ||
168 | """ | ||
169 | return "Still alive!" | ||
170 | ping.needconfig = False | ||
171 | ping.readonly = True | ||
172 | |||
160 | def stateShutdown(self, command, params): | 173 | def stateShutdown(self, command, params): |
161 | """ | 174 | """ |
162 | Trigger cooker 'shutdown' mode | 175 | Trigger cooker 'shutdown' mode |
@@ -294,6 +307,11 @@ class CommandsSync: | |||
294 | return ret | 307 | return ret |
295 | getLayerPriorities.readonly = True | 308 | getLayerPriorities.readonly = True |
296 | 309 | ||
310 | def revalidateCaches(self, command, params): | ||
311 | """Called by UI clients when metadata may have changed""" | ||
312 | command.cooker.revalidateCaches() | ||
313 | parseConfiguration.needconfig = False | ||
314 | |||
297 | def getRecipes(self, command, params): | 315 | def getRecipes(self, command, params): |
298 | try: | 316 | try: |
299 | mc = params[0] | 317 | mc = params[0] |
@@ -500,6 +518,17 @@ class CommandsSync: | |||
500 | d = command.remotedatastores[dsindex].varhistory | 518 | d = command.remotedatastores[dsindex].varhistory |
501 | return getattr(d, method)(*args, **kwargs) | 519 | return getattr(d, method)(*args, **kwargs) |
502 | 520 | ||
521 | def dataStoreConnectorVarHistCmdEmit(self, command, params): | ||
522 | dsindex = params[0] | ||
523 | var = params[1] | ||
524 | oval = params[2] | ||
525 | val = params[3] | ||
526 | d = command.remotedatastores[params[4]] | ||
527 | |||
528 | o = io.StringIO() | ||
529 | command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d) | ||
530 | return o.getvalue() | ||
531 | |||
503 | def dataStoreConnectorIncHistCmd(self, command, params): | 532 | def dataStoreConnectorIncHistCmd(self, command, params): |
504 | dsindex = params[0] | 533 | dsindex = params[0] |
505 | method = params[1] | 534 | method = params[1] |
@@ -521,8 +550,8 @@ class CommandsSync: | |||
521 | and return a datastore object representing the environment | 550 | and return a datastore object representing the environment |
522 | for the recipe. | 551 | for the recipe. |
523 | """ | 552 | """ |
524 | fn = params[0] | 553 | virtualfn = params[0] |
525 | mc = bb.runqueue.mc_from_tid(fn) | 554 | (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn) |
526 | appends = params[1] | 555 | appends = params[1] |
527 | appendlist = params[2] | 556 | appendlist = params[2] |
528 | if len(params) > 3: | 557 | if len(params) > 3: |
@@ -537,6 +566,7 @@ class CommandsSync: | |||
537 | appendfiles = command.cooker.collections[mc].get_file_appends(fn) | 566 | appendfiles = command.cooker.collections[mc].get_file_appends(fn) |
538 | else: | 567 | else: |
539 | appendfiles = [] | 568 | appendfiles = [] |
569 | layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2] | ||
540 | # We are calling bb.cache locally here rather than on the server, | 570 | # We are calling bb.cache locally here rather than on the server, |
541 | # but that's OK because it doesn't actually need anything from | 571 | # but that's OK because it doesn't actually need anything from |
542 | # the server barring the global datastore (which we have a remote | 572 | # the server barring the global datastore (which we have a remote |
@@ -544,11 +574,10 @@ class CommandsSync: | |||
544 | if config_data: | 574 | if config_data: |
545 | # We have to use a different function here if we're passing in a datastore | 575 | # We have to use a different function here if we're passing in a datastore |
546 | # NOTE: we took a copy above, so we don't do it here again | 576 | # NOTE: we took a copy above, so we don't do it here again |
547 | envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)[''] | 577 | envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls] |
548 | else: | 578 | else: |
549 | # Use the standard path | 579 | # Use the standard path |
550 | parser = bb.cache.NoCache(command.cooker.databuilder) | 580 | envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername) |
551 | envdata = parser.loadDataFull(fn, appendfiles) | ||
552 | idx = command.remotedatastores.store(envdata) | 581 | idx = command.remotedatastores.store(envdata) |
553 | return DataStoreConnectionHandle(idx) | 582 | return DataStoreConnectionHandle(idx) |
554 | parseRecipeFile.readonly = True | 583 | parseRecipeFile.readonly = True |
@@ -647,6 +676,16 @@ class CommandsAsync: | |||
647 | command.finishAsyncCommand() | 676 | command.finishAsyncCommand() |
648 | findFilesMatchingInDir.needcache = False | 677 | findFilesMatchingInDir.needcache = False |
649 | 678 | ||
679 | def testCookerCommandEvent(self, command, params): | ||
680 | """ | ||
681 | Dummy command used by OEQA selftest to test tinfoil without IO | ||
682 | """ | ||
683 | pattern = params[0] | ||
684 | |||
685 | command.cooker.testCookerCommandEvent(pattern) | ||
686 | command.finishAsyncCommand() | ||
687 | testCookerCommandEvent.needcache = False | ||
688 | |||
650 | def findConfigFilePath(self, command, params): | 689 | def findConfigFilePath(self, command, params): |
651 | """ | 690 | """ |
652 | Find the path of the requested configuration file | 691 | Find the path of the requested configuration file |
@@ -711,7 +750,7 @@ class CommandsAsync: | |||
711 | """ | 750 | """ |
712 | event = params[0] | 751 | event = params[0] |
713 | bb.event.fire(eval(event), command.cooker.data) | 752 | bb.event.fire(eval(event), command.cooker.data) |
714 | command.currentAsyncCommand = None | 753 | process_server.clear_async_cmd() |
715 | triggerEvent.needcache = False | 754 | triggerEvent.needcache = False |
716 | 755 | ||
717 | def resetCooker(self, command, params): | 756 | def resetCooker(self, command, params): |
@@ -738,7 +777,14 @@ class CommandsAsync: | |||
738 | (mc, pn) = bb.runqueue.split_mc(params[0]) | 777 | (mc, pn) = bb.runqueue.split_mc(params[0]) |
739 | taskname = params[1] | 778 | taskname = params[1] |
740 | sigs = params[2] | 779 | sigs = params[2] |
780 | bb.siggen.check_siggen_version(bb.siggen) | ||
741 | res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc]) | 781 | res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc]) |
742 | bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc]) | 782 | bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc]) |
743 | command.finishAsyncCommand() | 783 | command.finishAsyncCommand() |
744 | findSigInfo.needcache = False | 784 | findSigInfo.needcache = False |
785 | |||
786 | def getTaskSignatures(self, command, params): | ||
787 | res = command.cooker.getTaskSignatures(params[0], params[1]) | ||
788 | bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data) | ||
789 | command.finishAsyncCommand() | ||
790 | getTaskSignatures.needcache = True | ||
diff --git a/bitbake/lib/bb/compress/_pipecompress.py b/bitbake/lib/bb/compress/_pipecompress.py new file mode 100644 index 0000000000..4a403d62cf --- /dev/null +++ b/bitbake/lib/bb/compress/_pipecompress.py | |||
@@ -0,0 +1,196 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | # Helper library to implement streaming compression and decompression using an | ||
7 | # external process | ||
8 | # | ||
9 | # This library should be used directly by end users; a wrapper library for the | ||
10 | # specific compression tool should be created | ||
11 | |||
12 | import builtins | ||
13 | import io | ||
14 | import os | ||
15 | import subprocess | ||
16 | |||
17 | |||
18 | def open_wrap( | ||
19 | cls, filename, mode="rb", *, encoding=None, errors=None, newline=None, **kwargs | ||
20 | ): | ||
21 | """ | ||
22 | Open a compressed file in binary or text mode. | ||
23 | |||
24 | Users should not call this directly. A specific compression library can use | ||
25 | this helper to provide it's own "open" command | ||
26 | |||
27 | The filename argument can be an actual filename (a str or bytes object), or | ||
28 | an existing file object to read from or write to. | ||
29 | |||
30 | The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for | ||
31 | binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is | ||
32 | "rb". | ||
33 | |||
34 | For binary mode, this function is equivalent to the cls constructor: | ||
35 | cls(filename, mode). In this case, the encoding, errors and newline | ||
36 | arguments must not be provided. | ||
37 | |||
38 | For text mode, a cls object is created, and wrapped in an | ||
39 | io.TextIOWrapper instance with the specified encoding, error handling | ||
40 | behavior, and line ending(s). | ||
41 | """ | ||
42 | if "t" in mode: | ||
43 | if "b" in mode: | ||
44 | raise ValueError("Invalid mode: %r" % (mode,)) | ||
45 | else: | ||
46 | if encoding is not None: | ||
47 | raise ValueError("Argument 'encoding' not supported in binary mode") | ||
48 | if errors is not None: | ||
49 | raise ValueError("Argument 'errors' not supported in binary mode") | ||
50 | if newline is not None: | ||
51 | raise ValueError("Argument 'newline' not supported in binary mode") | ||
52 | |||
53 | file_mode = mode.replace("t", "") | ||
54 | if isinstance(filename, (str, bytes, os.PathLike, int)): | ||
55 | binary_file = cls(filename, file_mode, **kwargs) | ||
56 | elif hasattr(filename, "read") or hasattr(filename, "write"): | ||
57 | binary_file = cls(None, file_mode, fileobj=filename, **kwargs) | ||
58 | else: | ||
59 | raise TypeError("filename must be a str or bytes object, or a file") | ||
60 | |||
61 | if "t" in mode: | ||
62 | return io.TextIOWrapper( | ||
63 | binary_file, encoding, errors, newline, write_through=True | ||
64 | ) | ||
65 | else: | ||
66 | return binary_file | ||
67 | |||
68 | |||
69 | class CompressionError(OSError): | ||
70 | pass | ||
71 | |||
72 | |||
73 | class PipeFile(io.RawIOBase): | ||
74 | """ | ||
75 | Class that implements generically piping to/from a compression program | ||
76 | |||
77 | Derived classes should add the function get_compress() and get_decompress() | ||
78 | that return the required commands. Input will be piped into stdin and the | ||
79 | (de)compressed output should be written to stdout, e.g.: | ||
80 | |||
81 | class FooFile(PipeCompressionFile): | ||
82 | def get_decompress(self): | ||
83 | return ["fooc", "--decompress", "--stdout"] | ||
84 | |||
85 | def get_compress(self): | ||
86 | return ["fooc", "--compress", "--stdout"] | ||
87 | |||
88 | """ | ||
89 | |||
90 | READ = 0 | ||
91 | WRITE = 1 | ||
92 | |||
93 | def __init__(self, filename=None, mode="rb", *, stderr=None, fileobj=None): | ||
94 | if "t" in mode or "U" in mode: | ||
95 | raise ValueError("Invalid mode: {!r}".format(mode)) | ||
96 | |||
97 | if not "b" in mode: | ||
98 | mode += "b" | ||
99 | |||
100 | if mode.startswith("r"): | ||
101 | self.mode = self.READ | ||
102 | elif mode.startswith("w"): | ||
103 | self.mode = self.WRITE | ||
104 | else: | ||
105 | raise ValueError("Invalid mode %r" % mode) | ||
106 | |||
107 | if fileobj is not None: | ||
108 | self.fileobj = fileobj | ||
109 | else: | ||
110 | self.fileobj = builtins.open(filename, mode or "rb") | ||
111 | |||
112 | if self.mode == self.READ: | ||
113 | self.p = subprocess.Popen( | ||
114 | self.get_decompress(), | ||
115 | stdin=self.fileobj, | ||
116 | stdout=subprocess.PIPE, | ||
117 | stderr=stderr, | ||
118 | close_fds=True, | ||
119 | ) | ||
120 | self.pipe = self.p.stdout | ||
121 | else: | ||
122 | self.p = subprocess.Popen( | ||
123 | self.get_compress(), | ||
124 | stdin=subprocess.PIPE, | ||
125 | stdout=self.fileobj, | ||
126 | stderr=stderr, | ||
127 | close_fds=True, | ||
128 | ) | ||
129 | self.pipe = self.p.stdin | ||
130 | |||
131 | self.__closed = False | ||
132 | |||
133 | def _check_process(self): | ||
134 | if self.p is None: | ||
135 | return | ||
136 | |||
137 | returncode = self.p.wait() | ||
138 | if returncode: | ||
139 | raise CompressionError("Process died with %d" % returncode) | ||
140 | self.p = None | ||
141 | |||
142 | def close(self): | ||
143 | if self.closed: | ||
144 | return | ||
145 | |||
146 | self.pipe.close() | ||
147 | if self.p is not None: | ||
148 | self._check_process() | ||
149 | self.fileobj.close() | ||
150 | |||
151 | self.__closed = True | ||
152 | |||
153 | @property | ||
154 | def closed(self): | ||
155 | return self.__closed | ||
156 | |||
157 | def fileno(self): | ||
158 | return self.pipe.fileno() | ||
159 | |||
160 | def flush(self): | ||
161 | self.pipe.flush() | ||
162 | |||
163 | def isatty(self): | ||
164 | return self.pipe.isatty() | ||
165 | |||
166 | def readable(self): | ||
167 | return self.mode == self.READ | ||
168 | |||
169 | def writable(self): | ||
170 | return self.mode == self.WRITE | ||
171 | |||
172 | def readinto(self, b): | ||
173 | if self.mode != self.READ: | ||
174 | import errno | ||
175 | |||
176 | raise OSError( | ||
177 | errno.EBADF, "read() on write-only %s object" % self.__class__.__name__ | ||
178 | ) | ||
179 | size = self.pipe.readinto(b) | ||
180 | if size == 0: | ||
181 | self._check_process() | ||
182 | return size | ||
183 | |||
184 | def write(self, data): | ||
185 | if self.mode != self.WRITE: | ||
186 | import errno | ||
187 | |||
188 | raise OSError( | ||
189 | errno.EBADF, "write() on read-only %s object" % self.__class__.__name__ | ||
190 | ) | ||
191 | data = self.pipe.write(data) | ||
192 | |||
193 | if not data: | ||
194 | self._check_process() | ||
195 | |||
196 | return data | ||
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py new file mode 100644 index 0000000000..88b0989322 --- /dev/null +++ b/bitbake/lib/bb/compress/lz4.py | |||
@@ -0,0 +1,19 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import bb.compress._pipecompress | ||
8 | |||
9 | |||
10 | def open(*args, **kwargs): | ||
11 | return bb.compress._pipecompress.open_wrap(LZ4File, *args, **kwargs) | ||
12 | |||
13 | |||
14 | class LZ4File(bb.compress._pipecompress.PipeFile): | ||
15 | def get_compress(self): | ||
16 | return ["lz4c", "-z", "-c"] | ||
17 | |||
18 | def get_decompress(self): | ||
19 | return ["lz4c", "-d", "-c"] | ||
diff --git a/bitbake/lib/bb/compress/zstd.py b/bitbake/lib/bb/compress/zstd.py new file mode 100644 index 0000000000..cdbbe9d60f --- /dev/null +++ b/bitbake/lib/bb/compress/zstd.py | |||
@@ -0,0 +1,30 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import bb.compress._pipecompress | ||
8 | import shutil | ||
9 | |||
10 | |||
11 | def open(*args, **kwargs): | ||
12 | return bb.compress._pipecompress.open_wrap(ZstdFile, *args, **kwargs) | ||
13 | |||
14 | |||
15 | class ZstdFile(bb.compress._pipecompress.PipeFile): | ||
16 | def __init__(self, *args, num_threads=1, compresslevel=3, **kwargs): | ||
17 | self.num_threads = num_threads | ||
18 | self.compresslevel = compresslevel | ||
19 | super().__init__(*args, **kwargs) | ||
20 | |||
21 | def _get_zstd(self): | ||
22 | if self.num_threads == 1 or not shutil.which("pzstd"): | ||
23 | return ["zstd"] | ||
24 | return ["pzstd", "-p", "%d" % self.num_threads] | ||
25 | |||
26 | def get_compress(self): | ||
27 | return self._get_zstd() + ["-c", "-%d" % self.compresslevel] | ||
28 | |||
29 | def get_decompress(self): | ||
30 | return self._get_zstd() + ["-d", "-c"] | ||
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py index f4ab797edf..c5bfef55d6 100644 --- a/bitbake/lib/bb/cooker.py +++ b/bitbake/lib/bb/cooker.py | |||
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time | |||
13 | import itertools | 13 | import itertools |
14 | import logging | 14 | import logging |
15 | import multiprocessing | 15 | import multiprocessing |
16 | import sre_constants | ||
17 | import threading | 16 | import threading |
18 | from io import StringIO, UnsupportedOperation | 17 | from io import StringIO, UnsupportedOperation |
19 | from contextlib import closing | 18 | from contextlib import closing |
@@ -23,7 +22,6 @@ from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, | |||
23 | import queue | 22 | import queue |
24 | import signal | 23 | import signal |
25 | import prserv.serv | 24 | import prserv.serv |
26 | import pyinotify | ||
27 | import json | 25 | import json |
28 | import pickle | 26 | import pickle |
29 | import codecs | 27 | import codecs |
@@ -81,7 +79,7 @@ class SkippedPackage: | |||
81 | 79 | ||
82 | 80 | ||
83 | class CookerFeatures(object): | 81 | class CookerFeatures(object): |
84 | _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3)) | 82 | _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4)) |
85 | 83 | ||
86 | def __init__(self): | 84 | def __init__(self): |
87 | self._features=set() | 85 | self._features=set() |
@@ -104,12 +102,15 @@ class CookerFeatures(object): | |||
104 | 102 | ||
105 | class EventWriter: | 103 | class EventWriter: |
106 | def __init__(self, cooker, eventfile): | 104 | def __init__(self, cooker, eventfile): |
107 | self.file_inited = None | ||
108 | self.cooker = cooker | 105 | self.cooker = cooker |
109 | self.eventfile = eventfile | 106 | self.eventfile = eventfile |
110 | self.event_queue = [] | 107 | self.event_queue = [] |
111 | 108 | ||
112 | def write_event(self, event): | 109 | def write_variables(self): |
110 | with open(self.eventfile, "a") as f: | ||
111 | f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])})) | ||
112 | |||
113 | def send(self, event): | ||
113 | with open(self.eventfile, "a") as f: | 114 | with open(self.eventfile, "a") as f: |
114 | try: | 115 | try: |
115 | str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') | 116 | str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') |
@@ -119,28 +120,6 @@ class EventWriter: | |||
119 | import traceback | 120 | import traceback |
120 | print(err, traceback.format_exc()) | 121 | print(err, traceback.format_exc()) |
121 | 122 | ||
122 | def send(self, event): | ||
123 | if self.file_inited: | ||
124 | # we have the file, just write the event | ||
125 | self.write_event(event) | ||
126 | else: | ||
127 | # init on bb.event.BuildStarted | ||
128 | name = "%s.%s" % (event.__module__, event.__class__.__name__) | ||
129 | if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"): | ||
130 | with open(self.eventfile, "w") as f: | ||
131 | f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])})) | ||
132 | |||
133 | self.file_inited = True | ||
134 | |||
135 | # write pending events | ||
136 | for evt in self.event_queue: | ||
137 | self.write_event(evt) | ||
138 | |||
139 | # also write the current event | ||
140 | self.write_event(event) | ||
141 | else: | ||
142 | # queue all events until the file is inited | ||
143 | self.event_queue.append(event) | ||
144 | 123 | ||
145 | #============================================================================# | 124 | #============================================================================# |
146 | # BBCooker | 125 | # BBCooker |
@@ -150,8 +129,10 @@ class BBCooker: | |||
150 | Manages one bitbake build run | 129 | Manages one bitbake build run |
151 | """ | 130 | """ |
152 | 131 | ||
153 | def __init__(self, featureSet=None, idleCallBackRegister=None): | 132 | def __init__(self, featureSet=None, server=None): |
154 | self.recipecaches = None | 133 | self.recipecaches = None |
134 | self.baseconfig_valid = False | ||
135 | self.parsecache_valid = False | ||
155 | self.eventlog = None | 136 | self.eventlog = None |
156 | self.skiplist = {} | 137 | self.skiplist = {} |
157 | self.featureset = CookerFeatures() | 138 | self.featureset = CookerFeatures() |
@@ -159,34 +140,22 @@ class BBCooker: | |||
159 | for f in featureSet: | 140 | for f in featureSet: |
160 | self.featureset.setFeature(f) | 141 | self.featureset.setFeature(f) |
161 | 142 | ||
143 | self.orig_syspath = sys.path.copy() | ||
144 | self.orig_sysmodules = [*sys.modules] | ||
145 | |||
162 | self.configuration = bb.cookerdata.CookerConfiguration() | 146 | self.configuration = bb.cookerdata.CookerConfiguration() |
163 | 147 | ||
164 | self.idleCallBackRegister = idleCallBackRegister | 148 | self.process_server = server |
149 | self.idleCallBackRegister = None | ||
150 | self.waitIdle = None | ||
151 | if server: | ||
152 | self.idleCallBackRegister = server.register_idle_function | ||
153 | self.waitIdle = server.wait_for_idle | ||
165 | 154 | ||
166 | bb.debug(1, "BBCooker starting %s" % time.time()) | 155 | bb.debug(1, "BBCooker starting %s" % time.time()) |
167 | sys.stdout.flush() | 156 | |
168 | 157 | self.configwatched = {} | |
169 | self.configwatcher = pyinotify.WatchManager() | 158 | self.parsewatched = {} |
170 | bb.debug(1, "BBCooker pyinotify1 %s" % time.time()) | ||
171 | sys.stdout.flush() | ||
172 | |||
173 | self.configwatcher.bbseen = set() | ||
174 | self.configwatcher.bbwatchedfiles = set() | ||
175 | self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications) | ||
176 | bb.debug(1, "BBCooker pyinotify2 %s" % time.time()) | ||
177 | sys.stdout.flush() | ||
178 | self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \ | ||
179 | pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \ | ||
180 | pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO | ||
181 | self.watcher = pyinotify.WatchManager() | ||
182 | bb.debug(1, "BBCooker pyinotify3 %s" % time.time()) | ||
183 | sys.stdout.flush() | ||
184 | self.watcher.bbseen = set() | ||
185 | self.watcher.bbwatchedfiles = set() | ||
186 | self.notifier = pyinotify.Notifier(self.watcher, self.notifications) | ||
187 | |||
188 | bb.debug(1, "BBCooker pyinotify complete %s" % time.time()) | ||
189 | sys.stdout.flush() | ||
190 | 159 | ||
191 | # If being called by something like tinfoil, we need to clean cached data | 160 | # If being called by something like tinfoil, we need to clean cached data |
192 | # which may now be invalid | 161 | # which may now be invalid |
@@ -197,14 +166,6 @@ class BBCooker: | |||
197 | self.hashserv = None | 166 | self.hashserv = None |
198 | self.hashservaddr = None | 167 | self.hashservaddr = None |
199 | 168 | ||
200 | self.inotify_modified_files = [] | ||
201 | |||
202 | def _process_inotify_updates(server, cooker, abort): | ||
203 | cooker.process_inotify_updates() | ||
204 | return 1.0 | ||
205 | |||
206 | self.idleCallBackRegister(_process_inotify_updates, self) | ||
207 | |||
208 | # TOSTOP must not be set or our children will hang when they output | 169 | # TOSTOP must not be set or our children will hang when they output |
209 | try: | 170 | try: |
210 | fd = sys.stdout.fileno() | 171 | fd = sys.stdout.fileno() |
@@ -218,7 +179,7 @@ class BBCooker: | |||
218 | except UnsupportedOperation: | 179 | except UnsupportedOperation: |
219 | pass | 180 | pass |
220 | 181 | ||
221 | self.command = bb.command.Command(self) | 182 | self.command = bb.command.Command(self, self.process_server) |
222 | self.state = state.initial | 183 | self.state = state.initial |
223 | 184 | ||
224 | self.parser = None | 185 | self.parser = None |
@@ -228,84 +189,37 @@ class BBCooker: | |||
228 | signal.signal(signal.SIGHUP, self.sigterm_exception) | 189 | signal.signal(signal.SIGHUP, self.sigterm_exception) |
229 | 190 | ||
230 | bb.debug(1, "BBCooker startup complete %s" % time.time()) | 191 | bb.debug(1, "BBCooker startup complete %s" % time.time()) |
231 | sys.stdout.flush() | ||
232 | 192 | ||
233 | def init_configdata(self): | 193 | def init_configdata(self): |
234 | if not hasattr(self, "data"): | 194 | if not hasattr(self, "data"): |
235 | self.initConfigurationData() | 195 | self.initConfigurationData() |
236 | bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) | 196 | bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) |
237 | sys.stdout.flush() | ||
238 | self.handlePRServ() | 197 | self.handlePRServ() |
239 | 198 | ||
240 | def process_inotify_updates(self): | 199 | def _baseconfig_set(self, value): |
241 | for n in [self.confignotifier, self.notifier]: | 200 | if value and not self.baseconfig_valid: |
242 | if n.check_events(timeout=0): | 201 | bb.server.process.serverlog("Base config valid") |
243 | # read notified events and enqeue them | 202 | elif not value and self.baseconfig_valid: |
244 | n.read_events() | 203 | bb.server.process.serverlog("Base config invalidated") |
245 | n.process_events() | 204 | self.baseconfig_valid = value |
246 | 205 | ||
247 | def config_notifications(self, event): | 206 | def _parsecache_set(self, value): |
248 | if event.maskname == "IN_Q_OVERFLOW": | 207 | if value and not self.parsecache_valid: |
249 | bb.warn("inotify event queue overflowed, invalidating caches.") | 208 | bb.server.process.serverlog("Parse cache valid") |
250 | self.parsecache_valid = False | 209 | elif not value and self.parsecache_valid: |
251 | self.baseconfig_valid = False | 210 | bb.server.process.serverlog("Parse cache invalidated") |
252 | bb.parse.clear_cache() | 211 | self.parsecache_valid = value |
253 | return | 212 | |
254 | if not event.pathname in self.configwatcher.bbwatchedfiles: | 213 | def add_filewatch(self, deps, configwatcher=False): |
255 | return | 214 | if configwatcher: |
256 | if not event.pathname in self.inotify_modified_files: | 215 | watcher = self.configwatched |
257 | self.inotify_modified_files.append(event.pathname) | 216 | else: |
258 | self.baseconfig_valid = False | 217 | watcher = self.parsewatched |
259 | |||
260 | def notifications(self, event): | ||
261 | if event.maskname == "IN_Q_OVERFLOW": | ||
262 | bb.warn("inotify event queue overflowed, invalidating caches.") | ||
263 | self.parsecache_valid = False | ||
264 | bb.parse.clear_cache() | ||
265 | return | ||
266 | if event.pathname.endswith("bitbake-cookerdaemon.log") \ | ||
267 | or event.pathname.endswith("bitbake.lock"): | ||
268 | return | ||
269 | if not event.pathname in self.inotify_modified_files: | ||
270 | self.inotify_modified_files.append(event.pathname) | ||
271 | self.parsecache_valid = False | ||
272 | 218 | ||
273 | def add_filewatch(self, deps, watcher=None, dirs=False): | ||
274 | if not watcher: | ||
275 | watcher = self.watcher | ||
276 | for i in deps: | 219 | for i in deps: |
277 | watcher.bbwatchedfiles.add(i[0]) | 220 | f = i[0] |
278 | if dirs: | 221 | mtime = i[1] |
279 | f = i[0] | 222 | watcher[f] = mtime |
280 | else: | ||
281 | f = os.path.dirname(i[0]) | ||
282 | if f in watcher.bbseen: | ||
283 | continue | ||
284 | watcher.bbseen.add(f) | ||
285 | watchtarget = None | ||
286 | while True: | ||
287 | # We try and add watches for files that don't exist but if they did, would influence | ||
288 | # the parser. The parent directory of these files may not exist, in which case we need | ||
289 | # to watch any parent that does exist for changes. | ||
290 | try: | ||
291 | watcher.add_watch(f, self.watchmask, quiet=False) | ||
292 | if watchtarget: | ||
293 | watcher.bbwatchedfiles.add(watchtarget) | ||
294 | break | ||
295 | except pyinotify.WatchManagerError as e: | ||
296 | if 'ENOENT' in str(e): | ||
297 | watchtarget = f | ||
298 | f = os.path.dirname(f) | ||
299 | if f in watcher.bbseen: | ||
300 | break | ||
301 | watcher.bbseen.add(f) | ||
302 | continue | ||
303 | if 'ENOSPC' in str(e): | ||
304 | providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?") | ||
305 | providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.") | ||
306 | providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.") | ||
307 | providerlog.error("Root privilege is required to modify max_user_watches.") | ||
308 | raise | ||
309 | 223 | ||
310 | def sigterm_exception(self, signum, stackframe): | 224 | def sigterm_exception(self, signum, stackframe): |
311 | if signum == signal.SIGTERM: | 225 | if signum == signal.SIGTERM: |
@@ -313,6 +227,7 @@ class BBCooker: | |||
313 | elif signum == signal.SIGHUP: | 227 | elif signum == signal.SIGHUP: |
314 | bb.warn("Cooker received SIGHUP, shutting down...") | 228 | bb.warn("Cooker received SIGHUP, shutting down...") |
315 | self.state = state.forceshutdown | 229 | self.state = state.forceshutdown |
230 | bb.event._should_exit.set() | ||
316 | 231 | ||
317 | def setFeatures(self, features): | 232 | def setFeatures(self, features): |
318 | # we only accept a new feature set if we're in state initial, so we can reset without problems | 233 | # we only accept a new feature set if we're in state initial, so we can reset without problems |
@@ -330,6 +245,13 @@ class BBCooker: | |||
330 | self.state = state.initial | 245 | self.state = state.initial |
331 | self.caches_array = [] | 246 | self.caches_array = [] |
332 | 247 | ||
248 | sys.path = self.orig_syspath.copy() | ||
249 | for mod in [*sys.modules]: | ||
250 | if mod not in self.orig_sysmodules: | ||
251 | del sys.modules[mod] | ||
252 | |||
253 | self.configwatched = {} | ||
254 | |||
333 | # Need to preserve BB_CONSOLELOG over resets | 255 | # Need to preserve BB_CONSOLELOG over resets |
334 | consolelog = None | 256 | consolelog = None |
335 | if hasattr(self, "data"): | 257 | if hasattr(self, "data"): |
@@ -338,12 +260,12 @@ class BBCooker: | |||
338 | if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: | 260 | if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: |
339 | self.enableDataTracking() | 261 | self.enableDataTracking() |
340 | 262 | ||
341 | all_extra_cache_names = [] | 263 | caches_name_array = ['bb.cache:CoreRecipeInfo'] |
342 | # We hardcode all known cache types in a single place, here. | 264 | # We hardcode all known cache types in a single place, here. |
343 | if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: | 265 | if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: |
344 | all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") | 266 | caches_name_array.append("bb.cache_extra:HobRecipeInfo") |
345 | 267 | if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset: | |
346 | caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names | 268 | caches_name_array.append("bb.cache:SiggenRecipeInfo") |
347 | 269 | ||
348 | # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! | 270 | # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! |
349 | # This is the entry point, no further check needed! | 271 | # This is the entry point, no further check needed! |
@@ -362,6 +284,10 @@ class BBCooker: | |||
362 | self.data_hash = self.databuilder.data_hash | 284 | self.data_hash = self.databuilder.data_hash |
363 | self.extraconfigdata = {} | 285 | self.extraconfigdata = {} |
364 | 286 | ||
287 | eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG") | ||
288 | if not self.configuration.writeeventlog and eventlog: | ||
289 | self.setupEventLog(eventlog) | ||
290 | |||
365 | if consolelog: | 291 | if consolelog: |
366 | self.data.setVar("BB_CONSOLELOG", consolelog) | 292 | self.data.setVar("BB_CONSOLELOG", consolelog) |
367 | 293 | ||
@@ -371,31 +297,42 @@ class BBCooker: | |||
371 | self.disableDataTracking() | 297 | self.disableDataTracking() |
372 | 298 | ||
373 | for mc in self.databuilder.mcdata.values(): | 299 | for mc in self.databuilder.mcdata.values(): |
374 | mc.renameVar("__depends", "__base_depends") | 300 | self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True) |
375 | self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher) | ||
376 | 301 | ||
377 | self.baseconfig_valid = True | 302 | self._baseconfig_set(True) |
378 | self.parsecache_valid = False | 303 | self._parsecache_set(False) |
379 | 304 | ||
380 | def handlePRServ(self): | 305 | def handlePRServ(self): |
381 | # Setup a PR Server based on the new configuration | 306 | # Setup a PR Server based on the new configuration |
382 | try: | 307 | try: |
383 | self.prhost = prserv.serv.auto_start(self.data) | 308 | self.prhost = prserv.serv.auto_start(self.data) |
384 | except prserv.serv.PRServiceConfigError as e: | 309 | except prserv.serv.PRServiceConfigError as e: |
385 | bb.fatal("Unable to start PR Server, exitting") | 310 | bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log") |
386 | 311 | ||
387 | if self.data.getVar("BB_HASHSERVE") == "auto": | 312 | if self.data.getVar("BB_HASHSERVE") == "auto": |
388 | # Create a new hash server bound to a unix domain socket | 313 | # Create a new hash server bound to a unix domain socket |
389 | if not self.hashserv: | 314 | if not self.hashserv: |
390 | dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" | 315 | dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" |
316 | upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None | ||
317 | if upstream: | ||
318 | import socket | ||
319 | try: | ||
320 | sock = socket.create_connection(upstream.split(":"), 5) | ||
321 | sock.close() | ||
322 | except socket.error as e: | ||
323 | bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s" | ||
324 | % (upstream, repr(e))) | ||
325 | |||
391 | self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") | 326 | self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") |
392 | self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False) | 327 | self.hashserv = hashserv.create_server( |
393 | self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever) | 328 | self.hashservaddr, |
394 | self.hashserv.process.start() | 329 | dbfile, |
395 | self.data.setVar("BB_HASHSERVE", self.hashservaddr) | 330 | sync=False, |
396 | self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr) | 331 | upstream=upstream, |
397 | self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr) | 332 | ) |
333 | self.hashserv.serve_as_process(log_level=logging.WARNING) | ||
398 | for mc in self.databuilder.mcdata: | 334 | for mc in self.databuilder.mcdata: |
335 | self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) | ||
399 | self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) | 336 | self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) |
400 | 337 | ||
401 | bb.parse.init_parser(self.data) | 338 | bb.parse.init_parser(self.data) |
@@ -410,6 +347,29 @@ class BBCooker: | |||
410 | if hasattr(self, "data"): | 347 | if hasattr(self, "data"): |
411 | self.data.disableTracking() | 348 | self.data.disableTracking() |
412 | 349 | ||
350 | def revalidateCaches(self): | ||
351 | bb.parse.clear_cache() | ||
352 | |||
353 | clean = True | ||
354 | for f in self.configwatched: | ||
355 | if not bb.parse.check_mtime(f, self.configwatched[f]): | ||
356 | bb.server.process.serverlog("Found %s changed, invalid cache" % f) | ||
357 | self._baseconfig_set(False) | ||
358 | self._parsecache_set(False) | ||
359 | clean = False | ||
360 | break | ||
361 | |||
362 | if clean: | ||
363 | for f in self.parsewatched: | ||
364 | if not bb.parse.check_mtime(f, self.parsewatched[f]): | ||
365 | bb.server.process.serverlog("Found %s changed, invalid cache" % f) | ||
366 | self._parsecache_set(False) | ||
367 | clean = False | ||
368 | break | ||
369 | |||
370 | if not clean: | ||
371 | bb.parse.BBHandler.cached_statements = {} | ||
372 | |||
413 | def parseConfiguration(self): | 373 | def parseConfiguration(self): |
414 | self.updateCacheSync() | 374 | self.updateCacheSync() |
415 | 375 | ||
@@ -428,8 +388,24 @@ class BBCooker: | |||
428 | self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) | 388 | self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) |
429 | 389 | ||
430 | self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) | 390 | self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) |
431 | 391 | self.collections = {} | |
432 | self.parsecache_valid = False | 392 | for mc in self.multiconfigs: |
393 | self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc) | ||
394 | |||
395 | self._parsecache_set(False) | ||
396 | |||
397 | def setupEventLog(self, eventlog): | ||
398 | if self.eventlog and self.eventlog[0] != eventlog: | ||
399 | bb.event.unregister_UIHhandler(self.eventlog[1]) | ||
400 | self.eventlog = None | ||
401 | if not self.eventlog or self.eventlog[0] != eventlog: | ||
402 | # we log all events to a file if so directed | ||
403 | # register the log file writer as UI Handler | ||
404 | if not os.path.exists(os.path.dirname(eventlog)): | ||
405 | bb.utils.mkdirhier(os.path.dirname(eventlog)) | ||
406 | writer = EventWriter(self, eventlog) | ||
407 | EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event']) | ||
408 | self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer) | ||
433 | 409 | ||
434 | def updateConfigOpts(self, options, environment, cmdline): | 410 | def updateConfigOpts(self, options, environment, cmdline): |
435 | self.ui_cmdline = cmdline | 411 | self.ui_cmdline = cmdline |
@@ -450,14 +426,7 @@ class BBCooker: | |||
450 | setattr(self.configuration, o, options[o]) | 426 | setattr(self.configuration, o, options[o]) |
451 | 427 | ||
452 | if self.configuration.writeeventlog: | 428 | if self.configuration.writeeventlog: |
453 | if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog: | 429 | self.setupEventLog(self.configuration.writeeventlog) |
454 | bb.event.unregister_UIHhandler(self.eventlog[1]) | ||
455 | if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog: | ||
456 | # we log all events to a file if so directed | ||
457 | # register the log file writer as UI Handler | ||
458 | writer = EventWriter(self, self.configuration.writeeventlog) | ||
459 | EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event']) | ||
460 | self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer))) | ||
461 | 430 | ||
462 | bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel | 431 | bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel |
463 | bb.msg.loggerDefaultDomains = self.configuration.debug_domains | 432 | bb.msg.loggerDefaultDomains = self.configuration.debug_domains |
@@ -487,37 +456,37 @@ class BBCooker: | |||
487 | # Now update all the variables not in the datastore to match | 456 | # Now update all the variables not in the datastore to match |
488 | self.configuration.env = environment | 457 | self.configuration.env = environment |
489 | 458 | ||
459 | self.revalidateCaches() | ||
490 | if not clean: | 460 | if not clean: |
491 | logger.debug("Base environment change, triggering reparse") | 461 | logger.debug("Base environment change, triggering reparse") |
492 | self.reset() | 462 | self.reset() |
493 | 463 | ||
494 | def runCommands(self, server, data, abort): | ||
495 | """ | ||
496 | Run any queued asynchronous command | ||
497 | This is done by the idle handler so it runs in true context rather than | ||
498 | tied to any UI. | ||
499 | """ | ||
500 | |||
501 | return self.command.runAsyncCommand() | ||
502 | |||
503 | def showVersions(self): | 464 | def showVersions(self): |
504 | 465 | ||
505 | (latest_versions, preferred_versions) = self.findProviders() | 466 | (latest_versions, preferred_versions, required) = self.findProviders() |
506 | 467 | ||
507 | logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") | 468 | logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version") |
508 | logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") | 469 | logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================") |
509 | 470 | ||
510 | for p in sorted(self.recipecaches[''].pkg_pn): | 471 | for p in sorted(self.recipecaches[''].pkg_pn): |
511 | pref = preferred_versions[p] | 472 | preferred = preferred_versions[p] |
512 | latest = latest_versions[p] | 473 | latest = latest_versions[p] |
474 | requiredstr = "" | ||
475 | preferredstr = "" | ||
476 | if required[p]: | ||
477 | if preferred[0] is not None: | ||
478 | requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2] | ||
479 | else: | ||
480 | bb.fatal("REQUIRED_VERSION of package %s not available" % p) | ||
481 | else: | ||
482 | preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2] | ||
513 | 483 | ||
514 | prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] | ||
515 | lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] | 484 | lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] |
516 | 485 | ||
517 | if pref == latest: | 486 | if preferred == latest: |
518 | prefstr = "" | 487 | preferredstr = "" |
519 | 488 | ||
520 | logger.plain("%-35s %25s %25s", p, lateststr, prefstr) | 489 | logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr) |
521 | 490 | ||
522 | def showEnvironment(self, buildfile=None, pkgs_to_build=None): | 491 | def showEnvironment(self, buildfile=None, pkgs_to_build=None): |
523 | """ | 492 | """ |
@@ -533,6 +502,8 @@ class BBCooker: | |||
533 | if not orig_tracking: | 502 | if not orig_tracking: |
534 | self.enableDataTracking() | 503 | self.enableDataTracking() |
535 | self.reset() | 504 | self.reset() |
505 | # reset() resets to the UI requested value so we have to redo this | ||
506 | self.enableDataTracking() | ||
536 | 507 | ||
537 | def mc_base(p): | 508 | def mc_base(p): |
538 | if p.startswith('mc:'): | 509 | if p.startswith('mc:'): |
@@ -556,21 +527,21 @@ class BBCooker: | |||
556 | if pkgs_to_build[0] in set(ignore.split()): | 527 | if pkgs_to_build[0] in set(ignore.split()): |
557 | bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) | 528 | bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) |
558 | 529 | ||
559 | taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True) | 530 | taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True) |
560 | 531 | ||
561 | mc = runlist[0][0] | 532 | mc = runlist[0][0] |
562 | fn = runlist[0][3] | 533 | fn = runlist[0][3] |
563 | 534 | ||
564 | if fn: | 535 | if fn: |
565 | try: | 536 | try: |
566 | bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) | 537 | layername = self.collections[mc].calc_bbfile_priority(fn)[2] |
567 | envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn)) | 538 | envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername) |
568 | except Exception as e: | 539 | except Exception as e: |
569 | parselog.exception("Unable to read %s", fn) | 540 | parselog.exception("Unable to read %s", fn) |
570 | raise | 541 | raise |
571 | else: | 542 | else: |
572 | if not mc in self.databuilder.mcdata: | 543 | if not mc in self.databuilder.mcdata: |
573 | bb.fatal('Not multiconfig named "%s" found' % mc) | 544 | bb.fatal('No multiconfig named "%s" found' % mc) |
574 | envdata = self.databuilder.mcdata[mc] | 545 | envdata = self.databuilder.mcdata[mc] |
575 | data.expandKeys(envdata) | 546 | data.expandKeys(envdata) |
576 | parse.ast.runAnonFuncs(envdata) | 547 | parse.ast.runAnonFuncs(envdata) |
@@ -585,7 +556,7 @@ class BBCooker: | |||
585 | data.emit_env(env, envdata, True) | 556 | data.emit_env(env, envdata, True) |
586 | logger.plain(env.getvalue()) | 557 | logger.plain(env.getvalue()) |
587 | 558 | ||
588 | # emit the metadata which isnt valid shell | 559 | # emit the metadata which isn't valid shell |
589 | for e in sorted(envdata.keys()): | 560 | for e in sorted(envdata.keys()): |
590 | if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): | 561 | if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): |
591 | logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) | 562 | logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) |
@@ -594,7 +565,7 @@ class BBCooker: | |||
594 | self.disableDataTracking() | 565 | self.disableDataTracking() |
595 | self.reset() | 566 | self.reset() |
596 | 567 | ||
597 | def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False): | 568 | def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False): |
598 | """ | 569 | """ |
599 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | 570 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build |
600 | """ | 571 | """ |
@@ -641,7 +612,7 @@ class BBCooker: | |||
641 | localdata = {} | 612 | localdata = {} |
642 | 613 | ||
643 | for mc in self.multiconfigs: | 614 | for mc in self.multiconfigs: |
644 | taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete) | 615 | taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete) |
645 | localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) | 616 | localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) |
646 | bb.data.expandKeys(localdata[mc]) | 617 | bb.data.expandKeys(localdata[mc]) |
647 | 618 | ||
@@ -690,19 +661,18 @@ class BBCooker: | |||
690 | taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) | 661 | taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) |
691 | mcdeps |= set(taskdata[mc].get_mcdepends()) | 662 | mcdeps |= set(taskdata[mc].get_mcdepends()) |
692 | new = False | 663 | new = False |
693 | for mc in self.multiconfigs: | 664 | for k in mcdeps: |
694 | for k in mcdeps: | 665 | if k in seen: |
695 | if k in seen: | 666 | continue |
696 | continue | 667 | l = k.split(':') |
697 | l = k.split(':') | 668 | depmc = l[2] |
698 | depmc = l[2] | 669 | if depmc not in self.multiconfigs: |
699 | if depmc not in self.multiconfigs: | 670 | bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc)) |
700 | bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc)) | 671 | else: |
701 | else: | 672 | logger.debug("Adding providers for multiconfig dependency %s" % l[3]) |
702 | logger.debug("Adding providers for multiconfig dependency %s" % l[3]) | 673 | taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) |
703 | taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) | 674 | seen.add(k) |
704 | seen.add(k) | 675 | new = True |
705 | new = True | ||
706 | 676 | ||
707 | for mc in self.multiconfigs: | 677 | for mc in self.multiconfigs: |
708 | taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) | 678 | taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) |
@@ -715,7 +685,7 @@ class BBCooker: | |||
715 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | 685 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build |
716 | """ | 686 | """ |
717 | 687 | ||
718 | # We set abort to False here to prevent unbuildable targets raising | 688 | # We set halt to False here to prevent unbuildable targets raising |
719 | # an exception when we're just generating data | 689 | # an exception when we're just generating data |
720 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) | 690 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) |
721 | 691 | ||
@@ -792,7 +762,9 @@ class BBCooker: | |||
792 | for dep in rq.rqdata.runtaskentries[tid].depends: | 762 | for dep in rq.rqdata.runtaskentries[tid].depends: |
793 | (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) | 763 | (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) |
794 | deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] | 764 | deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] |
795 | depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep))) | 765 | if depmc: |
766 | depmc = "mc:" + depmc + ":" | ||
767 | depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep))) | ||
796 | if taskfn not in seen_fns: | 768 | if taskfn not in seen_fns: |
797 | seen_fns.append(taskfn) | 769 | seen_fns.append(taskfn) |
798 | packages = [] | 770 | packages = [] |
@@ -1056,6 +1028,11 @@ class BBCooker: | |||
1056 | if matches: | 1028 | if matches: |
1057 | bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) | 1029 | bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) |
1058 | 1030 | ||
1031 | def testCookerCommandEvent(self, filepattern): | ||
1032 | # Dummy command used by OEQA selftest to test tinfoil without IO | ||
1033 | matches = ["A", "B"] | ||
1034 | bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) | ||
1035 | |||
1059 | def findProviders(self, mc=''): | 1036 | def findProviders(self, mc=''): |
1060 | return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) | 1037 | return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) |
1061 | 1038 | ||
@@ -1063,10 +1040,16 @@ class BBCooker: | |||
1063 | if pn in self.recipecaches[mc].providers: | 1040 | if pn in self.recipecaches[mc].providers: |
1064 | filenames = self.recipecaches[mc].providers[pn] | 1041 | filenames = self.recipecaches[mc].providers[pn] |
1065 | eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) | 1042 | eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) |
1066 | filename = eligible[0] | 1043 | if eligible is not None: |
1044 | filename = eligible[0] | ||
1045 | else: | ||
1046 | filename = None | ||
1067 | return None, None, None, filename | 1047 | return None, None, None, filename |
1068 | elif pn in self.recipecaches[mc].pkg_pn: | 1048 | elif pn in self.recipecaches[mc].pkg_pn: |
1069 | return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) | 1049 | (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) |
1050 | if required and preferred_file is None: | ||
1051 | return None, None, None, None | ||
1052 | return (latest, latest_f, preferred_ver, preferred_file) | ||
1070 | else: | 1053 | else: |
1071 | return None, None, None, None | 1054 | return None, None, None, None |
1072 | 1055 | ||
@@ -1211,15 +1194,15 @@ class BBCooker: | |||
1211 | except bb.utils.VersionStringException as vse: | 1194 | except bb.utils.VersionStringException as vse: |
1212 | bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) | 1195 | bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) |
1213 | if not res: | 1196 | if not res: |
1214 | parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) | 1197 | parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) |
1215 | continue | 1198 | continue |
1216 | else: | 1199 | else: |
1217 | parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) | 1200 | parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) |
1218 | continue | 1201 | continue |
1219 | parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec) | 1202 | parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec) |
1220 | collection_depends[c].append(rec) | 1203 | collection_depends[c].append(rec) |
1221 | else: | 1204 | else: |
1222 | parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) | 1205 | parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) |
1223 | 1206 | ||
1224 | # Recursively work out collection priorities based on dependencies | 1207 | # Recursively work out collection priorities based on dependencies |
1225 | def calc_layer_priority(collection): | 1208 | def calc_layer_priority(collection): |
@@ -1231,7 +1214,7 @@ class BBCooker: | |||
1231 | if depprio > max_depprio: | 1214 | if depprio > max_depprio: |
1232 | max_depprio = depprio | 1215 | max_depprio = depprio |
1233 | max_depprio += 1 | 1216 | max_depprio += 1 |
1234 | parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) | 1217 | parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio) |
1235 | collection_priorities[collection] = max_depprio | 1218 | collection_priorities[collection] = max_depprio |
1236 | 1219 | ||
1237 | # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities | 1220 | # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities |
@@ -1243,7 +1226,7 @@ class BBCooker: | |||
1243 | errors = True | 1226 | errors = True |
1244 | continue | 1227 | continue |
1245 | elif regex == "": | 1228 | elif regex == "": |
1246 | parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) | 1229 | parselog.debug("BBFILE_PATTERN_%s is empty" % c) |
1247 | cre = re.compile('^NULL$') | 1230 | cre = re.compile('^NULL$') |
1248 | errors = False | 1231 | errors = False |
1249 | else: | 1232 | else: |
@@ -1290,8 +1273,8 @@ class BBCooker: | |||
1290 | if bf.startswith("/") or bf.startswith("../"): | 1273 | if bf.startswith("/") or bf.startswith("../"): |
1291 | bf = os.path.abspath(bf) | 1274 | bf = os.path.abspath(bf) |
1292 | 1275 | ||
1293 | self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} | 1276 | collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} |
1294 | filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) | 1277 | filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) |
1295 | try: | 1278 | try: |
1296 | os.stat(bf) | 1279 | os.stat(bf) |
1297 | bf = os.path.abspath(bf) | 1280 | bf = os.path.abspath(bf) |
@@ -1357,7 +1340,8 @@ class BBCooker: | |||
1357 | 1340 | ||
1358 | bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) | 1341 | bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) |
1359 | 1342 | ||
1360 | infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn)) | 1343 | layername = self.collections[mc].calc_bbfile_priority(fn)[2] |
1344 | infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) | ||
1361 | infos = dict(infos) | 1345 | infos = dict(infos) |
1362 | 1346 | ||
1363 | fn = bb.cache.realfn2virtual(fn, cls, mc) | 1347 | fn = bb.cache.realfn2virtual(fn, cls, mc) |
@@ -1383,14 +1367,16 @@ class BBCooker: | |||
1383 | self.recipecaches[mc].rundeps[fn] = defaultdict(list) | 1367 | self.recipecaches[mc].rundeps[fn] = defaultdict(list) |
1384 | self.recipecaches[mc].runrecs[fn] = defaultdict(list) | 1368 | self.recipecaches[mc].runrecs[fn] = defaultdict(list) |
1385 | 1369 | ||
1370 | bb.parse.siggen.setup_datacache(self.recipecaches) | ||
1371 | |||
1386 | # Invalidate task for target if force mode active | 1372 | # Invalidate task for target if force mode active |
1387 | if self.configuration.force: | 1373 | if self.configuration.force: |
1388 | logger.verbose("Invalidate task %s, %s", task, fn) | 1374 | logger.verbose("Invalidate task %s, %s", task, fn) |
1389 | bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn) | 1375 | bb.parse.siggen.invalidate_task(task, fn) |
1390 | 1376 | ||
1391 | # Setup taskdata structure | 1377 | # Setup taskdata structure |
1392 | taskdata = {} | 1378 | taskdata = {} |
1393 | taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort) | 1379 | taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt) |
1394 | taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) | 1380 | taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) |
1395 | 1381 | ||
1396 | if quietlog: | 1382 | if quietlog: |
@@ -1400,17 +1386,20 @@ class BBCooker: | |||
1400 | buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") | 1386 | buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") |
1401 | if fireevents: | 1387 | if fireevents: |
1402 | bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) | 1388 | bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) |
1389 | if self.eventlog: | ||
1390 | self.eventlog[2].write_variables() | ||
1391 | bb.event.enable_heartbeat() | ||
1403 | 1392 | ||
1404 | # Execute the runqueue | 1393 | # Execute the runqueue |
1405 | runlist = [[mc, item, task, fn]] | 1394 | runlist = [[mc, item, task, fn]] |
1406 | 1395 | ||
1407 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) | 1396 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) |
1408 | 1397 | ||
1409 | def buildFileIdle(server, rq, abort): | 1398 | def buildFileIdle(server, rq, halt): |
1410 | 1399 | ||
1411 | msg = None | 1400 | msg = None |
1412 | interrupted = 0 | 1401 | interrupted = 0 |
1413 | if abort or self.state == state.forceshutdown: | 1402 | if halt or self.state == state.forceshutdown: |
1414 | rq.finish_runqueue(True) | 1403 | rq.finish_runqueue(True) |
1415 | msg = "Forced shutdown" | 1404 | msg = "Forced shutdown" |
1416 | interrupted = 2 | 1405 | interrupted = 2 |
@@ -1425,37 +1414,68 @@ class BBCooker: | |||
1425 | failures += len(exc.args) | 1414 | failures += len(exc.args) |
1426 | retval = False | 1415 | retval = False |
1427 | except SystemExit as exc: | 1416 | except SystemExit as exc: |
1428 | self.command.finishAsyncCommand(str(exc)) | ||
1429 | if quietlog: | 1417 | if quietlog: |
1430 | bb.runqueue.logger.setLevel(rqloglevel) | 1418 | bb.runqueue.logger.setLevel(rqloglevel) |
1431 | return False | 1419 | return bb.server.process.idleFinish(str(exc)) |
1432 | 1420 | ||
1433 | if not retval: | 1421 | if not retval: |
1434 | if fireevents: | 1422 | if fireevents: |
1435 | bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) | 1423 | bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) |
1436 | self.command.finishAsyncCommand(msg) | 1424 | bb.event.disable_heartbeat() |
1437 | # We trashed self.recipecaches above | 1425 | # We trashed self.recipecaches above |
1438 | self.parsecache_valid = False | 1426 | self._parsecache_set(False) |
1439 | self.configuration.limited_deps = False | 1427 | self.configuration.limited_deps = False |
1440 | bb.parse.siggen.reset(self.data) | 1428 | bb.parse.siggen.reset(self.data) |
1441 | if quietlog: | 1429 | if quietlog: |
1442 | bb.runqueue.logger.setLevel(rqloglevel) | 1430 | bb.runqueue.logger.setLevel(rqloglevel) |
1443 | return False | 1431 | return bb.server.process.idleFinish(msg) |
1444 | if retval is True: | 1432 | if retval is True: |
1445 | return True | 1433 | return True |
1446 | return retval | 1434 | return retval |
1447 | 1435 | ||
1448 | self.idleCallBackRegister(buildFileIdle, rq) | 1436 | self.idleCallBackRegister(buildFileIdle, rq) |
1449 | 1437 | ||
1438 | def getTaskSignatures(self, target, tasks): | ||
1439 | sig = [] | ||
1440 | getAllTaskSignatures = False | ||
1441 | |||
1442 | if not tasks: | ||
1443 | tasks = ["do_build"] | ||
1444 | getAllTaskSignatures = True | ||
1445 | |||
1446 | for task in tasks: | ||
1447 | taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt) | ||
1448 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) | ||
1449 | rq.rqdata.prepare() | ||
1450 | |||
1451 | for l in runlist: | ||
1452 | mc, pn, taskname, fn = l | ||
1453 | |||
1454 | taskdep = rq.rqdata.dataCaches[mc].task_deps[fn] | ||
1455 | for t in taskdep['tasks']: | ||
1456 | if t in taskdep['nostamp'] or "setscene" in t: | ||
1457 | continue | ||
1458 | tid = bb.runqueue.build_tid(mc, fn, t) | ||
1459 | |||
1460 | if t in task or getAllTaskSignatures: | ||
1461 | try: | ||
1462 | rq.rqdata.prepare_task_hash(tid) | ||
1463 | sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) | ||
1464 | except KeyError: | ||
1465 | sig.append(self.getTaskSignatures(target, [t])[0]) | ||
1466 | |||
1467 | return sig | ||
1468 | |||
1450 | def buildTargets(self, targets, task): | 1469 | def buildTargets(self, targets, task): |
1451 | """ | 1470 | """ |
1452 | Attempt to build the targets specified | 1471 | Attempt to build the targets specified |
1453 | """ | 1472 | """ |
1454 | 1473 | ||
1455 | def buildTargetsIdle(server, rq, abort): | 1474 | def buildTargetsIdle(server, rq, halt): |
1456 | msg = None | 1475 | msg = None |
1457 | interrupted = 0 | 1476 | interrupted = 0 |
1458 | if abort or self.state == state.forceshutdown: | 1477 | if halt or self.state == state.forceshutdown: |
1478 | bb.event._should_exit.set() | ||
1459 | rq.finish_runqueue(True) | 1479 | rq.finish_runqueue(True) |
1460 | msg = "Forced shutdown" | 1480 | msg = "Forced shutdown" |
1461 | interrupted = 2 | 1481 | interrupted = 2 |
@@ -1470,16 +1490,16 @@ class BBCooker: | |||
1470 | failures += len(exc.args) | 1490 | failures += len(exc.args) |
1471 | retval = False | 1491 | retval = False |
1472 | except SystemExit as exc: | 1492 | except SystemExit as exc: |
1473 | self.command.finishAsyncCommand(str(exc)) | 1493 | return bb.server.process.idleFinish(str(exc)) |
1474 | return False | ||
1475 | 1494 | ||
1476 | if not retval: | 1495 | if not retval: |
1477 | try: | 1496 | try: |
1478 | for mc in self.multiconfigs: | 1497 | for mc in self.multiconfigs: |
1479 | bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) | 1498 | bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) |
1480 | finally: | 1499 | finally: |
1481 | self.command.finishAsyncCommand(msg) | 1500 | bb.event.disable_heartbeat() |
1482 | return False | 1501 | return bb.server.process.idleFinish(msg) |
1502 | |||
1483 | if retval is True: | 1503 | if retval is True: |
1484 | return True | 1504 | return True |
1485 | return retval | 1505 | return retval |
@@ -1498,7 +1518,7 @@ class BBCooker: | |||
1498 | 1518 | ||
1499 | bb.event.fire(bb.event.BuildInit(packages), self.data) | 1519 | bb.event.fire(bb.event.BuildInit(packages), self.data) |
1500 | 1520 | ||
1501 | taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort) | 1521 | taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt) |
1502 | 1522 | ||
1503 | buildname = self.data.getVar("BUILDNAME", False) | 1523 | buildname = self.data.getVar("BUILDNAME", False) |
1504 | 1524 | ||
@@ -1511,6 +1531,9 @@ class BBCooker: | |||
1511 | 1531 | ||
1512 | for mc in self.multiconfigs: | 1532 | for mc in self.multiconfigs: |
1513 | bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) | 1533 | bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) |
1534 | if self.eventlog: | ||
1535 | self.eventlog[2].write_variables() | ||
1536 | bb.event.enable_heartbeat() | ||
1514 | 1537 | ||
1515 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) | 1538 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) |
1516 | if 'universe' in targets: | 1539 | if 'universe' in targets: |
@@ -1520,7 +1543,13 @@ class BBCooker: | |||
1520 | 1543 | ||
1521 | 1544 | ||
1522 | def getAllKeysWithFlags(self, flaglist): | 1545 | def getAllKeysWithFlags(self, flaglist): |
1546 | def dummy_autorev(d): | ||
1547 | return | ||
1548 | |||
1523 | dump = {} | 1549 | dump = {} |
1550 | # Horrible but for now we need to avoid any sideeffects of autorev being called | ||
1551 | saved = bb.fetch2.get_autorev | ||
1552 | bb.fetch2.get_autorev = dummy_autorev | ||
1524 | for k in self.data.keys(): | 1553 | for k in self.data.keys(): |
1525 | try: | 1554 | try: |
1526 | expand = True | 1555 | expand = True |
@@ -1540,6 +1569,7 @@ class BBCooker: | |||
1540 | dump[k][d] = None | 1569 | dump[k][d] = None |
1541 | except Exception as e: | 1570 | except Exception as e: |
1542 | print(e) | 1571 | print(e) |
1572 | bb.fetch2.get_autorev = saved | ||
1543 | return dump | 1573 | return dump |
1544 | 1574 | ||
1545 | 1575 | ||
@@ -1547,13 +1577,6 @@ class BBCooker: | |||
1547 | if self.state == state.running: | 1577 | if self.state == state.running: |
1548 | return | 1578 | return |
1549 | 1579 | ||
1550 | # reload files for which we got notifications | ||
1551 | for p in self.inotify_modified_files: | ||
1552 | bb.parse.update_cache(p) | ||
1553 | if p in bb.parse.BBHandler.cached_statements: | ||
1554 | del bb.parse.BBHandler.cached_statements[p] | ||
1555 | self.inotify_modified_files = [] | ||
1556 | |||
1557 | if not self.baseconfig_valid: | 1580 | if not self.baseconfig_valid: |
1558 | logger.debug("Reloading base configuration data") | 1581 | logger.debug("Reloading base configuration data") |
1559 | self.initConfigurationData() | 1582 | self.initConfigurationData() |
@@ -1566,7 +1589,7 @@ class BBCooker: | |||
1566 | 1589 | ||
1567 | if self.state in (state.shutdown, state.forceshutdown, state.error): | 1590 | if self.state in (state.shutdown, state.forceshutdown, state.error): |
1568 | if hasattr(self.parser, 'shutdown'): | 1591 | if hasattr(self.parser, 'shutdown'): |
1569 | self.parser.shutdown(clean=False, force = True) | 1592 | self.parser.shutdown(clean=False) |
1570 | self.parser.final_cleanup() | 1593 | self.parser.final_cleanup() |
1571 | raise bb.BBHandledException() | 1594 | raise bb.BBHandledException() |
1572 | 1595 | ||
@@ -1574,6 +1597,9 @@ class BBCooker: | |||
1574 | self.updateCacheSync() | 1597 | self.updateCacheSync() |
1575 | 1598 | ||
1576 | if self.state != state.parsing and not self.parsecache_valid: | 1599 | if self.state != state.parsing and not self.parsecache_valid: |
1600 | bb.server.process.serverlog("Parsing started") | ||
1601 | self.parsewatched = {} | ||
1602 | |||
1577 | bb.parse.siggen.reset(self.data) | 1603 | bb.parse.siggen.reset(self.data) |
1578 | self.parseConfiguration () | 1604 | self.parseConfiguration () |
1579 | if CookerFeatures.SEND_SANITYEVENTS in self.featureset: | 1605 | if CookerFeatures.SEND_SANITYEVENTS in self.featureset: |
@@ -1587,30 +1613,27 @@ class BBCooker: | |||
1587 | for dep in self.configuration.extra_assume_provided: | 1613 | for dep in self.configuration.extra_assume_provided: |
1588 | self.recipecaches[mc].ignored_dependencies.add(dep) | 1614 | self.recipecaches[mc].ignored_dependencies.add(dep) |
1589 | 1615 | ||
1590 | self.collections = {} | ||
1591 | |||
1592 | mcfilelist = {} | 1616 | mcfilelist = {} |
1593 | total_masked = 0 | 1617 | total_masked = 0 |
1594 | searchdirs = set() | 1618 | searchdirs = set() |
1595 | for mc in self.multiconfigs: | 1619 | for mc in self.multiconfigs: |
1596 | self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc) | ||
1597 | (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) | 1620 | (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) |
1598 | 1621 | ||
1599 | mcfilelist[mc] = filelist | 1622 | mcfilelist[mc] = filelist |
1600 | total_masked += masked | 1623 | total_masked += masked |
1601 | searchdirs |= set(search) | 1624 | searchdirs |= set(search) |
1602 | 1625 | ||
1603 | # Add inotify watches for directories searched for bb/bbappend files | 1626 | # Add mtimes for directories searched for bb/bbappend files |
1604 | for dirent in searchdirs: | 1627 | for dirent in searchdirs: |
1605 | self.add_filewatch([[dirent]], dirs=True) | 1628 | self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))]) |
1606 | 1629 | ||
1607 | self.parser = CookerParser(self, mcfilelist, total_masked) | 1630 | self.parser = CookerParser(self, mcfilelist, total_masked) |
1608 | self.parsecache_valid = True | 1631 | self._parsecache_set(True) |
1609 | 1632 | ||
1610 | self.state = state.parsing | 1633 | self.state = state.parsing |
1611 | 1634 | ||
1612 | if not self.parser.parse_next(): | 1635 | if not self.parser.parse_next(): |
1613 | collectlog.debug(1, "parsing complete") | 1636 | collectlog.debug("parsing complete") |
1614 | if self.parser.error: | 1637 | if self.parser.error: |
1615 | raise bb.BBHandledException() | 1638 | raise bb.BBHandledException() |
1616 | self.show_appends_with_no_recipes() | 1639 | self.show_appends_with_no_recipes() |
@@ -1633,7 +1656,7 @@ class BBCooker: | |||
1633 | # Return a copy, don't modify the original | 1656 | # Return a copy, don't modify the original |
1634 | pkgs_to_build = pkgs_to_build[:] | 1657 | pkgs_to_build = pkgs_to_build[:] |
1635 | 1658 | ||
1636 | if len(pkgs_to_build) == 0: | 1659 | if not pkgs_to_build: |
1637 | raise NothingToBuild | 1660 | raise NothingToBuild |
1638 | 1661 | ||
1639 | ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() | 1662 | ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() |
@@ -1655,7 +1678,7 @@ class BBCooker: | |||
1655 | 1678 | ||
1656 | if 'universe' in pkgs_to_build: | 1679 | if 'universe' in pkgs_to_build: |
1657 | parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") | 1680 | parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") |
1658 | parselog.debug(1, "collating packages for \"universe\"") | 1681 | parselog.debug("collating packages for \"universe\"") |
1659 | pkgs_to_build.remove('universe') | 1682 | pkgs_to_build.remove('universe') |
1660 | for mc in self.multiconfigs: | 1683 | for mc in self.multiconfigs: |
1661 | for t in self.recipecaches[mc].universe_target: | 1684 | for t in self.recipecaches[mc].universe_target: |
@@ -1680,26 +1703,36 @@ class BBCooker: | |||
1680 | def post_serve(self): | 1703 | def post_serve(self): |
1681 | self.shutdown(force=True) | 1704 | self.shutdown(force=True) |
1682 | prserv.serv.auto_shutdown() | 1705 | prserv.serv.auto_shutdown() |
1706 | if hasattr(bb.parse, "siggen"): | ||
1707 | bb.parse.siggen.exit() | ||
1683 | if self.hashserv: | 1708 | if self.hashserv: |
1684 | self.hashserv.process.terminate() | 1709 | self.hashserv.process.terminate() |
1685 | self.hashserv.process.join() | 1710 | self.hashserv.process.join() |
1686 | if hasattr(self, "data"): | 1711 | if hasattr(self, "data"): |
1687 | bb.event.fire(CookerExit(), self.data) | 1712 | bb.event.fire(CookerExit(), self.data) |
1688 | 1713 | ||
1689 | def shutdown(self, force = False): | 1714 | def shutdown(self, force=False): |
1690 | if force: | 1715 | if force: |
1691 | self.state = state.forceshutdown | 1716 | self.state = state.forceshutdown |
1717 | bb.event._should_exit.set() | ||
1692 | else: | 1718 | else: |
1693 | self.state = state.shutdown | 1719 | self.state = state.shutdown |
1694 | 1720 | ||
1695 | if self.parser: | 1721 | if self.parser: |
1696 | self.parser.shutdown(clean=not force, force=force) | 1722 | self.parser.shutdown(clean=False) |
1697 | self.parser.final_cleanup() | 1723 | self.parser.final_cleanup() |
1698 | 1724 | ||
1699 | def finishcommand(self): | 1725 | def finishcommand(self): |
1726 | if hasattr(self.parser, 'shutdown'): | ||
1727 | self.parser.shutdown(clean=False) | ||
1728 | self.parser.final_cleanup() | ||
1700 | self.state = state.initial | 1729 | self.state = state.initial |
1730 | bb.event._should_exit.clear() | ||
1701 | 1731 | ||
1702 | def reset(self): | 1732 | def reset(self): |
1733 | if hasattr(bb.parse, "siggen"): | ||
1734 | bb.parse.siggen.exit() | ||
1735 | self.finishcommand() | ||
1703 | self.initConfigurationData() | 1736 | self.initConfigurationData() |
1704 | self.handlePRServ() | 1737 | self.handlePRServ() |
1705 | 1738 | ||
@@ -1711,9 +1744,9 @@ class BBCooker: | |||
1711 | if hasattr(self, "data"): | 1744 | if hasattr(self, "data"): |
1712 | self.databuilder.reset() | 1745 | self.databuilder.reset() |
1713 | self.data = self.databuilder.data | 1746 | self.data = self.databuilder.data |
1714 | self.parsecache_valid = False | 1747 | # In theory tinfoil could have modified the base data before parsing, |
1715 | self.baseconfig_valid = False | 1748 | # ideally need to track if anything did modify the datastore |
1716 | 1749 | self._parsecache_set(False) | |
1717 | 1750 | ||
1718 | class CookerExit(bb.event.Event): | 1751 | class CookerExit(bb.event.Event): |
1719 | """ | 1752 | """ |
@@ -1728,16 +1761,16 @@ class CookerCollectFiles(object): | |||
1728 | def __init__(self, priorities, mc=''): | 1761 | def __init__(self, priorities, mc=''): |
1729 | self.mc = mc | 1762 | self.mc = mc |
1730 | self.bbappends = [] | 1763 | self.bbappends = [] |
1731 | # Priorities is a list of tupples, with the second element as the pattern. | 1764 | # Priorities is a list of tuples, with the second element as the pattern. |
1732 | # We need to sort the list with the longest pattern first, and so on to | 1765 | # We need to sort the list with the longest pattern first, and so on to |
1733 | # the shortest. This allows nested layers to be properly evaluated. | 1766 | # the shortest. This allows nested layers to be properly evaluated. |
1734 | self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) | 1767 | self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) |
1735 | 1768 | ||
1736 | def calc_bbfile_priority(self, filename): | 1769 | def calc_bbfile_priority(self, filename): |
1737 | for _, _, regex, pri in self.bbfile_config_priorities: | 1770 | for layername, _, regex, pri in self.bbfile_config_priorities: |
1738 | if regex.match(filename): | 1771 | if regex.match(filename): |
1739 | return pri, regex | 1772 | return pri, regex, layername |
1740 | return 0, None | 1773 | return 0, None, None |
1741 | 1774 | ||
1742 | def get_bbfiles(self): | 1775 | def get_bbfiles(self): |
1743 | """Get list of default .bb files by reading out the current directory""" | 1776 | """Get list of default .bb files by reading out the current directory""" |
@@ -1756,7 +1789,7 @@ class CookerCollectFiles(object): | |||
1756 | for ignored in ('SCCS', 'CVS', '.svn'): | 1789 | for ignored in ('SCCS', 'CVS', '.svn'): |
1757 | if ignored in dirs: | 1790 | if ignored in dirs: |
1758 | dirs.remove(ignored) | 1791 | dirs.remove(ignored) |
1759 | found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] | 1792 | found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))] |
1760 | 1793 | ||
1761 | return found | 1794 | return found |
1762 | 1795 | ||
@@ -1764,7 +1797,7 @@ class CookerCollectFiles(object): | |||
1764 | """Collect all available .bb build files""" | 1797 | """Collect all available .bb build files""" |
1765 | masked = 0 | 1798 | masked = 0 |
1766 | 1799 | ||
1767 | collectlog.debug(1, "collecting .bb files") | 1800 | collectlog.debug("collecting .bb files") |
1768 | 1801 | ||
1769 | files = (config.getVar( "BBFILES") or "").split() | 1802 | files = (config.getVar( "BBFILES") or "").split() |
1770 | 1803 | ||
@@ -1772,14 +1805,14 @@ class CookerCollectFiles(object): | |||
1772 | files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) | 1805 | files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) |
1773 | config.setVar("BBFILES_PRIORITIZED", " ".join(files)) | 1806 | config.setVar("BBFILES_PRIORITIZED", " ".join(files)) |
1774 | 1807 | ||
1775 | if not len(files): | 1808 | if not files: |
1776 | files = self.get_bbfiles() | 1809 | files = self.get_bbfiles() |
1777 | 1810 | ||
1778 | if not len(files): | 1811 | if not files: |
1779 | collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") | 1812 | collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") |
1780 | bb.event.fire(CookerExit(), eventdata) | 1813 | bb.event.fire(CookerExit(), eventdata) |
1781 | 1814 | ||
1782 | # We need to track where we look so that we can add inotify watches. There | 1815 | # We need to track where we look so that we can know when the cache is invalid. There |
1783 | # is no nice way to do this, this is horrid. We intercept the os.listdir() | 1816 | # is no nice way to do this, this is horrid. We intercept the os.listdir() |
1784 | # (or os.scandir() for python 3.6+) calls while we run glob(). | 1817 | # (or os.scandir() for python 3.6+) calls while we run glob(). |
1785 | origlistdir = os.listdir | 1818 | origlistdir = os.listdir |
@@ -1835,7 +1868,7 @@ class CookerCollectFiles(object): | |||
1835 | try: | 1868 | try: |
1836 | re.compile(mask) | 1869 | re.compile(mask) |
1837 | bbmasks.append(mask) | 1870 | bbmasks.append(mask) |
1838 | except sre_constants.error: | 1871 | except re.error: |
1839 | collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) | 1872 | collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) |
1840 | 1873 | ||
1841 | # Then validate the combined regular expressions. This should never | 1874 | # Then validate the combined regular expressions. This should never |
@@ -1843,7 +1876,7 @@ class CookerCollectFiles(object): | |||
1843 | bbmask = "|".join(bbmasks) | 1876 | bbmask = "|".join(bbmasks) |
1844 | try: | 1877 | try: |
1845 | bbmask_compiled = re.compile(bbmask) | 1878 | bbmask_compiled = re.compile(bbmask) |
1846 | except sre_constants.error: | 1879 | except re.error: |
1847 | collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) | 1880 | collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) |
1848 | bbmask = None | 1881 | bbmask = None |
1849 | 1882 | ||
@@ -1851,7 +1884,7 @@ class CookerCollectFiles(object): | |||
1851 | bbappend = [] | 1884 | bbappend = [] |
1852 | for f in newfiles: | 1885 | for f in newfiles: |
1853 | if bbmask and bbmask_compiled.search(f): | 1886 | if bbmask and bbmask_compiled.search(f): |
1854 | collectlog.debug(1, "skipping masked file %s", f) | 1887 | collectlog.debug("skipping masked file %s", f) |
1855 | masked += 1 | 1888 | masked += 1 |
1856 | continue | 1889 | continue |
1857 | if f.endswith('.bb'): | 1890 | if f.endswith('.bb'): |
@@ -1859,7 +1892,7 @@ class CookerCollectFiles(object): | |||
1859 | elif f.endswith('.bbappend'): | 1892 | elif f.endswith('.bbappend'): |
1860 | bbappend.append(f) | 1893 | bbappend.append(f) |
1861 | else: | 1894 | else: |
1862 | collectlog.debug(1, "skipping %s: unknown file extension", f) | 1895 | collectlog.debug("skipping %s: unknown file extension", f) |
1863 | 1896 | ||
1864 | # Build a list of .bbappend files for each .bb file | 1897 | # Build a list of .bbappend files for each .bb file |
1865 | for f in bbappend: | 1898 | for f in bbappend: |
@@ -1910,7 +1943,7 @@ class CookerCollectFiles(object): | |||
1910 | # Calculate priorities for each file | 1943 | # Calculate priorities for each file |
1911 | for p in pkgfns: | 1944 | for p in pkgfns: |
1912 | realfn, cls, mc = bb.cache.virtualfn2realfn(p) | 1945 | realfn, cls, mc = bb.cache.virtualfn2realfn(p) |
1913 | priorities[p], regex = self.calc_bbfile_priority(realfn) | 1946 | priorities[p], regex, _ = self.calc_bbfile_priority(realfn) |
1914 | if regex in unmatched_regex: | 1947 | if regex in unmatched_regex: |
1915 | matched_regex.add(regex) | 1948 | matched_regex.add(regex) |
1916 | unmatched_regex.remove(regex) | 1949 | unmatched_regex.remove(regex) |
@@ -1961,15 +1994,30 @@ class ParsingFailure(Exception): | |||
1961 | Exception.__init__(self, realexception, recipe) | 1994 | Exception.__init__(self, realexception, recipe) |
1962 | 1995 | ||
1963 | class Parser(multiprocessing.Process): | 1996 | class Parser(multiprocessing.Process): |
1964 | def __init__(self, jobs, results, quit, init, profile): | 1997 | def __init__(self, jobs, results, quit, profile): |
1965 | self.jobs = jobs | 1998 | self.jobs = jobs |
1966 | self.results = results | 1999 | self.results = results |
1967 | self.quit = quit | 2000 | self.quit = quit |
1968 | self.init = init | ||
1969 | multiprocessing.Process.__init__(self) | 2001 | multiprocessing.Process.__init__(self) |
1970 | self.context = bb.utils.get_context().copy() | 2002 | self.context = bb.utils.get_context().copy() |
1971 | self.handlers = bb.event.get_class_handlers().copy() | 2003 | self.handlers = bb.event.get_class_handlers().copy() |
1972 | self.profile = profile | 2004 | self.profile = profile |
2005 | self.queue_signals = False | ||
2006 | self.signal_received = [] | ||
2007 | self.signal_threadlock = threading.Lock() | ||
2008 | |||
2009 | def catch_sig(self, signum, frame): | ||
2010 | if self.queue_signals: | ||
2011 | self.signal_received.append(signum) | ||
2012 | else: | ||
2013 | self.handle_sig(signum, frame) | ||
2014 | |||
2015 | def handle_sig(self, signum, frame): | ||
2016 | if signum == signal.SIGTERM: | ||
2017 | signal.signal(signal.SIGTERM, signal.SIG_DFL) | ||
2018 | os.kill(os.getpid(), signal.SIGTERM) | ||
2019 | elif signum == signal.SIGINT: | ||
2020 | signal.default_int_handler(signum, frame) | ||
1973 | 2021 | ||
1974 | def run(self): | 2022 | def run(self): |
1975 | 2023 | ||
@@ -1989,38 +2037,50 @@ class Parser(multiprocessing.Process): | |||
1989 | prof.dump_stats(logfile) | 2037 | prof.dump_stats(logfile) |
1990 | 2038 | ||
1991 | def realrun(self): | 2039 | def realrun(self): |
1992 | if self.init: | 2040 | # Signal handling here is hard. We must not terminate any process or thread holding the write |
1993 | self.init() | 2041 | # lock for the event stream as it will not be released, ever, and things will hang. |
2042 | # Python handles signals in the main thread/process but they can be raised from any thread and | ||
2043 | # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section | ||
2044 | # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any | ||
2045 | # new thread should also do so) and we defer handling but we handle with the local thread lock | ||
2046 | # held (a threading lock, not a multiprocessing one) so that no other thread in the process | ||
2047 | # can be in the critical section. | ||
2048 | signal.signal(signal.SIGTERM, self.catch_sig) | ||
2049 | signal.signal(signal.SIGHUP, signal.SIG_DFL) | ||
2050 | signal.signal(signal.SIGINT, self.catch_sig) | ||
2051 | bb.utils.set_process_name(multiprocessing.current_process().name) | ||
2052 | multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1) | ||
2053 | multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1) | ||
1994 | 2054 | ||
1995 | pending = [] | 2055 | pending = [] |
1996 | while True: | 2056 | havejobs = True |
1997 | try: | 2057 | try: |
1998 | self.quit.get_nowait() | 2058 | while havejobs or pending: |
1999 | except queue.Empty: | 2059 | if self.quit.is_set(): |
2000 | pass | 2060 | break |
2001 | else: | ||
2002 | self.results.close() | ||
2003 | self.results.join_thread() | ||
2004 | break | ||
2005 | 2061 | ||
2006 | if pending: | 2062 | job = None |
2007 | result = pending.pop() | ||
2008 | else: | ||
2009 | try: | 2063 | try: |
2010 | job = self.jobs.pop() | 2064 | job = self.jobs.pop() |
2011 | except IndexError: | 2065 | except IndexError: |
2012 | self.results.close() | 2066 | havejobs = False |
2013 | self.results.join_thread() | 2067 | if job: |
2014 | break | 2068 | result = self.parse(*job) |
2015 | result = self.parse(*job) | 2069 | # Clear the siggen cache after parsing to control memory usage, its huge |
2016 | # Clear the siggen cache after parsing to control memory usage, its huge | 2070 | bb.parse.siggen.postparsing_clean_cache() |
2017 | bb.parse.siggen.postparsing_clean_cache() | 2071 | pending.append(result) |
2018 | try: | 2072 | |
2019 | self.results.put(result, timeout=0.25) | 2073 | if pending: |
2020 | except queue.Full: | 2074 | try: |
2021 | pending.append(result) | 2075 | result = pending.pop() |
2076 | self.results.put(result, timeout=0.05) | ||
2077 | except queue.Full: | ||
2078 | pending.append(result) | ||
2079 | finally: | ||
2080 | self.results.close() | ||
2081 | self.results.join_thread() | ||
2022 | 2082 | ||
2023 | def parse(self, mc, cache, filename, appends): | 2083 | def parse(self, mc, cache, filename, appends, layername): |
2024 | try: | 2084 | try: |
2025 | origfilter = bb.event.LogHandler.filter | 2085 | origfilter = bb.event.LogHandler.filter |
2026 | # Record the filename we're parsing into any events generated | 2086 | # Record the filename we're parsing into any events generated |
@@ -2034,17 +2094,17 @@ class Parser(multiprocessing.Process): | |||
2034 | bb.event.set_class_handlers(self.handlers.copy()) | 2094 | bb.event.set_class_handlers(self.handlers.copy()) |
2035 | bb.event.LogHandler.filter = parse_filter | 2095 | bb.event.LogHandler.filter = parse_filter |
2036 | 2096 | ||
2037 | return True, mc, cache.parse(filename, appends) | 2097 | return True, mc, cache.parse(filename, appends, layername) |
2038 | except Exception as exc: | 2098 | except Exception as exc: |
2039 | tb = sys.exc_info()[2] | 2099 | tb = sys.exc_info()[2] |
2040 | exc.recipe = filename | 2100 | exc.recipe = filename |
2041 | exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) | 2101 | exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) |
2042 | return True, exc | 2102 | return True, None, exc |
2043 | # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown | 2103 | # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown |
2044 | # and for example a worker thread doesn't just exit on its own in response to | 2104 | # and for example a worker thread doesn't just exit on its own in response to |
2045 | # a SystemExit event for example. | 2105 | # a SystemExit event for example. |
2046 | except BaseException as exc: | 2106 | except BaseException as exc: |
2047 | return True, ParsingFailure(exc, filename) | 2107 | return True, None, ParsingFailure(exc, filename) |
2048 | finally: | 2108 | finally: |
2049 | bb.event.LogHandler.filter = origfilter | 2109 | bb.event.LogHandler.filter = origfilter |
2050 | 2110 | ||
@@ -2074,10 +2134,11 @@ class CookerParser(object): | |||
2074 | for mc in self.cooker.multiconfigs: | 2134 | for mc in self.cooker.multiconfigs: |
2075 | for filename in self.mcfilelist[mc]: | 2135 | for filename in self.mcfilelist[mc]: |
2076 | appends = self.cooker.collections[mc].get_file_appends(filename) | 2136 | appends = self.cooker.collections[mc].get_file_appends(filename) |
2137 | layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2] | ||
2077 | if not self.bb_caches[mc].cacheValid(filename, appends): | 2138 | if not self.bb_caches[mc].cacheValid(filename, appends): |
2078 | self.willparse.add((mc, self.bb_caches[mc], filename, appends)) | 2139 | self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername)) |
2079 | else: | 2140 | else: |
2080 | self.fromcache.add((mc, self.bb_caches[mc], filename, appends)) | 2141 | self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername)) |
2081 | 2142 | ||
2082 | self.total = len(self.fromcache) + len(self.willparse) | 2143 | self.total = len(self.fromcache) + len(self.willparse) |
2083 | self.toparse = len(self.willparse) | 2144 | self.toparse = len(self.willparse) |
@@ -2086,6 +2147,7 @@ class CookerParser(object): | |||
2086 | self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or | 2147 | self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or |
2087 | multiprocessing.cpu_count()), self.toparse) | 2148 | multiprocessing.cpu_count()), self.toparse) |
2088 | 2149 | ||
2150 | bb.cache.SiggenRecipeInfo.reset() | ||
2089 | self.start() | 2151 | self.start() |
2090 | self.haveshutdown = False | 2152 | self.haveshutdown = False |
2091 | self.syncthread = None | 2153 | self.syncthread = None |
@@ -2095,15 +2157,8 @@ class CookerParser(object): | |||
2095 | self.processes = [] | 2157 | self.processes = [] |
2096 | if self.toparse: | 2158 | if self.toparse: |
2097 | bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) | 2159 | bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) |
2098 | def init(): | 2160 | |
2099 | signal.signal(signal.SIGTERM, signal.SIG_DFL) | 2161 | self.parser_quit = multiprocessing.Event() |
2100 | signal.signal(signal.SIGHUP, signal.SIG_DFL) | ||
2101 | signal.signal(signal.SIGINT, signal.SIG_IGN) | ||
2102 | bb.utils.set_process_name(multiprocessing.current_process().name) | ||
2103 | multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1) | ||
2104 | multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1) | ||
2105 | |||
2106 | self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes) | ||
2107 | self.result_queue = multiprocessing.Queue() | 2162 | self.result_queue = multiprocessing.Queue() |
2108 | 2163 | ||
2109 | def chunkify(lst,n): | 2164 | def chunkify(lst,n): |
@@ -2111,14 +2166,14 @@ class CookerParser(object): | |||
2111 | self.jobs = chunkify(list(self.willparse), self.num_processes) | 2166 | self.jobs = chunkify(list(self.willparse), self.num_processes) |
2112 | 2167 | ||
2113 | for i in range(0, self.num_processes): | 2168 | for i in range(0, self.num_processes): |
2114 | parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile) | 2169 | parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile) |
2115 | parser.start() | 2170 | parser.start() |
2116 | self.process_names.append(parser.name) | 2171 | self.process_names.append(parser.name) |
2117 | self.processes.append(parser) | 2172 | self.processes.append(parser) |
2118 | 2173 | ||
2119 | self.results = itertools.chain(self.results, self.parse_generator()) | 2174 | self.results = itertools.chain(self.results, self.parse_generator()) |
2120 | 2175 | ||
2121 | def shutdown(self, clean=True, force=False): | 2176 | def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"): |
2122 | if not self.toparse: | 2177 | if not self.toparse: |
2123 | return | 2178 | return |
2124 | if self.haveshutdown: | 2179 | if self.haveshutdown: |
@@ -2132,9 +2187,9 @@ class CookerParser(object): | |||
2132 | self.total) | 2187 | self.total) |
2133 | 2188 | ||
2134 | bb.event.fire(event, self.cfgdata) | 2189 | bb.event.fire(event, self.cfgdata) |
2135 | 2190 | else: | |
2136 | for process in self.processes: | 2191 | bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata) |
2137 | self.parser_quit.put(None) | 2192 | bb.error("Parsing halted due to errors, see error messages above") |
2138 | 2193 | ||
2139 | # Cleanup the queue before call process.join(), otherwise there might be | 2194 | # Cleanup the queue before call process.join(), otherwise there might be |
2140 | # deadlocks. | 2195 | # deadlocks. |
@@ -2144,25 +2199,39 @@ class CookerParser(object): | |||
2144 | except queue.Empty: | 2199 | except queue.Empty: |
2145 | break | 2200 | break |
2146 | 2201 | ||
2147 | for process in self.processes: | ||
2148 | if force: | ||
2149 | process.join(.1) | ||
2150 | process.terminate() | ||
2151 | else: | ||
2152 | process.join() | ||
2153 | |||
2154 | self.parser_quit.close() | ||
2155 | # Allow data left in the cancel queue to be discarded | ||
2156 | self.parser_quit.cancel_join_thread() | ||
2157 | |||
2158 | def sync_caches(): | 2202 | def sync_caches(): |
2159 | for c in self.bb_caches.values(): | 2203 | for c in self.bb_caches.values(): |
2204 | bb.cache.SiggenRecipeInfo.reset() | ||
2160 | c.sync() | 2205 | c.sync() |
2161 | 2206 | ||
2162 | sync = threading.Thread(target=sync_caches, name="SyncThread") | 2207 | self.syncthread = threading.Thread(target=sync_caches, name="SyncThread") |
2163 | self.syncthread = sync | 2208 | self.syncthread.start() |
2164 | sync.start() | 2209 | |
2210 | self.parser_quit.set() | ||
2211 | |||
2212 | for process in self.processes: | ||
2213 | process.join(0.5) | ||
2214 | |||
2215 | for process in self.processes: | ||
2216 | if process.exitcode is None: | ||
2217 | os.kill(process.pid, signal.SIGINT) | ||
2218 | |||
2219 | for process in self.processes: | ||
2220 | process.join(0.5) | ||
2221 | |||
2222 | for process in self.processes: | ||
2223 | if process.exitcode is None: | ||
2224 | process.terminate() | ||
2225 | |||
2226 | for process in self.processes: | ||
2227 | process.join() | ||
2228 | # Added in 3.7, cleans up zombies | ||
2229 | if hasattr(process, "close"): | ||
2230 | process.close() | ||
2231 | |||
2232 | bb.codeparser.parser_cache_save() | ||
2165 | bb.codeparser.parser_cache_savemerge() | 2233 | bb.codeparser.parser_cache_savemerge() |
2234 | bb.cache.SiggenRecipeInfo.reset() | ||
2166 | bb.fetch.fetcher_parse_done() | 2235 | bb.fetch.fetcher_parse_done() |
2167 | if self.cooker.configuration.profile: | 2236 | if self.cooker.configuration.profile: |
2168 | profiles = [] | 2237 | profiles = [] |
@@ -2180,49 +2249,64 @@ class CookerParser(object): | |||
2180 | self.syncthread.join() | 2249 | self.syncthread.join() |
2181 | 2250 | ||
2182 | def load_cached(self): | 2251 | def load_cached(self): |
2183 | for mc, cache, filename, appends in self.fromcache: | 2252 | for mc, cache, filename, appends, layername in self.fromcache: |
2184 | cached, infos = cache.load(filename, appends) | 2253 | infos = cache.loadCached(filename, appends) |
2185 | yield not cached, mc, infos | 2254 | yield False, mc, infos |
2186 | 2255 | ||
2187 | def parse_generator(self): | 2256 | def parse_generator(self): |
2188 | while True: | 2257 | empty = False |
2258 | while self.processes or not empty: | ||
2259 | for process in self.processes.copy(): | ||
2260 | if not process.is_alive(): | ||
2261 | process.join() | ||
2262 | self.processes.remove(process) | ||
2263 | |||
2189 | if self.parsed >= self.toparse: | 2264 | if self.parsed >= self.toparse: |
2190 | break | 2265 | break |
2191 | 2266 | ||
2192 | try: | 2267 | try: |
2193 | result = self.result_queue.get(timeout=0.25) | 2268 | result = self.result_queue.get(timeout=0.25) |
2194 | except queue.Empty: | 2269 | except queue.Empty: |
2195 | pass | 2270 | empty = True |
2271 | yield None, None, None | ||
2196 | else: | 2272 | else: |
2197 | value = result[1] | 2273 | empty = False |
2198 | if isinstance(value, BaseException): | 2274 | yield result |
2199 | raise value | 2275 | |
2200 | else: | 2276 | if not (self.parsed >= self.toparse): |
2201 | yield result | 2277 | raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None) |
2278 | |||
2202 | 2279 | ||
2203 | def parse_next(self): | 2280 | def parse_next(self): |
2204 | result = [] | 2281 | result = [] |
2205 | parsed = None | 2282 | parsed = None |
2206 | try: | 2283 | try: |
2207 | parsed, mc, result = next(self.results) | 2284 | parsed, mc, result = next(self.results) |
2285 | if isinstance(result, BaseException): | ||
2286 | # Turn exceptions back into exceptions | ||
2287 | raise result | ||
2288 | if parsed is None: | ||
2289 | # Timeout, loop back through the main loop | ||
2290 | return True | ||
2291 | |||
2208 | except StopIteration: | 2292 | except StopIteration: |
2209 | self.shutdown() | 2293 | self.shutdown() |
2210 | return False | 2294 | return False |
2211 | except bb.BBHandledException as exc: | 2295 | except bb.BBHandledException as exc: |
2212 | self.error += 1 | 2296 | self.error += 1 |
2213 | logger.error('Failed to parse recipe: %s' % exc.recipe) | 2297 | logger.debug('Failed to parse recipe: %s' % exc.recipe) |
2214 | self.shutdown(clean=False, force=True) | 2298 | self.shutdown(clean=False) |
2215 | return False | 2299 | return False |
2216 | except ParsingFailure as exc: | 2300 | except ParsingFailure as exc: |
2217 | self.error += 1 | 2301 | self.error += 1 |
2218 | logger.error('Unable to parse %s: %s' % | 2302 | logger.error('Unable to parse %s: %s' % |
2219 | (exc.recipe, bb.exceptions.to_string(exc.realexception))) | 2303 | (exc.recipe, bb.exceptions.to_string(exc.realexception))) |
2220 | self.shutdown(clean=False, force=True) | 2304 | self.shutdown(clean=False) |
2221 | return False | 2305 | return False |
2222 | except bb.parse.ParseError as exc: | 2306 | except bb.parse.ParseError as exc: |
2223 | self.error += 1 | 2307 | self.error += 1 |
2224 | logger.error(str(exc)) | 2308 | logger.error(str(exc)) |
2225 | self.shutdown(clean=False, force=True) | 2309 | self.shutdown(clean=False, eventmsg=str(exc)) |
2226 | return False | 2310 | return False |
2227 | except bb.data_smart.ExpansionError as exc: | 2311 | except bb.data_smart.ExpansionError as exc: |
2228 | self.error += 1 | 2312 | self.error += 1 |
@@ -2231,7 +2315,7 @@ class CookerParser(object): | |||
2231 | tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) | 2315 | tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) |
2232 | logger.error('ExpansionError during parsing %s', value.recipe, | 2316 | logger.error('ExpansionError during parsing %s', value.recipe, |
2233 | exc_info=(etype, value, tb)) | 2317 | exc_info=(etype, value, tb)) |
2234 | self.shutdown(clean=False, force=True) | 2318 | self.shutdown(clean=False) |
2235 | return False | 2319 | return False |
2236 | except Exception as exc: | 2320 | except Exception as exc: |
2237 | self.error += 1 | 2321 | self.error += 1 |
@@ -2243,7 +2327,7 @@ class CookerParser(object): | |||
2243 | # Most likely, an exception occurred during raising an exception | 2327 | # Most likely, an exception occurred during raising an exception |
2244 | import traceback | 2328 | import traceback |
2245 | logger.error('Exception during parse: %s' % traceback.format_exc()) | 2329 | logger.error('Exception during parse: %s' % traceback.format_exc()) |
2246 | self.shutdown(clean=False, force=True) | 2330 | self.shutdown(clean=False) |
2247 | return False | 2331 | return False |
2248 | 2332 | ||
2249 | self.current += 1 | 2333 | self.current += 1 |
@@ -2265,11 +2349,13 @@ class CookerParser(object): | |||
2265 | return True | 2349 | return True |
2266 | 2350 | ||
2267 | def reparse(self, filename): | 2351 | def reparse(self, filename): |
2352 | bb.cache.SiggenRecipeInfo.reset() | ||
2268 | to_reparse = set() | 2353 | to_reparse = set() |
2269 | for mc in self.cooker.multiconfigs: | 2354 | for mc in self.cooker.multiconfigs: |
2270 | to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename))) | 2355 | layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2] |
2356 | to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername)) | ||
2271 | 2357 | ||
2272 | for mc, filename, appends in to_reparse: | 2358 | for mc, filename, appends, layername in to_reparse: |
2273 | infos = self.bb_caches[mc].parse(filename, appends) | 2359 | infos = self.bb_caches[mc].parse(filename, appends, layername) |
2274 | for vfn, info_array in infos: | 2360 | for vfn, info_array in infos: |
2275 | self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) | 2361 | self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) |
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py index 1c1e008c6b..0649e40995 100644 --- a/bitbake/lib/bb/cookerdata.py +++ b/bitbake/lib/bb/cookerdata.py | |||
@@ -57,7 +57,7 @@ class ConfigParameters(object): | |||
57 | 57 | ||
58 | def updateToServer(self, server, environment): | 58 | def updateToServer(self, server, environment): |
59 | options = {} | 59 | options = {} |
60 | for o in ["abort", "force", "invalidate_stamp", | 60 | for o in ["halt", "force", "invalidate_stamp", |
61 | "dry_run", "dump_signatures", | 61 | "dry_run", "dump_signatures", |
62 | "extra_assume_provided", "profile", | 62 | "extra_assume_provided", "profile", |
63 | "prefile", "postfile", "server_timeout", | 63 | "prefile", "postfile", "server_timeout", |
@@ -86,7 +86,7 @@ class ConfigParameters(object): | |||
86 | action['msg'] = "Only one target can be used with the --environment option." | 86 | action['msg'] = "Only one target can be used with the --environment option." |
87 | elif self.options.buildfile and len(self.options.pkgs_to_build) > 0: | 87 | elif self.options.buildfile and len(self.options.pkgs_to_build) > 0: |
88 | action['msg'] = "No target should be used with the --environment and --buildfile options." | 88 | action['msg'] = "No target should be used with the --environment and --buildfile options." |
89 | elif len(self.options.pkgs_to_build) > 0: | 89 | elif self.options.pkgs_to_build: |
90 | action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build] | 90 | action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build] |
91 | else: | 91 | else: |
92 | action['action'] = ["showEnvironment", self.options.buildfile] | 92 | action['action'] = ["showEnvironment", self.options.buildfile] |
@@ -124,7 +124,7 @@ class CookerConfiguration(object): | |||
124 | self.prefile = [] | 124 | self.prefile = [] |
125 | self.postfile = [] | 125 | self.postfile = [] |
126 | self.cmd = None | 126 | self.cmd = None |
127 | self.abort = True | 127 | self.halt = True |
128 | self.force = False | 128 | self.force = False |
129 | self.profile = False | 129 | self.profile = False |
130 | self.nosetscene = False | 130 | self.nosetscene = False |
@@ -160,12 +160,7 @@ def catch_parse_error(func): | |||
160 | def wrapped(fn, *args): | 160 | def wrapped(fn, *args): |
161 | try: | 161 | try: |
162 | return func(fn, *args) | 162 | return func(fn, *args) |
163 | except IOError as exc: | 163 | except Exception as exc: |
164 | import traceback | ||
165 | parselog.critical(traceback.format_exc()) | ||
166 | parselog.critical("Unable to parse %s: %s" % (fn, exc)) | ||
167 | raise bb.BBHandledException() | ||
168 | except bb.data_smart.ExpansionError as exc: | ||
169 | import traceback | 164 | import traceback |
170 | 165 | ||
171 | bbdir = os.path.dirname(__file__) + os.sep | 166 | bbdir = os.path.dirname(__file__) + os.sep |
@@ -177,14 +172,11 @@ def catch_parse_error(func): | |||
177 | break | 172 | break |
178 | parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb)) | 173 | parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb)) |
179 | raise bb.BBHandledException() | 174 | raise bb.BBHandledException() |
180 | except bb.parse.ParseError as exc: | ||
181 | parselog.critical(str(exc)) | ||
182 | raise bb.BBHandledException() | ||
183 | return wrapped | 175 | return wrapped |
184 | 176 | ||
185 | @catch_parse_error | 177 | @catch_parse_error |
186 | def parse_config_file(fn, data, include=True): | 178 | def parse_config_file(fn, data, include=True): |
187 | return bb.parse.handle(fn, data, include) | 179 | return bb.parse.handle(fn, data, include, baseconfig=True) |
188 | 180 | ||
189 | @catch_parse_error | 181 | @catch_parse_error |
190 | def _inherit(bbclass, data): | 182 | def _inherit(bbclass, data): |
@@ -210,7 +202,7 @@ def findConfigFile(configfile, data): | |||
210 | 202 | ||
211 | # | 203 | # |
212 | # We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working | 204 | # We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working |
213 | # up to /. If that fails, we search for a conf/bitbake.conf in BBPATH. | 205 | # up to /. If that fails, bitbake would fall back to cwd. |
214 | # | 206 | # |
215 | 207 | ||
216 | def findTopdir(): | 208 | def findTopdir(): |
@@ -223,11 +215,8 @@ def findTopdir(): | |||
223 | layerconf = findConfigFile("bblayers.conf", d) | 215 | layerconf = findConfigFile("bblayers.conf", d) |
224 | if layerconf: | 216 | if layerconf: |
225 | return os.path.dirname(os.path.dirname(layerconf)) | 217 | return os.path.dirname(os.path.dirname(layerconf)) |
226 | if bbpath: | 218 | |
227 | bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf") | 219 | return os.path.abspath(os.getcwd()) |
228 | if bitbakeconf: | ||
229 | return os.path.dirname(os.path.dirname(bitbakeconf)) | ||
230 | return None | ||
231 | 220 | ||
232 | class CookerDataBuilder(object): | 221 | class CookerDataBuilder(object): |
233 | 222 | ||
@@ -250,10 +239,14 @@ class CookerDataBuilder(object): | |||
250 | self.savedenv = bb.data.init() | 239 | self.savedenv = bb.data.init() |
251 | for k in cookercfg.env: | 240 | for k in cookercfg.env: |
252 | self.savedenv.setVar(k, cookercfg.env[k]) | 241 | self.savedenv.setVar(k, cookercfg.env[k]) |
242 | if k in bb.data_smart.bitbake_renamed_vars: | ||
243 | bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k])) | ||
244 | bb.fatal("Exiting to allow enviroment variables to be corrected") | ||
253 | 245 | ||
254 | filtered_keys = bb.utils.approved_variables() | 246 | filtered_keys = bb.utils.approved_variables() |
255 | bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys) | 247 | bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys) |
256 | self.basedata.setVar("BB_ORIGENV", self.savedenv) | 248 | self.basedata.setVar("BB_ORIGENV", self.savedenv) |
249 | self.basedata.setVar("__bbclasstype", "global") | ||
257 | 250 | ||
258 | if worker: | 251 | if worker: |
259 | self.basedata.setVar("BB_WORKERCONTEXT", "1") | 252 | self.basedata.setVar("BB_WORKERCONTEXT", "1") |
@@ -261,15 +254,15 @@ class CookerDataBuilder(object): | |||
261 | self.data = self.basedata | 254 | self.data = self.basedata |
262 | self.mcdata = {} | 255 | self.mcdata = {} |
263 | 256 | ||
264 | def parseBaseConfiguration(self): | 257 | def parseBaseConfiguration(self, worker=False): |
258 | mcdata = {} | ||
265 | data_hash = hashlib.sha256() | 259 | data_hash = hashlib.sha256() |
266 | try: | 260 | try: |
267 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) | 261 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) |
268 | 262 | ||
269 | if self.data.getVar("BB_WORKERCONTEXT", False) is None: | 263 | if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker: |
270 | bb.fetch.fetcher_init(self.data) | 264 | bb.fetch.fetcher_init(self.data) |
271 | bb.parse.init_parser(self.data) | 265 | bb.parse.init_parser(self.data) |
272 | bb.codeparser.parser_cache_init(self.data) | ||
273 | 266 | ||
274 | bb.event.fire(bb.event.ConfigParsed(), self.data) | 267 | bb.event.fire(bb.event.ConfigParsed(), self.data) |
275 | 268 | ||
@@ -287,38 +280,62 @@ class CookerDataBuilder(object): | |||
287 | 280 | ||
288 | bb.parse.init_parser(self.data) | 281 | bb.parse.init_parser(self.data) |
289 | data_hash.update(self.data.get_hash().encode('utf-8')) | 282 | data_hash.update(self.data.get_hash().encode('utf-8')) |
290 | self.mcdata[''] = self.data | 283 | mcdata[''] = self.data |
291 | 284 | ||
292 | multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() | 285 | multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() |
293 | for config in multiconfig: | 286 | for config in multiconfig: |
294 | mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) | 287 | if config[0].isdigit(): |
295 | bb.event.fire(bb.event.ConfigParsed(), mcdata) | 288 | bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config) |
296 | self.mcdata[config] = mcdata | 289 | parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) |
297 | data_hash.update(mcdata.get_hash().encode('utf-8')) | 290 | bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata) |
291 | mcdata[config] = parsed_mcdata | ||
292 | data_hash.update(parsed_mcdata.get_hash().encode('utf-8')) | ||
298 | if multiconfig: | 293 | if multiconfig: |
299 | bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data) | 294 | bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data) |
300 | 295 | ||
301 | self.data_hash = data_hash.hexdigest() | 296 | self.data_hash = data_hash.hexdigest() |
302 | except (SyntaxError, bb.BBHandledException): | ||
303 | raise bb.BBHandledException() | ||
304 | except bb.data_smart.ExpansionError as e: | 297 | except bb.data_smart.ExpansionError as e: |
305 | logger.error(str(e)) | 298 | logger.error(str(e)) |
306 | raise bb.BBHandledException() | 299 | raise bb.BBHandledException() |
307 | except Exception: | 300 | |
308 | logger.exception("Error parsing configuration files") | 301 | bb.codeparser.update_module_dependencies(self.data) |
302 | |||
303 | # Handle obsolete variable names | ||
304 | d = self.data | ||
305 | renamedvars = d.getVarFlags('BB_RENAMED_VARIABLES') or {} | ||
306 | renamedvars.update(bb.data_smart.bitbake_renamed_vars) | ||
307 | issues = False | ||
308 | for v in renamedvars: | ||
309 | if d.getVar(v) != None or d.hasOverrides(v): | ||
310 | issues = True | ||
311 | loginfo = {} | ||
312 | history = d.varhistory.get_variable_refs(v) | ||
313 | for h in history: | ||
314 | for line in history[h]: | ||
315 | loginfo = {'file' : h, 'line' : line} | ||
316 | bb.data.data_smart._print_rename_error(v, loginfo, renamedvars) | ||
317 | if not history: | ||
318 | bb.data.data_smart._print_rename_error(v, loginfo, renamedvars) | ||
319 | if issues: | ||
309 | raise bb.BBHandledException() | 320 | raise bb.BBHandledException() |
310 | 321 | ||
322 | for mc in mcdata: | ||
323 | mcdata[mc].renameVar("__depends", "__base_depends") | ||
324 | mcdata[mc].setVar("__bbclasstype", "recipe") | ||
325 | |||
311 | # Create a copy so we can reset at a later date when UIs disconnect | 326 | # Create a copy so we can reset at a later date when UIs disconnect |
312 | self.origdata = self.data | 327 | self.mcorigdata = mcdata |
313 | self.data = bb.data.createCopy(self.origdata) | 328 | for mc in mcdata: |
314 | self.mcdata[''] = self.data | 329 | self.mcdata[mc] = bb.data.createCopy(mcdata[mc]) |
330 | self.data = self.mcdata[''] | ||
315 | 331 | ||
316 | def reset(self): | 332 | def reset(self): |
317 | # We may not have run parseBaseConfiguration() yet | 333 | # We may not have run parseBaseConfiguration() yet |
318 | if not hasattr(self, 'origdata'): | 334 | if not hasattr(self, 'mcorigdata'): |
319 | return | 335 | return |
320 | self.data = bb.data.createCopy(self.origdata) | 336 | for mc in self.mcorigdata: |
321 | self.mcdata[''] = self.data | 337 | self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc]) |
338 | self.data = self.mcdata[''] | ||
322 | 339 | ||
323 | def _findLayerConf(self, data): | 340 | def _findLayerConf(self, data): |
324 | return findConfigFile("bblayers.conf", data) | 341 | return findConfigFile("bblayers.conf", data) |
@@ -333,15 +350,23 @@ class CookerDataBuilder(object): | |||
333 | 350 | ||
334 | layerconf = self._findLayerConf(data) | 351 | layerconf = self._findLayerConf(data) |
335 | if layerconf: | 352 | if layerconf: |
336 | parselog.debug(2, "Found bblayers.conf (%s)", layerconf) | 353 | parselog.debug2("Found bblayers.conf (%s)", layerconf) |
337 | # By definition bblayers.conf is in conf/ of TOPDIR. | 354 | # By definition bblayers.conf is in conf/ of TOPDIR. |
338 | # We may have been called with cwd somewhere else so reset TOPDIR | 355 | # We may have been called with cwd somewhere else so reset TOPDIR |
339 | data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) | 356 | data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) |
340 | data = parse_config_file(layerconf, data) | 357 | data = parse_config_file(layerconf, data) |
341 | 358 | ||
359 | if not data.getVar("BB_CACHEDIR"): | ||
360 | data.setVar("BB_CACHEDIR", "${TOPDIR}/cache") | ||
361 | |||
362 | bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR")) | ||
363 | |||
342 | layers = (data.getVar('BBLAYERS') or "").split() | 364 | layers = (data.getVar('BBLAYERS') or "").split() |
343 | broken_layers = [] | 365 | broken_layers = [] |
344 | 366 | ||
367 | if not layers: | ||
368 | bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition") | ||
369 | |||
345 | data = bb.data.createCopy(data) | 370 | data = bb.data.createCopy(data) |
346 | approved = bb.utils.approved_variables() | 371 | approved = bb.utils.approved_variables() |
347 | 372 | ||
@@ -357,8 +382,10 @@ class CookerDataBuilder(object): | |||
357 | parselog.critical("Please check BBLAYERS in %s" % (layerconf)) | 382 | parselog.critical("Please check BBLAYERS in %s" % (layerconf)) |
358 | raise bb.BBHandledException() | 383 | raise bb.BBHandledException() |
359 | 384 | ||
385 | layerseries = None | ||
386 | compat_entries = {} | ||
360 | for layer in layers: | 387 | for layer in layers: |
361 | parselog.debug(2, "Adding layer %s", layer) | 388 | parselog.debug2("Adding layer %s", layer) |
362 | if 'HOME' in approved and '~' in layer: | 389 | if 'HOME' in approved and '~' in layer: |
363 | layer = os.path.expanduser(layer) | 390 | layer = os.path.expanduser(layer) |
364 | if layer.endswith('/'): | 391 | if layer.endswith('/'): |
@@ -369,8 +396,27 @@ class CookerDataBuilder(object): | |||
369 | data.expandVarref('LAYERDIR') | 396 | data.expandVarref('LAYERDIR') |
370 | data.expandVarref('LAYERDIR_RE') | 397 | data.expandVarref('LAYERDIR_RE') |
371 | 398 | ||
399 | # Sadly we can't have nice things. | ||
400 | # Some layers think they're going to be 'clever' and copy the values from | ||
401 | # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of | ||
402 | # this mechanism is to make it clear which releases a layer supports and | ||
403 | # show when a layer master branch is bitrotting and is unmaintained. | ||
404 | # We therefore avoid people doing this here. | ||
405 | collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() | ||
406 | for c in collections: | ||
407 | compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c) | ||
408 | if compat_entry: | ||
409 | compat_entries[c] = set(compat_entry.split()) | ||
410 | data.delVar("LAYERSERIES_COMPAT_%s" % c) | ||
411 | if not layerseries: | ||
412 | layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split()) | ||
413 | if layerseries: | ||
414 | data.delVar("LAYERSERIES_CORENAMES") | ||
415 | |||
372 | data.delVar('LAYERDIR_RE') | 416 | data.delVar('LAYERDIR_RE') |
373 | data.delVar('LAYERDIR') | 417 | data.delVar('LAYERDIR') |
418 | for c in compat_entries: | ||
419 | data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c]))) | ||
374 | 420 | ||
375 | bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split() | 421 | bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split() |
376 | collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() | 422 | collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() |
@@ -389,26 +435,38 @@ class CookerDataBuilder(object): | |||
389 | if invalid: | 435 | if invalid: |
390 | bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid)) | 436 | bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid)) |
391 | 437 | ||
392 | layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split()) | ||
393 | collections_tmp = collections[:] | 438 | collections_tmp = collections[:] |
394 | for c in collections: | 439 | for c in collections: |
395 | collections_tmp.remove(c) | 440 | collections_tmp.remove(c) |
396 | if c in collections_tmp: | 441 | if c in collections_tmp: |
397 | bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c) | 442 | bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c) |
398 | compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split()) | 443 | |
444 | compat = set() | ||
445 | if c in compat_entries: | ||
446 | compat = compat_entries[c] | ||
447 | if compat and not layerseries: | ||
448 | bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c) | ||
399 | if compat and not (compat & layerseries): | 449 | if compat and not (compat & layerseries): |
400 | bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)" | 450 | bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)" |
401 | % (c, " ".join(layerseries), " ".join(compat))) | 451 | % (c, " ".join(layerseries), " ".join(compat))) |
402 | elif not compat and not data.getVar("BB_WORKERCONTEXT"): | 452 | elif not compat and not data.getVar("BB_WORKERCONTEXT"): |
403 | bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c)) | 453 | bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c)) |
404 | 454 | ||
455 | data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries))) | ||
456 | |||
405 | if not data.getVar("BBPATH"): | 457 | if not data.getVar("BBPATH"): |
406 | msg = "The BBPATH variable is not set" | 458 | msg = "The BBPATH variable is not set" |
407 | if not layerconf: | 459 | if not layerconf: |
408 | msg += (" and bitbake did not find a conf/bblayers.conf file in" | 460 | msg += (" and bitbake did not find a conf/bblayers.conf file in" |
409 | " the expected location.\nMaybe you accidentally" | 461 | " the expected location.\nMaybe you accidentally" |
410 | " invoked bitbake from the wrong directory?") | 462 | " invoked bitbake from the wrong directory?") |
411 | raise SystemExit(msg) | 463 | bb.fatal(msg) |
464 | |||
465 | if not data.getVar("TOPDIR"): | ||
466 | data.setVar("TOPDIR", os.path.abspath(os.getcwd())) | ||
467 | if not data.getVar("BB_CACHEDIR"): | ||
468 | data.setVar("BB_CACHEDIR", "${TOPDIR}/cache") | ||
469 | bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR")) | ||
412 | 470 | ||
413 | data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) | 471 | data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) |
414 | 472 | ||
@@ -421,7 +479,7 @@ class CookerDataBuilder(object): | |||
421 | for bbclass in bbclasses: | 479 | for bbclass in bbclasses: |
422 | data = _inherit(bbclass, data) | 480 | data = _inherit(bbclass, data) |
423 | 481 | ||
424 | # Nomally we only register event handlers at the end of parsing .bb files | 482 | # Normally we only register event handlers at the end of parsing .bb files |
425 | # We register any handlers we've found so far here... | 483 | # We register any handlers we've found so far here... |
426 | for var in data.getVar('__BBHANDLERS', False) or []: | 484 | for var in data.getVar('__BBHANDLERS', False) or []: |
427 | handlerfn = data.getVarFlag(var, "filename", False) | 485 | handlerfn = data.getVarFlag(var, "filename", False) |
@@ -435,3 +493,54 @@ class CookerDataBuilder(object): | |||
435 | 493 | ||
436 | return data | 494 | return data |
437 | 495 | ||
496 | @staticmethod | ||
497 | def _parse_recipe(bb_data, bbfile, appends, mc, layername): | ||
498 | bb_data.setVar("__BBMULTICONFIG", mc) | ||
499 | bb_data.setVar("FILE_LAYERNAME", layername) | ||
500 | |||
501 | bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) | ||
502 | bb.parse.cached_mtime_noerror(bbfile_loc) | ||
503 | |||
504 | if appends: | ||
505 | bb_data.setVar('__BBAPPEND', " ".join(appends)) | ||
506 | |||
507 | return bb.parse.handle(bbfile, bb_data) | ||
508 | |||
509 | def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None): | ||
510 | """ | ||
511 | Load and parse one .bb build file | ||
512 | Return the data and whether parsing resulted in the file being skipped | ||
513 | """ | ||
514 | |||
515 | if virtonly: | ||
516 | (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile) | ||
517 | bb_data = self.mcdata[mc].createCopy() | ||
518 | bb_data.setVar("__ONLYFINALISE", virtual or "default") | ||
519 | return self._parse_recipe(bb_data, bbfile, appends, mc, layername) | ||
520 | |||
521 | if mc is not None: | ||
522 | bb_data = self.mcdata[mc].createCopy() | ||
523 | return self._parse_recipe(bb_data, bbfile, appends, mc, layername) | ||
524 | |||
525 | bb_data = self.data.createCopy() | ||
526 | datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername) | ||
527 | |||
528 | for mc in self.mcdata: | ||
529 | if not mc: | ||
530 | continue | ||
531 | bb_data = self.mcdata[mc].createCopy() | ||
532 | newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername) | ||
533 | for ns in newstores: | ||
534 | datastores["mc:%s:%s" % (mc, ns)] = newstores[ns] | ||
535 | |||
536 | return datastores | ||
537 | |||
538 | def parseRecipe(self, virtualfn, appends, layername): | ||
539 | """ | ||
540 | Return a complete set of data for fn. | ||
541 | To do this, we need to parse the file. | ||
542 | """ | ||
543 | logger.debug("Parsing %s (full)" % virtualfn) | ||
544 | (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn) | ||
545 | datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername) | ||
546 | return datastores[virtual] | ||
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py index c187fcfc6c..7689404436 100644 --- a/bitbake/lib/bb/daemonize.py +++ b/bitbake/lib/bb/daemonize.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -74,26 +76,26 @@ def createDaemon(function, logfile): | |||
74 | with open('/dev/null', 'r') as si: | 76 | with open('/dev/null', 'r') as si: |
75 | os.dup2(si.fileno(), sys.stdin.fileno()) | 77 | os.dup2(si.fileno(), sys.stdin.fileno()) |
76 | 78 | ||
77 | try: | 79 | with open(logfile, 'a+') as so: |
78 | so = open(logfile, 'a+') | 80 | try: |
79 | os.dup2(so.fileno(), sys.stdout.fileno()) | 81 | os.dup2(so.fileno(), sys.stdout.fileno()) |
80 | os.dup2(so.fileno(), sys.stderr.fileno()) | 82 | os.dup2(so.fileno(), sys.stderr.fileno()) |
81 | except io.UnsupportedOperation: | 83 | except io.UnsupportedOperation: |
82 | sys.stdout = open(logfile, 'a+') | 84 | sys.stdout = so |
83 | 85 | ||
84 | # Have stdout and stderr be the same so log output matches chronologically | 86 | # Have stdout and stderr be the same so log output matches chronologically |
85 | # and there aren't two seperate buffers | 87 | # and there aren't two separate buffers |
86 | sys.stderr = sys.stdout | 88 | sys.stderr = sys.stdout |
87 | 89 | ||
88 | try: | 90 | try: |
89 | function() | 91 | function() |
90 | except Exception as e: | 92 | except Exception as e: |
91 | traceback.print_exc() | 93 | traceback.print_exc() |
92 | finally: | 94 | finally: |
93 | bb.event.print_ui_queue() | 95 | bb.event.print_ui_queue() |
94 | # os._exit() doesn't flush open files like os.exit() does. Manually flush | 96 | # os._exit() doesn't flush open files like os.exit() does. Manually flush |
95 | # stdout and stderr so that any logging output will be seen, particularly | 97 | # stdout and stderr so that any logging output will be seen, particularly |
96 | # exception tracebacks. | 98 | # exception tracebacks. |
97 | sys.stdout.flush() | 99 | sys.stdout.flush() |
98 | sys.stderr.flush() | 100 | sys.stderr.flush() |
99 | os._exit(0) | 101 | os._exit(0) |
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py index 97022853ca..505f42950f 100644 --- a/bitbake/lib/bb/data.py +++ b/bitbake/lib/bb/data.py | |||
@@ -4,14 +4,16 @@ BitBake 'Data' implementations | |||
4 | Functions for interacting with the data structure used by the | 4 | Functions for interacting with the data structure used by the |
5 | BitBake build tools. | 5 | BitBake build tools. |
6 | 6 | ||
7 | The expandKeys and update_data are the most expensive | 7 | expandKeys and datastore iteration are the most expensive |
8 | operations. At night the cookie monster came by and | 8 | operations. Updating overrides is now "on the fly" but still based |
9 | on the idea of the cookie monster introduced by zecke: | ||
10 | "At night the cookie monster came by and | ||
9 | suggested 'give me cookies on setting the variables and | 11 | suggested 'give me cookies on setting the variables and |
10 | things will work out'. Taking this suggestion into account | 12 | things will work out'. Taking this suggestion into account |
11 | applying the skills from the not yet passed 'Entwurf und | 13 | applying the skills from the not yet passed 'Entwurf und |
12 | Analyse von Algorithmen' lecture and the cookie | 14 | Analyse von Algorithmen' lecture and the cookie |
13 | monster seems to be right. We will track setVar more carefully | 15 | monster seems to be right. We will track setVar more carefully |
14 | to have faster update_data and expandKeys operations. | 16 | to have faster datastore operations." |
15 | 17 | ||
16 | This is a trade-off between speed and memory again but | 18 | This is a trade-off between speed and memory again but |
17 | the speed is more critical here. | 19 | the speed is more critical here. |
@@ -26,11 +28,6 @@ the speed is more critical here. | |||
26 | 28 | ||
27 | import sys, os, re | 29 | import sys, os, re |
28 | import hashlib | 30 | import hashlib |
29 | if sys.argv[0][-5:] == "pydoc": | ||
30 | path = os.path.dirname(os.path.dirname(sys.argv[1])) | ||
31 | else: | ||
32 | path = os.path.dirname(os.path.dirname(sys.argv[0])) | ||
33 | sys.path.insert(0, path) | ||
34 | from itertools import groupby | 31 | from itertools import groupby |
35 | 32 | ||
36 | from bb import data_smart | 33 | from bb import data_smart |
@@ -70,10 +67,6 @@ def keys(d): | |||
70 | """Return a list of keys in d""" | 67 | """Return a list of keys in d""" |
71 | return d.keys() | 68 | return d.keys() |
72 | 69 | ||
73 | |||
74 | __expand_var_regexp__ = re.compile(r"\${[^{}]+}") | ||
75 | __expand_python_regexp__ = re.compile(r"\${@.+?}") | ||
76 | |||
77 | def expand(s, d, varname = None): | 70 | def expand(s, d, varname = None): |
78 | """Variable expansion using the data store""" | 71 | """Variable expansion using the data store""" |
79 | return d.expand(s, varname) | 72 | return d.expand(s, varname) |
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): | |||
121 | if d.getVarFlag(var, 'python', False) and func: | 114 | if d.getVarFlag(var, 'python', False) and func: |
122 | return False | 115 | return False |
123 | 116 | ||
124 | export = d.getVarFlag(var, "export", False) | 117 | export = bb.utils.to_boolean(d.getVarFlag(var, "export")) |
125 | unexport = d.getVarFlag(var, "unexport", False) | 118 | unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport")) |
126 | if not all and not export and not unexport and not func: | 119 | if not all and not export and not unexport and not func: |
127 | return False | 120 | return False |
128 | 121 | ||
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False): | |||
195 | 188 | ||
196 | def exported_keys(d): | 189 | def exported_keys(d): |
197 | return (key for key in d.keys() if not key.startswith('__') and | 190 | return (key for key in d.keys() if not key.startswith('__') and |
198 | d.getVarFlag(key, 'export', False) and | 191 | bb.utils.to_boolean(d.getVarFlag(key, 'export')) and |
199 | not d.getVarFlag(key, 'unexport', False)) | 192 | not bb.utils.to_boolean(d.getVarFlag(key, 'unexport'))) |
200 | 193 | ||
201 | def exported_vars(d): | 194 | def exported_vars(d): |
202 | k = list(exported_keys(d)) | 195 | k = list(exported_keys(d)) |
@@ -226,7 +219,7 @@ def emit_func(func, o=sys.__stdout__, d = init()): | |||
226 | deps = newdeps | 219 | deps = newdeps |
227 | seen |= deps | 220 | seen |= deps |
228 | newdeps = set() | 221 | newdeps = set() |
229 | for dep in deps: | 222 | for dep in sorted(deps): |
230 | if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False): | 223 | if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False): |
231 | emit_var(dep, o, d, False) and o.write('\n') | 224 | emit_var(dep, o, d, False) and o.write('\n') |
232 | newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep)) | 225 | newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep)) |
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()): | |||
268 | newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) | 261 | newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) |
269 | newdeps -= seen | 262 | newdeps -= seen |
270 | 263 | ||
271 | def update_data(d): | 264 | def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata): |
272 | """Performs final steps upon the datastore, including application of overrides""" | 265 | def handle_contains(value, contains, exclusions, d): |
273 | d.finalize(parent = True) | 266 | newvalue = [] |
267 | if value: | ||
268 | newvalue.append(str(value)) | ||
269 | for k in sorted(contains): | ||
270 | if k in exclusions or k in ignored_vars: | ||
271 | continue | ||
272 | l = (d.getVar(k) or "").split() | ||
273 | for item in sorted(contains[k]): | ||
274 | for word in item.split(): | ||
275 | if not word in l: | ||
276 | newvalue.append("\n%s{%s} = Unset" % (k, item)) | ||
277 | break | ||
278 | else: | ||
279 | newvalue.append("\n%s{%s} = Set" % (k, item)) | ||
280 | return "".join(newvalue) | ||
281 | |||
282 | def handle_remove(value, deps, removes, d): | ||
283 | for r in sorted(removes): | ||
284 | r2 = d.expandWithRefs(r, None) | ||
285 | value += "\n_remove of %s" % r | ||
286 | deps |= r2.references | ||
287 | deps = deps | (keys & r2.execs) | ||
288 | value = handle_contains(value, r2.contains, exclusions, d) | ||
289 | return value | ||
274 | 290 | ||
275 | def build_dependencies(key, keys, shelldeps, varflagsexcl, d): | ||
276 | deps = set() | 291 | deps = set() |
277 | try: | 292 | try: |
293 | if key in mod_funcs: | ||
294 | exclusions = set() | ||
295 | moddep = bb.codeparser.modulecode_deps[key] | ||
296 | value = handle_contains("", moddep[3], exclusions, d) | ||
297 | return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value | ||
298 | |||
278 | if key[-1] == ']': | 299 | if key[-1] == ']': |
279 | vf = key[:-1].split('[') | 300 | vf = key[:-1].split('[') |
301 | if vf[1] == "vardepvalueexclude": | ||
302 | return deps, "" | ||
280 | value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True) | 303 | value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True) |
281 | deps |= parser.references | 304 | deps |= parser.references |
282 | deps = deps | (keys & parser.execs) | 305 | deps = deps | (keys & parser.execs) |
283 | return deps, value | 306 | deps -= ignored_vars |
307 | return frozenset(deps), value | ||
284 | varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {} | 308 | varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {} |
285 | vardeps = varflags.get("vardeps") | 309 | vardeps = varflags.get("vardeps") |
286 | 310 | exclusions = varflags.get("vardepsexclude", "").split() | |
287 | def handle_contains(value, contains, d): | ||
288 | newvalue = "" | ||
289 | for k in sorted(contains): | ||
290 | l = (d.getVar(k) or "").split() | ||
291 | for item in sorted(contains[k]): | ||
292 | for word in item.split(): | ||
293 | if not word in l: | ||
294 | newvalue += "\n%s{%s} = Unset" % (k, item) | ||
295 | break | ||
296 | else: | ||
297 | newvalue += "\n%s{%s} = Set" % (k, item) | ||
298 | if not newvalue: | ||
299 | return value | ||
300 | if not value: | ||
301 | return newvalue | ||
302 | return value + newvalue | ||
303 | |||
304 | def handle_remove(value, deps, removes, d): | ||
305 | for r in sorted(removes): | ||
306 | r2 = d.expandWithRefs(r, None) | ||
307 | value += "\n_remove of %s" % r | ||
308 | deps |= r2.references | ||
309 | deps = deps | (keys & r2.execs) | ||
310 | return value | ||
311 | 311 | ||
312 | if "vardepvalue" in varflags: | 312 | if "vardepvalue" in varflags: |
313 | value = varflags.get("vardepvalue") | 313 | value = varflags.get("vardepvalue") |
314 | elif varflags.get("func"): | 314 | elif varflags.get("func"): |
315 | if varflags.get("python"): | 315 | if varflags.get("python"): |
316 | value = d.getVarFlag(key, "_content", False) | 316 | value = codeparsedata.getVarFlag(key, "_content", False) |
317 | parser = bb.codeparser.PythonParser(key, logger) | 317 | parser = bb.codeparser.PythonParser(key, logger) |
318 | parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) | 318 | parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) |
319 | deps = deps | parser.references | 319 | deps = deps | parser.references |
320 | deps = deps | (keys & parser.execs) | 320 | deps = deps | (keys & parser.execs) |
321 | value = handle_contains(value, parser.contains, d) | 321 | value = handle_contains(value, parser.contains, exclusions, d) |
322 | else: | 322 | else: |
323 | value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True) | 323 | value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True) |
324 | parser = bb.codeparser.ShellParser(key, logger) | 324 | parser = bb.codeparser.ShellParser(key, logger) |
325 | parser.parse_shell(parsedvar.value) | 325 | parser.parse_shell(parsedvar.value) |
326 | deps = deps | shelldeps | 326 | deps = deps | shelldeps |
327 | deps = deps | parsedvar.references | 327 | deps = deps | parsedvar.references |
328 | deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) | 328 | deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) |
329 | value = handle_contains(value, parsedvar.contains, d) | 329 | value = handle_contains(value, parsedvar.contains, exclusions, d) |
330 | if hasattr(parsedvar, "removes"): | 330 | if hasattr(parsedvar, "removes"): |
331 | value = handle_remove(value, deps, parsedvar.removes, d) | 331 | value = handle_remove(value, deps, parsedvar.removes, d) |
332 | if vardeps is None: | 332 | if vardeps is None: |
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d): | |||
341 | value, parser = d.getVarFlag(key, "_content", False, retparser=True) | 341 | value, parser = d.getVarFlag(key, "_content", False, retparser=True) |
342 | deps |= parser.references | 342 | deps |= parser.references |
343 | deps = deps | (keys & parser.execs) | 343 | deps = deps | (keys & parser.execs) |
344 | value = handle_contains(value, parser.contains, d) | 344 | value = handle_contains(value, parser.contains, exclusions, d) |
345 | if hasattr(parser, "removes"): | 345 | if hasattr(parser, "removes"): |
346 | value = handle_remove(value, deps, parser.removes, d) | 346 | value = handle_remove(value, deps, parser.removes, d) |
347 | 347 | ||
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d): | |||
361 | deps |= set(varfdeps) | 361 | deps |= set(varfdeps) |
362 | 362 | ||
363 | deps |= set((vardeps or "").split()) | 363 | deps |= set((vardeps or "").split()) |
364 | deps -= set(varflags.get("vardepsexclude", "").split()) | 364 | deps -= set(exclusions) |
365 | deps -= ignored_vars | ||
365 | except bb.parse.SkipRecipe: | 366 | except bb.parse.SkipRecipe: |
366 | raise | 367 | raise |
367 | except Exception as e: | 368 | except Exception as e: |
368 | bb.warn("Exception during build_dependencies for %s" % key) | 369 | bb.warn("Exception during build_dependencies for %s" % key) |
369 | raise | 370 | raise |
370 | return deps, value | 371 | return frozenset(deps), value |
371 | #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) | 372 | #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) |
372 | #d.setVarFlag(key, "vardeps", deps) | 373 | #d.setVarFlag(key, "vardeps", deps) |
373 | 374 | ||
374 | def generate_dependencies(d, whitelist): | 375 | def generate_dependencies(d, ignored_vars): |
375 | 376 | ||
376 | keys = set(key for key in d if not key.startswith("__")) | 377 | mod_funcs = set(bb.codeparser.modulecode_deps.keys()) |
377 | shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False)) | 378 | keys = set(key for key in d if not key.startswith("__")) | mod_funcs |
379 | shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport"))) | ||
378 | varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS') | 380 | varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS') |
379 | 381 | ||
382 | codeparserd = d.createCopy() | ||
383 | for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split(): | ||
384 | key, value = forced.split("=", 1) | ||
385 | codeparserd.setVar(key, value) | ||
386 | |||
380 | deps = {} | 387 | deps = {} |
381 | values = {} | 388 | values = {} |
382 | 389 | ||
383 | tasklist = d.getVar('__BBTASKS', False) or [] | 390 | tasklist = d.getVar('__BBTASKS', False) or [] |
384 | for task in tasklist: | 391 | for task in tasklist: |
385 | deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d) | 392 | deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd) |
386 | newdeps = deps[task] | 393 | newdeps = deps[task] |
387 | seen = set() | 394 | seen = set() |
388 | while newdeps: | 395 | while newdeps: |
389 | nextdeps = newdeps - whitelist | 396 | nextdeps = newdeps |
390 | seen |= nextdeps | 397 | seen |= nextdeps |
391 | newdeps = set() | 398 | newdeps = set() |
392 | for dep in nextdeps: | 399 | for dep in nextdeps: |
393 | if dep not in deps: | 400 | if dep not in deps: |
394 | deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d) | 401 | deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd) |
395 | newdeps |= deps[dep] | 402 | newdeps |= deps[dep] |
396 | newdeps -= seen | 403 | newdeps -= seen |
397 | #print "For %s: %s" % (task, str(deps[task])) | 404 | #print "For %s: %s" % (task, str(deps[task])) |
398 | return tasklist, deps, values | 405 | return tasklist, deps, values |
399 | 406 | ||
400 | def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn): | 407 | def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn): |
401 | taskdeps = {} | 408 | taskdeps = {} |
402 | basehash = {} | 409 | basehash = {} |
403 | 410 | ||
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn): | |||
406 | 413 | ||
407 | if data is None: | 414 | if data is None: |
408 | bb.error("Task %s from %s seems to be empty?!" % (task, fn)) | 415 | bb.error("Task %s from %s seems to be empty?!" % (task, fn)) |
409 | data = '' | 416 | data = [] |
417 | else: | ||
418 | data = [data] | ||
410 | 419 | ||
411 | gendeps[task] -= whitelist | ||
412 | newdeps = gendeps[task] | 420 | newdeps = gendeps[task] |
413 | seen = set() | 421 | seen = set() |
414 | while newdeps: | 422 | while newdeps: |
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn): | |||
416 | seen |= nextdeps | 424 | seen |= nextdeps |
417 | newdeps = set() | 425 | newdeps = set() |
418 | for dep in nextdeps: | 426 | for dep in nextdeps: |
419 | if dep in whitelist: | ||
420 | continue | ||
421 | gendeps[dep] -= whitelist | ||
422 | newdeps |= gendeps[dep] | 427 | newdeps |= gendeps[dep] |
423 | newdeps -= seen | 428 | newdeps -= seen |
424 | 429 | ||
425 | alldeps = sorted(seen) | 430 | alldeps = sorted(seen) |
426 | for dep in alldeps: | 431 | for dep in alldeps: |
427 | data = data + dep | 432 | data.append(dep) |
428 | var = lookupcache[dep] | 433 | var = lookupcache[dep] |
429 | if var is not None: | 434 | if var is not None: |
430 | data = data + str(var) | 435 | data.append(str(var)) |
431 | k = fn + ":" + task | 436 | k = fn + ":" + task |
432 | basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest() | 437 | basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest() |
433 | taskdeps[task] = alldeps | 438 | taskdeps[task] = frozenset(seen) |
434 | 439 | ||
435 | return taskdeps, basehash | 440 | return taskdeps, basehash |
436 | 441 | ||
437 | def inherits_class(klass, d): | 442 | def inherits_class(klass, d): |
438 | val = d.getVar('__inherit_cache', False) or [] | 443 | val = d.getVar('__inherit_cache', False) or [] |
439 | needle = os.path.join('classes', '%s.bbclass' % klass) | 444 | needle = '/%s.bbclass' % klass |
440 | for v in val: | 445 | for v in val: |
441 | if v.endswith(needle): | 446 | if v.endswith(needle): |
442 | return True | 447 | return True |
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py index 2328c334ac..0128a5bb17 100644 --- a/bitbake/lib/bb/data_smart.py +++ b/bitbake/lib/bb/data_smart.py | |||
@@ -16,8 +16,11 @@ BitBake build tools. | |||
16 | # | 16 | # |
17 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | 17 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig |
18 | 18 | ||
19 | import copy, re, sys, traceback | 19 | import builtins |
20 | from collections import MutableMapping | 20 | import copy |
21 | import re | ||
22 | import sys | ||
23 | from collections.abc import MutableMapping | ||
21 | import logging | 24 | import logging |
22 | import hashlib | 25 | import hashlib |
23 | import bb, bb.codeparser | 26 | import bb, bb.codeparser |
@@ -26,13 +29,25 @@ from bb.COW import COWDictBase | |||
26 | 29 | ||
27 | logger = logging.getLogger("BitBake.Data") | 30 | logger = logging.getLogger("BitBake.Data") |
28 | 31 | ||
29 | __setvar_keyword__ = ["_append", "_prepend", "_remove"] | 32 | __setvar_keyword__ = [":append", ":prepend", ":remove"] |
30 | __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$') | 33 | __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$') |
31 | __expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}") | 34 | __expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}") |
32 | __expand_python_regexp__ = re.compile(r"\${@.+?}") | 35 | __expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}") |
33 | __whitespace_split__ = re.compile(r'(\s)') | 36 | __whitespace_split__ = re.compile(r'(\s)') |
34 | __override_regexp__ = re.compile(r'[a-z0-9]+') | 37 | __override_regexp__ = re.compile(r'[a-z0-9]+') |
35 | 38 | ||
39 | bitbake_renamed_vars = { | ||
40 | "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH", | ||
41 | "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS", | ||
42 | "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS", | ||
43 | "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS", | ||
44 | "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS", | ||
45 | "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS", | ||
46 | "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED", | ||
47 | "BB_STAMP_WHITELIST": "is a deprecated variable and support has been removed", | ||
48 | "BB_STAMP_POLICY": "is a deprecated variable and support has been removed", | ||
49 | } | ||
50 | |||
36 | def infer_caller_details(loginfo, parent = False, varval = True): | 51 | def infer_caller_details(loginfo, parent = False, varval = True): |
37 | """Save the caller the trouble of specifying everything.""" | 52 | """Save the caller the trouble of specifying everything.""" |
38 | # Save effort. | 53 | # Save effort. |
@@ -80,10 +95,11 @@ def infer_caller_details(loginfo, parent = False, varval = True): | |||
80 | loginfo['func'] = func | 95 | loginfo['func'] = func |
81 | 96 | ||
82 | class VariableParse: | 97 | class VariableParse: |
83 | def __init__(self, varname, d, val = None): | 98 | def __init__(self, varname, d, unexpanded_value = None, val = None): |
84 | self.varname = varname | 99 | self.varname = varname |
85 | self.d = d | 100 | self.d = d |
86 | self.value = val | 101 | self.value = val |
102 | self.unexpanded_value = unexpanded_value | ||
87 | 103 | ||
88 | self.references = set() | 104 | self.references = set() |
89 | self.execs = set() | 105 | self.execs = set() |
@@ -107,6 +123,11 @@ class VariableParse: | |||
107 | else: | 123 | else: |
108 | code = match.group()[3:-1] | 124 | code = match.group()[3:-1] |
109 | 125 | ||
126 | # Do not run code that contains one or more unexpanded variables | ||
127 | # instead return the code with the characters we removed put back | ||
128 | if __expand_var_regexp__.findall(code): | ||
129 | return "${@" + code + "}" | ||
130 | |||
110 | if self.varname: | 131 | if self.varname: |
111 | varname = 'Var <%s>' % self.varname | 132 | varname = 'Var <%s>' % self.varname |
112 | else: | 133 | else: |
@@ -132,16 +153,21 @@ class VariableParse: | |||
132 | value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) | 153 | value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) |
133 | return str(value) | 154 | return str(value) |
134 | 155 | ||
135 | |||
136 | class DataContext(dict): | 156 | class DataContext(dict): |
157 | excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe']) | ||
158 | |||
137 | def __init__(self, metadata, **kwargs): | 159 | def __init__(self, metadata, **kwargs): |
138 | self.metadata = metadata | 160 | self.metadata = metadata |
139 | dict.__init__(self, **kwargs) | 161 | dict.__init__(self, **kwargs) |
140 | self['d'] = metadata | 162 | self['d'] = metadata |
163 | self.context = set(bb.utils.get_context()) | ||
141 | 164 | ||
142 | def __missing__(self, key): | 165 | def __missing__(self, key): |
166 | if key in self.excluded or key in self.context: | ||
167 | raise KeyError(key) | ||
168 | |||
143 | value = self.metadata.getVar(key) | 169 | value = self.metadata.getVar(key) |
144 | if value is None or self.metadata.getVarFlag(key, 'func', False): | 170 | if value is None: |
145 | raise KeyError(key) | 171 | raise KeyError(key) |
146 | else: | 172 | else: |
147 | return value | 173 | return value |
@@ -151,6 +177,7 @@ class ExpansionError(Exception): | |||
151 | self.expression = expression | 177 | self.expression = expression |
152 | self.variablename = varname | 178 | self.variablename = varname |
153 | self.exception = exception | 179 | self.exception = exception |
180 | self.varlist = [varname or expression or ""] | ||
154 | if varname: | 181 | if varname: |
155 | if expression: | 182 | if expression: |
156 | self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) | 183 | self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) |
@@ -160,8 +187,14 @@ class ExpansionError(Exception): | |||
160 | self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) | 187 | self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) |
161 | Exception.__init__(self, self.msg) | 188 | Exception.__init__(self, self.msg) |
162 | self.args = (varname, expression, exception) | 189 | self.args = (varname, expression, exception) |
190 | |||
191 | def addVar(self, varname): | ||
192 | if varname: | ||
193 | self.varlist.append(varname) | ||
194 | |||
163 | def __str__(self): | 195 | def __str__(self): |
164 | return self.msg | 196 | chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist) |
197 | return self.msg + chain | ||
165 | 198 | ||
166 | class IncludeHistory(object): | 199 | class IncludeHistory(object): |
167 | def __init__(self, parent = None, filename = '[TOP LEVEL]'): | 200 | def __init__(self, parent = None, filename = '[TOP LEVEL]'): |
@@ -277,7 +310,7 @@ class VariableHistory(object): | |||
277 | for (r, override) in d.overridedata[var]: | 310 | for (r, override) in d.overridedata[var]: |
278 | for event in self.variable(r): | 311 | for event in self.variable(r): |
279 | loginfo = event.copy() | 312 | loginfo = event.copy() |
280 | if 'flag' in loginfo and not loginfo['flag'].startswith("_"): | 313 | if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")): |
281 | continue | 314 | continue |
282 | loginfo['variable'] = var | 315 | loginfo['variable'] = var |
283 | loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op']) | 316 | loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op']) |
@@ -329,6 +362,16 @@ class VariableHistory(object): | |||
329 | lines.append(line) | 362 | lines.append(line) |
330 | return lines | 363 | return lines |
331 | 364 | ||
365 | def get_variable_refs(self, var): | ||
366 | """Return a dict of file/line references""" | ||
367 | var_history = self.variable(var) | ||
368 | refs = {} | ||
369 | for event in var_history: | ||
370 | if event['file'] not in refs: | ||
371 | refs[event['file']] = [] | ||
372 | refs[event['file']].append(event['line']) | ||
373 | return refs | ||
374 | |||
332 | def get_variable_items_files(self, var): | 375 | def get_variable_items_files(self, var): |
333 | """ | 376 | """ |
334 | Use variable history to map items added to a list variable and | 377 | Use variable history to map items added to a list variable and |
@@ -342,7 +385,7 @@ class VariableHistory(object): | |||
342 | for event in history: | 385 | for event in history: |
343 | if 'flag' in event: | 386 | if 'flag' in event: |
344 | continue | 387 | continue |
345 | if event['op'] == '_remove': | 388 | if event['op'] == ':remove': |
346 | continue | 389 | continue |
347 | if isset and event['op'] == 'set?': | 390 | if isset and event['op'] == 'set?': |
348 | continue | 391 | continue |
@@ -363,6 +406,23 @@ class VariableHistory(object): | |||
363 | else: | 406 | else: |
364 | self.variables[var] = [] | 407 | self.variables[var] = [] |
365 | 408 | ||
409 | def _print_rename_error(var, loginfo, renamedvars, fullvar=None): | ||
410 | info = "" | ||
411 | if "file" in loginfo: | ||
412 | info = " file: %s" % loginfo["file"] | ||
413 | if "line" in loginfo: | ||
414 | info += " line: %s" % loginfo["line"] | ||
415 | if fullvar and fullvar != var: | ||
416 | info += " referenced as: %s" % fullvar | ||
417 | if info: | ||
418 | info = " (%s)" % info.strip() | ||
419 | renameinfo = renamedvars[var] | ||
420 | if " " in renameinfo: | ||
421 | # A space signals a string to display instead of a rename | ||
422 | bb.erroronce('Variable %s %s%s' % (var, renameinfo, info)) | ||
423 | else: | ||
424 | bb.erroronce('Variable %s has been renamed to %s%s' % (var, renameinfo, info)) | ||
425 | |||
366 | class DataSmart(MutableMapping): | 426 | class DataSmart(MutableMapping): |
367 | def __init__(self): | 427 | def __init__(self): |
368 | self.dict = {} | 428 | self.dict = {} |
@@ -370,6 +430,8 @@ class DataSmart(MutableMapping): | |||
370 | self.inchistory = IncludeHistory() | 430 | self.inchistory = IncludeHistory() |
371 | self.varhistory = VariableHistory(self) | 431 | self.varhistory = VariableHistory(self) |
372 | self._tracking = False | 432 | self._tracking = False |
433 | self._var_renames = {} | ||
434 | self._var_renames.update(bitbake_renamed_vars) | ||
373 | 435 | ||
374 | self.expand_cache = {} | 436 | self.expand_cache = {} |
375 | 437 | ||
@@ -391,9 +453,9 @@ class DataSmart(MutableMapping): | |||
391 | def expandWithRefs(self, s, varname): | 453 | def expandWithRefs(self, s, varname): |
392 | 454 | ||
393 | if not isinstance(s, str): # sanity check | 455 | if not isinstance(s, str): # sanity check |
394 | return VariableParse(varname, self, s) | 456 | return VariableParse(varname, self, s, s) |
395 | 457 | ||
396 | varparse = VariableParse(varname, self) | 458 | varparse = VariableParse(varname, self, s) |
397 | 459 | ||
398 | while s.find('${') != -1: | 460 | while s.find('${') != -1: |
399 | olds = s | 461 | olds = s |
@@ -403,14 +465,17 @@ class DataSmart(MutableMapping): | |||
403 | s = __expand_python_regexp__.sub(varparse.python_sub, s) | 465 | s = __expand_python_regexp__.sub(varparse.python_sub, s) |
404 | except SyntaxError as e: | 466 | except SyntaxError as e: |
405 | # Likely unmatched brackets, just don't expand the expression | 467 | # Likely unmatched brackets, just don't expand the expression |
406 | if e.msg != "EOL while scanning string literal": | 468 | if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"): |
407 | raise | 469 | raise |
408 | if s == olds: | 470 | if s == olds: |
409 | break | 471 | break |
410 | except ExpansionError: | 472 | except ExpansionError as e: |
473 | e.addVar(varname) | ||
411 | raise | 474 | raise |
412 | except bb.parse.SkipRecipe: | 475 | except bb.parse.SkipRecipe: |
413 | raise | 476 | raise |
477 | except bb.BBHandledException: | ||
478 | raise | ||
414 | except Exception as exc: | 479 | except Exception as exc: |
415 | tb = sys.exc_info()[2] | 480 | tb = sys.exc_info()[2] |
416 | raise ExpansionError(varname, s, exc).with_traceback(tb) from exc | 481 | raise ExpansionError(varname, s, exc).with_traceback(tb) from exc |
@@ -422,24 +487,19 @@ class DataSmart(MutableMapping): | |||
422 | def expand(self, s, varname = None): | 487 | def expand(self, s, varname = None): |
423 | return self.expandWithRefs(s, varname).value | 488 | return self.expandWithRefs(s, varname).value |
424 | 489 | ||
425 | def finalize(self, parent = False): | ||
426 | return | ||
427 | |||
428 | def internal_finalize(self, parent = False): | ||
429 | """Performs final steps upon the datastore, including application of overrides""" | ||
430 | self.overrides = None | ||
431 | |||
432 | def need_overrides(self): | 490 | def need_overrides(self): |
433 | if self.overrides is not None: | 491 | if self.overrides is not None: |
434 | return | 492 | return |
435 | if self.inoverride: | 493 | if self.inoverride: |
436 | return | 494 | return |
495 | overrride_stack = [] | ||
437 | for count in range(5): | 496 | for count in range(5): |
438 | self.inoverride = True | 497 | self.inoverride = True |
439 | # Can end up here recursively so setup dummy values | 498 | # Can end up here recursively so setup dummy values |
440 | self.overrides = [] | 499 | self.overrides = [] |
441 | self.overridesset = set() | 500 | self.overridesset = set() |
442 | self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] | 501 | self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] |
502 | overrride_stack.append(self.overrides) | ||
443 | self.overridesset = set(self.overrides) | 503 | self.overridesset = set(self.overrides) |
444 | self.inoverride = False | 504 | self.inoverride = False |
445 | self.expand_cache = {} | 505 | self.expand_cache = {} |
@@ -449,7 +509,7 @@ class DataSmart(MutableMapping): | |||
449 | self.overrides = newoverrides | 509 | self.overrides = newoverrides |
450 | self.overridesset = set(self.overrides) | 510 | self.overridesset = set(self.overrides) |
451 | else: | 511 | else: |
452 | bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.") | 512 | bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in overrride_stack)) |
453 | 513 | ||
454 | def initVar(self, var): | 514 | def initVar(self, var): |
455 | self.expand_cache = {} | 515 | self.expand_cache = {} |
@@ -460,27 +520,44 @@ class DataSmart(MutableMapping): | |||
460 | dest = self.dict | 520 | dest = self.dict |
461 | while dest: | 521 | while dest: |
462 | if var in dest: | 522 | if var in dest: |
463 | return dest[var], self.overridedata.get(var, None) | 523 | return dest[var] |
464 | 524 | ||
465 | if "_data" not in dest: | 525 | if "_data" not in dest: |
466 | break | 526 | break |
467 | dest = dest["_data"] | 527 | dest = dest["_data"] |
468 | return None, self.overridedata.get(var, None) | 528 | return None |
469 | 529 | ||
470 | def _makeShadowCopy(self, var): | 530 | def _makeShadowCopy(self, var): |
471 | if var in self.dict: | 531 | if var in self.dict: |
472 | return | 532 | return |
473 | 533 | ||
474 | local_var, _ = self._findVar(var) | 534 | local_var = self._findVar(var) |
475 | 535 | ||
476 | if local_var: | 536 | if local_var: |
477 | self.dict[var] = copy.copy(local_var) | 537 | self.dict[var] = copy.copy(local_var) |
478 | else: | 538 | else: |
479 | self.initVar(var) | 539 | self.initVar(var) |
480 | 540 | ||
541 | def hasOverrides(self, var): | ||
542 | return var in self.overridedata | ||
481 | 543 | ||
482 | def setVar(self, var, value, **loginfo): | 544 | def setVar(self, var, value, **loginfo): |
483 | #print("var=" + str(var) + " val=" + str(value)) | 545 | #print("var=" + str(var) + " val=" + str(value)) |
546 | |||
547 | if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var): | ||
548 | info = "%s" % var | ||
549 | if "file" in loginfo: | ||
550 | info += " file: %s" % loginfo["file"] | ||
551 | if "line" in loginfo: | ||
552 | info += " line: %s" % loginfo["line"] | ||
553 | bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info) | ||
554 | |||
555 | shortvar = var.split(":", 1)[0] | ||
556 | if shortvar in self._var_renames: | ||
557 | _print_rename_error(shortvar, loginfo, self._var_renames, fullvar=var) | ||
558 | # Mark that we have seen a renamed variable | ||
559 | self.setVar("_FAILPARSINGERRORHANDLED", True) | ||
560 | |||
484 | self.expand_cache = {} | 561 | self.expand_cache = {} |
485 | parsing=False | 562 | parsing=False |
486 | if 'parsing' in loginfo: | 563 | if 'parsing' in loginfo: |
@@ -509,7 +586,7 @@ class DataSmart(MutableMapping): | |||
509 | # pay the cookie monster | 586 | # pay the cookie monster |
510 | 587 | ||
511 | # more cookies for the cookie monster | 588 | # more cookies for the cookie monster |
512 | if '_' in var: | 589 | if ':' in var: |
513 | self._setvar_update_overrides(base, **loginfo) | 590 | self._setvar_update_overrides(base, **loginfo) |
514 | 591 | ||
515 | if base in self.overridevars: | 592 | if base in self.overridevars: |
@@ -520,27 +597,27 @@ class DataSmart(MutableMapping): | |||
520 | self._makeShadowCopy(var) | 597 | self._makeShadowCopy(var) |
521 | 598 | ||
522 | if not parsing: | 599 | if not parsing: |
523 | if "_append" in self.dict[var]: | 600 | if ":append" in self.dict[var]: |
524 | del self.dict[var]["_append"] | 601 | del self.dict[var][":append"] |
525 | if "_prepend" in self.dict[var]: | 602 | if ":prepend" in self.dict[var]: |
526 | del self.dict[var]["_prepend"] | 603 | del self.dict[var][":prepend"] |
527 | if "_remove" in self.dict[var]: | 604 | if ":remove" in self.dict[var]: |
528 | del self.dict[var]["_remove"] | 605 | del self.dict[var][":remove"] |
529 | if var in self.overridedata: | 606 | if var in self.overridedata: |
530 | active = [] | 607 | active = [] |
531 | self.need_overrides() | 608 | self.need_overrides() |
532 | for (r, o) in self.overridedata[var]: | 609 | for (r, o) in self.overridedata[var]: |
533 | if o in self.overridesset: | 610 | if o in self.overridesset: |
534 | active.append(r) | 611 | active.append(r) |
535 | elif "_" in o: | 612 | elif ":" in o: |
536 | if set(o.split("_")).issubset(self.overridesset): | 613 | if set(o.split(":")).issubset(self.overridesset): |
537 | active.append(r) | 614 | active.append(r) |
538 | for a in active: | 615 | for a in active: |
539 | self.delVar(a) | 616 | self.delVar(a) |
540 | del self.overridedata[var] | 617 | del self.overridedata[var] |
541 | 618 | ||
542 | # more cookies for the cookie monster | 619 | # more cookies for the cookie monster |
543 | if '_' in var: | 620 | if ':' in var: |
544 | self._setvar_update_overrides(var, **loginfo) | 621 | self._setvar_update_overrides(var, **loginfo) |
545 | 622 | ||
546 | # setting var | 623 | # setting var |
@@ -562,12 +639,12 @@ class DataSmart(MutableMapping): | |||
562 | nextnew.update(vardata.references) | 639 | nextnew.update(vardata.references) |
563 | nextnew.update(vardata.contains.keys()) | 640 | nextnew.update(vardata.contains.keys()) |
564 | new = nextnew | 641 | new = nextnew |
565 | self.internal_finalize(True) | 642 | self.overrides = None |
566 | 643 | ||
567 | def _setvar_update_overrides(self, var, **loginfo): | 644 | def _setvar_update_overrides(self, var, **loginfo): |
568 | # aka pay the cookie monster | 645 | # aka pay the cookie monster |
569 | override = var[var.rfind('_')+1:] | 646 | override = var[var.rfind(':')+1:] |
570 | shortvar = var[:var.rfind('_')] | 647 | shortvar = var[:var.rfind(':')] |
571 | while override and __override_regexp__.match(override): | 648 | while override and __override_regexp__.match(override): |
572 | if shortvar not in self.overridedata: | 649 | if shortvar not in self.overridedata: |
573 | self.overridedata[shortvar] = [] | 650 | self.overridedata[shortvar] = [] |
@@ -576,9 +653,9 @@ class DataSmart(MutableMapping): | |||
576 | self.overridedata[shortvar] = list(self.overridedata[shortvar]) | 653 | self.overridedata[shortvar] = list(self.overridedata[shortvar]) |
577 | self.overridedata[shortvar].append([var, override]) | 654 | self.overridedata[shortvar].append([var, override]) |
578 | override = None | 655 | override = None |
579 | if "_" in shortvar: | 656 | if ":" in shortvar: |
580 | override = var[shortvar.rfind('_')+1:] | 657 | override = var[shortvar.rfind(':')+1:] |
581 | shortvar = var[:shortvar.rfind('_')] | 658 | shortvar = var[:shortvar.rfind(':')] |
582 | if len(shortvar) == 0: | 659 | if len(shortvar) == 0: |
583 | override = None | 660 | override = None |
584 | 661 | ||
@@ -602,10 +679,11 @@ class DataSmart(MutableMapping): | |||
602 | self.varhistory.record(**loginfo) | 679 | self.varhistory.record(**loginfo) |
603 | self.setVar(newkey, val, ignore=True, parsing=True) | 680 | self.setVar(newkey, val, ignore=True, parsing=True) |
604 | 681 | ||
605 | for i in (__setvar_keyword__): | 682 | srcflags = self.getVarFlags(key, False, True) or {} |
606 | src = self.getVarFlag(key, i, False) | 683 | for i in srcflags: |
607 | if src is None: | 684 | if i not in (__setvar_keyword__): |
608 | continue | 685 | continue |
686 | src = srcflags[i] | ||
609 | 687 | ||
610 | dest = self.getVarFlag(newkey, i, False) or [] | 688 | dest = self.getVarFlag(newkey, i, False) or [] |
611 | dest.extend(src) | 689 | dest.extend(src) |
@@ -617,7 +695,7 @@ class DataSmart(MutableMapping): | |||
617 | self.overridedata[newkey].append([v.replace(key, newkey), o]) | 695 | self.overridedata[newkey].append([v.replace(key, newkey), o]) |
618 | self.renameVar(v, v.replace(key, newkey)) | 696 | self.renameVar(v, v.replace(key, newkey)) |
619 | 697 | ||
620 | if '_' in newkey and val is None: | 698 | if ':' in newkey and val is None: |
621 | self._setvar_update_overrides(newkey, **loginfo) | 699 | self._setvar_update_overrides(newkey, **loginfo) |
622 | 700 | ||
623 | loginfo['variable'] = key | 701 | loginfo['variable'] = key |
@@ -629,12 +707,12 @@ class DataSmart(MutableMapping): | |||
629 | def appendVar(self, var, value, **loginfo): | 707 | def appendVar(self, var, value, **loginfo): |
630 | loginfo['op'] = 'append' | 708 | loginfo['op'] = 'append' |
631 | self.varhistory.record(**loginfo) | 709 | self.varhistory.record(**loginfo) |
632 | self.setVar(var + "_append", value, ignore=True, parsing=True) | 710 | self.setVar(var + ":append", value, ignore=True, parsing=True) |
633 | 711 | ||
634 | def prependVar(self, var, value, **loginfo): | 712 | def prependVar(self, var, value, **loginfo): |
635 | loginfo['op'] = 'prepend' | 713 | loginfo['op'] = 'prepend' |
636 | self.varhistory.record(**loginfo) | 714 | self.varhistory.record(**loginfo) |
637 | self.setVar(var + "_prepend", value, ignore=True, parsing=True) | 715 | self.setVar(var + ":prepend", value, ignore=True, parsing=True) |
638 | 716 | ||
639 | def delVar(self, var, **loginfo): | 717 | def delVar(self, var, **loginfo): |
640 | self.expand_cache = {} | 718 | self.expand_cache = {} |
@@ -645,10 +723,10 @@ class DataSmart(MutableMapping): | |||
645 | self.dict[var] = {} | 723 | self.dict[var] = {} |
646 | if var in self.overridedata: | 724 | if var in self.overridedata: |
647 | del self.overridedata[var] | 725 | del self.overridedata[var] |
648 | if '_' in var: | 726 | if ':' in var: |
649 | override = var[var.rfind('_')+1:] | 727 | override = var[var.rfind(':')+1:] |
650 | shortvar = var[:var.rfind('_')] | 728 | shortvar = var[:var.rfind(':')] |
651 | while override and override.islower(): | 729 | while override and __override_regexp__.match(override): |
652 | try: | 730 | try: |
653 | if shortvar in self.overridedata: | 731 | if shortvar in self.overridedata: |
654 | # Force CoW by recreating the list first | 732 | # Force CoW by recreating the list first |
@@ -657,15 +735,23 @@ class DataSmart(MutableMapping): | |||
657 | except ValueError as e: | 735 | except ValueError as e: |
658 | pass | 736 | pass |
659 | override = None | 737 | override = None |
660 | if "_" in shortvar: | 738 | if ":" in shortvar: |
661 | override = var[shortvar.rfind('_')+1:] | 739 | override = var[shortvar.rfind(':')+1:] |
662 | shortvar = var[:shortvar.rfind('_')] | 740 | shortvar = var[:shortvar.rfind(':')] |
663 | if len(shortvar) == 0: | 741 | if len(shortvar) == 0: |
664 | override = None | 742 | override = None |
665 | 743 | ||
666 | def setVarFlag(self, var, flag, value, **loginfo): | 744 | def setVarFlag(self, var, flag, value, **loginfo): |
667 | self.expand_cache = {} | 745 | self.expand_cache = {} |
668 | 746 | ||
747 | if var == "BB_RENAMED_VARIABLES": | ||
748 | self._var_renames[flag] = value | ||
749 | |||
750 | if var in self._var_renames: | ||
751 | _print_rename_error(var, loginfo, self._var_renames) | ||
752 | # Mark that we have seen a renamed variable | ||
753 | self.setVar("_FAILPARSINGERRORHANDLED", True) | ||
754 | |||
669 | if 'op' not in loginfo: | 755 | if 'op' not in loginfo: |
670 | loginfo['op'] = "set" | 756 | loginfo['op'] = "set" |
671 | loginfo['flag'] = flag | 757 | loginfo['flag'] = flag |
@@ -674,7 +760,7 @@ class DataSmart(MutableMapping): | |||
674 | self._makeShadowCopy(var) | 760 | self._makeShadowCopy(var) |
675 | self.dict[var][flag] = value | 761 | self.dict[var][flag] = value |
676 | 762 | ||
677 | if flag == "_defaultval" and '_' in var: | 763 | if flag == "_defaultval" and ':' in var: |
678 | self._setvar_update_overrides(var, **loginfo) | 764 | self._setvar_update_overrides(var, **loginfo) |
679 | if flag == "_defaultval" and var in self.overridevars: | 765 | if flag == "_defaultval" and var in self.overridevars: |
680 | self._setvar_update_overridevars(var, value) | 766 | self._setvar_update_overridevars(var, value) |
@@ -695,22 +781,27 @@ class DataSmart(MutableMapping): | |||
695 | return None | 781 | return None |
696 | cachename = var + "[" + flag + "]" | 782 | cachename = var + "[" + flag + "]" |
697 | 783 | ||
784 | if not expand and retparser and cachename in self.expand_cache: | ||
785 | return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename] | ||
786 | |||
698 | if expand and cachename in self.expand_cache: | 787 | if expand and cachename in self.expand_cache: |
699 | return self.expand_cache[cachename].value | 788 | return self.expand_cache[cachename].value |
700 | 789 | ||
701 | local_var, overridedata = self._findVar(var) | 790 | local_var = self._findVar(var) |
702 | value = None | 791 | value = None |
703 | removes = set() | 792 | removes = set() |
704 | if flag == "_content" and overridedata is not None and not parsing: | 793 | if flag == "_content" and not parsing: |
794 | overridedata = self.overridedata.get(var, None) | ||
795 | if flag == "_content" and not parsing and overridedata is not None: | ||
705 | match = False | 796 | match = False |
706 | active = {} | 797 | active = {} |
707 | self.need_overrides() | 798 | self.need_overrides() |
708 | for (r, o) in overridedata: | 799 | for (r, o) in overridedata: |
709 | # What about double overrides both with "_" in the name? | 800 | # FIXME What about double overrides both with "_" in the name? |
710 | if o in self.overridesset: | 801 | if o in self.overridesset: |
711 | active[o] = r | 802 | active[o] = r |
712 | elif "_" in o: | 803 | elif ":" in o: |
713 | if set(o.split("_")).issubset(self.overridesset): | 804 | if set(o.split(":")).issubset(self.overridesset): |
714 | active[o] = r | 805 | active[o] = r |
715 | 806 | ||
716 | mod = True | 807 | mod = True |
@@ -718,10 +809,10 @@ class DataSmart(MutableMapping): | |||
718 | mod = False | 809 | mod = False |
719 | for o in self.overrides: | 810 | for o in self.overrides: |
720 | for a in active.copy(): | 811 | for a in active.copy(): |
721 | if a.endswith("_" + o): | 812 | if a.endswith(":" + o): |
722 | t = active[a] | 813 | t = active[a] |
723 | del active[a] | 814 | del active[a] |
724 | active[a.replace("_" + o, "")] = t | 815 | active[a.replace(":" + o, "")] = t |
725 | mod = True | 816 | mod = True |
726 | elif a == o: | 817 | elif a == o: |
727 | match = active[a] | 818 | match = active[a] |
@@ -740,31 +831,31 @@ class DataSmart(MutableMapping): | |||
740 | value = copy.copy(local_var["_defaultval"]) | 831 | value = copy.copy(local_var["_defaultval"]) |
741 | 832 | ||
742 | 833 | ||
743 | if flag == "_content" and local_var is not None and "_append" in local_var and not parsing: | 834 | if flag == "_content" and local_var is not None and ":append" in local_var and not parsing: |
744 | if not value: | ||
745 | value = "" | ||
746 | self.need_overrides() | 835 | self.need_overrides() |
747 | for (r, o) in local_var["_append"]: | 836 | for (r, o) in local_var[":append"]: |
748 | match = True | 837 | match = True |
749 | if o: | 838 | if o: |
750 | for o2 in o.split("_"): | 839 | for o2 in o.split(":"): |
751 | if not o2 in self.overrides: | 840 | if not o2 in self.overrides: |
752 | match = False | 841 | match = False |
753 | if match: | 842 | if match: |
843 | if value is None: | ||
844 | value = "" | ||
754 | value = value + r | 845 | value = value + r |
755 | 846 | ||
756 | if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing: | 847 | if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing: |
757 | if not value: | ||
758 | value = "" | ||
759 | self.need_overrides() | 848 | self.need_overrides() |
760 | for (r, o) in local_var["_prepend"]: | 849 | for (r, o) in local_var[":prepend"]: |
761 | 850 | ||
762 | match = True | 851 | match = True |
763 | if o: | 852 | if o: |
764 | for o2 in o.split("_"): | 853 | for o2 in o.split(":"): |
765 | if not o2 in self.overrides: | 854 | if not o2 in self.overrides: |
766 | match = False | 855 | match = False |
767 | if match: | 856 | if match: |
857 | if value is None: | ||
858 | value = "" | ||
768 | value = r + value | 859 | value = r + value |
769 | 860 | ||
770 | parser = None | 861 | parser = None |
@@ -773,12 +864,12 @@ class DataSmart(MutableMapping): | |||
773 | if expand: | 864 | if expand: |
774 | value = parser.value | 865 | value = parser.value |
775 | 866 | ||
776 | if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing: | 867 | if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing: |
777 | self.need_overrides() | 868 | self.need_overrides() |
778 | for (r, o) in local_var["_remove"]: | 869 | for (r, o) in local_var[":remove"]: |
779 | match = True | 870 | match = True |
780 | if o: | 871 | if o: |
781 | for o2 in o.split("_"): | 872 | for o2 in o.split(":"): |
782 | if not o2 in self.overrides: | 873 | if not o2 in self.overrides: |
783 | match = False | 874 | match = False |
784 | if match: | 875 | if match: |
@@ -791,7 +882,7 @@ class DataSmart(MutableMapping): | |||
791 | expanded_removes[r] = self.expand(r).split() | 882 | expanded_removes[r] = self.expand(r).split() |
792 | 883 | ||
793 | parser.removes = set() | 884 | parser.removes = set() |
794 | val = "" | 885 | val = [] |
795 | for v in __whitespace_split__.split(parser.value): | 886 | for v in __whitespace_split__.split(parser.value): |
796 | skip = False | 887 | skip = False |
797 | for r in removes: | 888 | for r in removes: |
@@ -800,8 +891,8 @@ class DataSmart(MutableMapping): | |||
800 | skip = True | 891 | skip = True |
801 | if skip: | 892 | if skip: |
802 | continue | 893 | continue |
803 | val = val + v | 894 | val.append(v) |
804 | parser.value = val | 895 | parser.value = "".join(val) |
805 | if expand: | 896 | if expand: |
806 | value = parser.value | 897 | value = parser.value |
807 | 898 | ||
@@ -816,7 +907,7 @@ class DataSmart(MutableMapping): | |||
816 | def delVarFlag(self, var, flag, **loginfo): | 907 | def delVarFlag(self, var, flag, **loginfo): |
817 | self.expand_cache = {} | 908 | self.expand_cache = {} |
818 | 909 | ||
819 | local_var, _ = self._findVar(var) | 910 | local_var = self._findVar(var) |
820 | if not local_var: | 911 | if not local_var: |
821 | return | 912 | return |
822 | if not var in self.dict: | 913 | if not var in self.dict: |
@@ -859,12 +950,12 @@ class DataSmart(MutableMapping): | |||
859 | self.dict[var][i] = flags[i] | 950 | self.dict[var][i] = flags[i] |
860 | 951 | ||
861 | def getVarFlags(self, var, expand = False, internalflags=False): | 952 | def getVarFlags(self, var, expand = False, internalflags=False): |
862 | local_var, _ = self._findVar(var) | 953 | local_var = self._findVar(var) |
863 | flags = {} | 954 | flags = {} |
864 | 955 | ||
865 | if local_var: | 956 | if local_var: |
866 | for i in local_var: | 957 | for i in local_var: |
867 | if i.startswith("_") and not internalflags: | 958 | if i.startswith(("_", ":")) and not internalflags: |
868 | continue | 959 | continue |
869 | flags[i] = local_var[i] | 960 | flags[i] = local_var[i] |
870 | if expand and i in expand: | 961 | if expand and i in expand: |
@@ -905,6 +996,7 @@ class DataSmart(MutableMapping): | |||
905 | data.inchistory = self.inchistory.copy() | 996 | data.inchistory = self.inchistory.copy() |
906 | 997 | ||
907 | data._tracking = self._tracking | 998 | data._tracking = self._tracking |
999 | data._var_renames = self._var_renames | ||
908 | 1000 | ||
909 | data.overrides = None | 1001 | data.overrides = None |
910 | data.overridevars = copy.copy(self.overridevars) | 1002 | data.overridevars = copy.copy(self.overridevars) |
@@ -927,7 +1019,7 @@ class DataSmart(MutableMapping): | |||
927 | value = self.getVar(variable, False) | 1019 | value = self.getVar(variable, False) |
928 | for key in keys: | 1020 | for key in keys: |
929 | referrervalue = self.getVar(key, False) | 1021 | referrervalue = self.getVar(key, False) |
930 | if referrervalue and ref in referrervalue: | 1022 | if referrervalue and isinstance(referrervalue, str) and ref in referrervalue: |
931 | self.setVar(key, referrervalue.replace(ref, value)) | 1023 | self.setVar(key, referrervalue.replace(ref, value)) |
932 | 1024 | ||
933 | def localkeys(self): | 1025 | def localkeys(self): |
@@ -962,8 +1054,8 @@ class DataSmart(MutableMapping): | |||
962 | for (r, o) in self.overridedata[var]: | 1054 | for (r, o) in self.overridedata[var]: |
963 | if o in self.overridesset: | 1055 | if o in self.overridesset: |
964 | overrides.add(var) | 1056 | overrides.add(var) |
965 | elif "_" in o: | 1057 | elif ":" in o: |
966 | if set(o.split("_")).issubset(self.overridesset): | 1058 | if set(o.split(":")).issubset(self.overridesset): |
967 | overrides.add(var) | 1059 | overrides.add(var) |
968 | 1060 | ||
969 | for k in keylist(self.dict): | 1061 | for k in keylist(self.dict): |
@@ -993,10 +1085,10 @@ class DataSmart(MutableMapping): | |||
993 | d = self.createCopy() | 1085 | d = self.createCopy() |
994 | bb.data.expandKeys(d) | 1086 | bb.data.expandKeys(d) |
995 | 1087 | ||
996 | config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split()) | 1088 | config_ignore_vars = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split()) |
997 | keys = set(key for key in iter(d) if not key.startswith("__")) | 1089 | keys = set(key for key in iter(d) if not key.startswith("__")) |
998 | for key in keys: | 1090 | for key in keys: |
999 | if key in config_whitelist: | 1091 | if key in config_ignore_vars: |
1000 | continue | 1092 | continue |
1001 | 1093 | ||
1002 | value = d.getVar(key, False) or "" | 1094 | value = d.getVar(key, False) or "" |
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py index 23e1f3187b..4761c86880 100644 --- a/bitbake/lib/bb/event.py +++ b/bitbake/lib/bb/event.py | |||
@@ -40,7 +40,7 @@ class HeartbeatEvent(Event): | |||
40 | """Triggered at regular time intervals of 10 seconds. Other events can fire much more often | 40 | """Triggered at regular time intervals of 10 seconds. Other events can fire much more often |
41 | (runQueueTaskStarted when there are many short tasks) or not at all for long periods | 41 | (runQueueTaskStarted when there are many short tasks) or not at all for long periods |
42 | of time (again runQueueTaskStarted, when there is just one long-running task), so this | 42 | of time (again runQueueTaskStarted, when there is just one long-running task), so this |
43 | event is more suitable for doing some task-independent work occassionally.""" | 43 | event is more suitable for doing some task-independent work occasionally.""" |
44 | def __init__(self, time): | 44 | def __init__(self, time): |
45 | Event.__init__(self) | 45 | Event.__init__(self) |
46 | self.time = time | 46 | self.time = time |
@@ -68,29 +68,39 @@ _catchall_handlers = {} | |||
68 | _eventfilter = None | 68 | _eventfilter = None |
69 | _uiready = False | 69 | _uiready = False |
70 | _thread_lock = threading.Lock() | 70 | _thread_lock = threading.Lock() |
71 | _thread_lock_enabled = False | 71 | _heartbeat_enabled = False |
72 | 72 | _should_exit = threading.Event() | |
73 | if hasattr(__builtins__, '__setitem__'): | ||
74 | builtins = __builtins__ | ||
75 | else: | ||
76 | builtins = __builtins__.__dict__ | ||
77 | 73 | ||
78 | def enable_threadlock(): | 74 | def enable_threadlock(): |
79 | global _thread_lock_enabled | 75 | # Always needed now |
80 | _thread_lock_enabled = True | 76 | return |
81 | 77 | ||
82 | def disable_threadlock(): | 78 | def disable_threadlock(): |
83 | global _thread_lock_enabled | 79 | # Always needed now |
84 | _thread_lock_enabled = False | 80 | return |
81 | |||
82 | def enable_heartbeat(): | ||
83 | global _heartbeat_enabled | ||
84 | _heartbeat_enabled = True | ||
85 | |||
86 | def disable_heartbeat(): | ||
87 | global _heartbeat_enabled | ||
88 | _heartbeat_enabled = False | ||
89 | |||
90 | # | ||
91 | # In long running code, this function should be called periodically | ||
92 | # to check if we should exit due to an interuption (.e.g Ctrl+C from the UI) | ||
93 | # | ||
94 | def check_for_interrupts(d): | ||
95 | global _should_exit | ||
96 | if _should_exit.is_set(): | ||
97 | bb.warn("Exiting due to interrupt.") | ||
98 | raise bb.BBHandledException() | ||
85 | 99 | ||
86 | def execute_handler(name, handler, event, d): | 100 | def execute_handler(name, handler, event, d): |
87 | event.data = d | 101 | event.data = d |
88 | addedd = False | ||
89 | if 'd' not in builtins: | ||
90 | builtins['d'] = d | ||
91 | addedd = True | ||
92 | try: | 102 | try: |
93 | ret = handler(event) | 103 | ret = handler(event, d) |
94 | except (bb.parse.SkipRecipe, bb.BBHandledException): | 104 | except (bb.parse.SkipRecipe, bb.BBHandledException): |
95 | raise | 105 | raise |
96 | except Exception: | 106 | except Exception: |
@@ -104,8 +114,7 @@ def execute_handler(name, handler, event, d): | |||
104 | raise | 114 | raise |
105 | finally: | 115 | finally: |
106 | del event.data | 116 | del event.data |
107 | if addedd: | 117 | |
108 | del builtins['d'] | ||
109 | 118 | ||
110 | def fire_class_handlers(event, d): | 119 | def fire_class_handlers(event, d): |
111 | if isinstance(event, logging.LogRecord): | 120 | if isinstance(event, logging.LogRecord): |
@@ -118,7 +127,7 @@ def fire_class_handlers(event, d): | |||
118 | if _eventfilter: | 127 | if _eventfilter: |
119 | if not _eventfilter(name, handler, event, d): | 128 | if not _eventfilter(name, handler, event, d): |
120 | continue | 129 | continue |
121 | if d and not name in (d.getVar("__BBHANDLERS_MC") or []): | 130 | if d is not None and not name in (d.getVar("__BBHANDLERS_MC") or set()): |
122 | continue | 131 | continue |
123 | execute_handler(name, handler, event, d) | 132 | execute_handler(name, handler, event, d) |
124 | 133 | ||
@@ -132,8 +141,14 @@ def print_ui_queue(): | |||
132 | if not _uiready: | 141 | if not _uiready: |
133 | from bb.msg import BBLogFormatter | 142 | from bb.msg import BBLogFormatter |
134 | # Flush any existing buffered content | 143 | # Flush any existing buffered content |
135 | sys.stdout.flush() | 144 | try: |
136 | sys.stderr.flush() | 145 | sys.stdout.flush() |
146 | except: | ||
147 | pass | ||
148 | try: | ||
149 | sys.stderr.flush() | ||
150 | except: | ||
151 | pass | ||
137 | stdout = logging.StreamHandler(sys.stdout) | 152 | stdout = logging.StreamHandler(sys.stdout) |
138 | stderr = logging.StreamHandler(sys.stderr) | 153 | stderr = logging.StreamHandler(sys.stderr) |
139 | formatter = BBLogFormatter("%(levelname)s: %(message)s") | 154 | formatter = BBLogFormatter("%(levelname)s: %(message)s") |
@@ -174,36 +189,30 @@ def print_ui_queue(): | |||
174 | 189 | ||
175 | def fire_ui_handlers(event, d): | 190 | def fire_ui_handlers(event, d): |
176 | global _thread_lock | 191 | global _thread_lock |
177 | global _thread_lock_enabled | ||
178 | 192 | ||
179 | if not _uiready: | 193 | if not _uiready: |
180 | # No UI handlers registered yet, queue up the messages | 194 | # No UI handlers registered yet, queue up the messages |
181 | ui_queue.append(event) | 195 | ui_queue.append(event) |
182 | return | 196 | return |
183 | 197 | ||
184 | if _thread_lock_enabled: | 198 | with bb.utils.lock_timeout(_thread_lock): |
185 | _thread_lock.acquire() | 199 | errors = [] |
186 | 200 | for h in _ui_handlers: | |
187 | errors = [] | 201 | #print "Sending event %s" % event |
188 | for h in _ui_handlers: | 202 | try: |
189 | #print "Sending event %s" % event | 203 | if not _ui_logfilters[h].filter(event): |
190 | try: | 204 | continue |
191 | if not _ui_logfilters[h].filter(event): | 205 | # We use pickle here since it better handles object instances |
192 | continue | 206 | # which xmlrpc's marshaller does not. Events *must* be serializable |
193 | # We use pickle here since it better handles object instances | 207 | # by pickle. |
194 | # which xmlrpc's marshaller does not. Events *must* be serializable | 208 | if hasattr(_ui_handlers[h].event, "sendpickle"): |
195 | # by pickle. | 209 | _ui_handlers[h].event.sendpickle((pickle.dumps(event))) |
196 | if hasattr(_ui_handlers[h].event, "sendpickle"): | 210 | else: |
197 | _ui_handlers[h].event.sendpickle((pickle.dumps(event))) | 211 | _ui_handlers[h].event.send(event) |
198 | else: | 212 | except: |
199 | _ui_handlers[h].event.send(event) | 213 | errors.append(h) |
200 | except: | 214 | for h in errors: |
201 | errors.append(h) | 215 | del _ui_handlers[h] |
202 | for h in errors: | ||
203 | del _ui_handlers[h] | ||
204 | |||
205 | if _thread_lock_enabled: | ||
206 | _thread_lock.release() | ||
207 | 216 | ||
208 | def fire(event, d): | 217 | def fire(event, d): |
209 | """Fire off an Event""" | 218 | """Fire off an Event""" |
@@ -232,26 +241,31 @@ noop = lambda _: None | |||
232 | def register(name, handler, mask=None, filename=None, lineno=None, data=None): | 241 | def register(name, handler, mask=None, filename=None, lineno=None, data=None): |
233 | """Register an Event handler""" | 242 | """Register an Event handler""" |
234 | 243 | ||
235 | if data and data.getVar("BB_CURRENT_MC"): | 244 | if data is not None and data.getVar("BB_CURRENT_MC"): |
236 | mc = data.getVar("BB_CURRENT_MC") | 245 | mc = data.getVar("BB_CURRENT_MC") |
237 | name = '%s%s' % (mc.replace('-', '_'), name) | 246 | name = '%s%s' % (mc.replace('-', '_'), name) |
238 | 247 | ||
239 | # already registered | 248 | # already registered |
240 | if name in _handlers: | 249 | if name in _handlers: |
250 | if data is not None: | ||
251 | bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set()) | ||
252 | bbhands_mc.add(name) | ||
253 | data.setVar("__BBHANDLERS_MC", bbhands_mc) | ||
241 | return AlreadyRegistered | 254 | return AlreadyRegistered |
242 | 255 | ||
243 | if handler is not None: | 256 | if handler is not None: |
244 | # handle string containing python code | 257 | # handle string containing python code |
245 | if isinstance(handler, str): | 258 | if isinstance(handler, str): |
246 | tmp = "def %s(e):\n%s" % (name, handler) | 259 | tmp = "def %s(e, d):\n%s" % (name, handler) |
260 | # Inject empty lines to make code match lineno in filename | ||
261 | if lineno is not None: | ||
262 | tmp = "\n" * (lineno-1) + tmp | ||
247 | try: | 263 | try: |
248 | code = bb.methodpool.compile_cache(tmp) | 264 | code = bb.methodpool.compile_cache(tmp) |
249 | if not code: | 265 | if not code: |
250 | if filename is None: | 266 | if filename is None: |
251 | filename = "%s(e)" % name | 267 | filename = "%s(e, d)" % name |
252 | code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST) | 268 | code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST) |
253 | if lineno is not None: | ||
254 | ast.increment_lineno(code, lineno-1) | ||
255 | code = compile(code, filename, "exec") | 269 | code = compile(code, filename, "exec") |
256 | bb.methodpool.compile_cache_add(tmp, code) | 270 | bb.methodpool.compile_cache_add(tmp, code) |
257 | except SyntaxError: | 271 | except SyntaxError: |
@@ -274,16 +288,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None): | |||
274 | _event_handler_map[m] = {} | 288 | _event_handler_map[m] = {} |
275 | _event_handler_map[m][name] = True | 289 | _event_handler_map[m][name] = True |
276 | 290 | ||
277 | if data: | 291 | if data is not None: |
278 | bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) | 292 | bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set()) |
279 | bbhands_mc.append(name) | 293 | bbhands_mc.add(name) |
280 | data.setVar("__BBHANDLERS_MC", bbhands_mc) | 294 | data.setVar("__BBHANDLERS_MC", bbhands_mc) |
281 | 295 | ||
282 | return Registered | 296 | return Registered |
283 | 297 | ||
284 | def remove(name, handler, data=None): | 298 | def remove(name, handler, data=None): |
285 | """Remove an Event handler""" | 299 | """Remove an Event handler""" |
286 | if data: | 300 | if data is not None: |
287 | if data.getVar("BB_CURRENT_MC"): | 301 | if data.getVar("BB_CURRENT_MC"): |
288 | mc = data.getVar("BB_CURRENT_MC") | 302 | mc = data.getVar("BB_CURRENT_MC") |
289 | name = '%s%s' % (mc.replace('-', '_'), name) | 303 | name = '%s%s' % (mc.replace('-', '_'), name) |
@@ -295,8 +309,8 @@ def remove(name, handler, data=None): | |||
295 | if name in _event_handler_map[event]: | 309 | if name in _event_handler_map[event]: |
296 | _event_handler_map[event].pop(name) | 310 | _event_handler_map[event].pop(name) |
297 | 311 | ||
298 | if data: | 312 | if data is not None: |
299 | bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) | 313 | bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set()) |
300 | if name in bbhands_mc: | 314 | if name in bbhands_mc: |
301 | bbhands_mc.remove(name) | 315 | bbhands_mc.remove(name) |
302 | data.setVar("__BBHANDLERS_MC", bbhands_mc) | 316 | data.setVar("__BBHANDLERS_MC", bbhands_mc) |
@@ -313,21 +327,23 @@ def set_eventfilter(func): | |||
313 | _eventfilter = func | 327 | _eventfilter = func |
314 | 328 | ||
315 | def register_UIHhandler(handler, mainui=False): | 329 | def register_UIHhandler(handler, mainui=False): |
316 | bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 | 330 | with bb.utils.lock_timeout(_thread_lock): |
317 | _ui_handlers[_ui_handler_seq] = handler | 331 | bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 |
318 | level, debug_domains = bb.msg.constructLogOptions() | 332 | _ui_handlers[_ui_handler_seq] = handler |
319 | _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) | 333 | level, debug_domains = bb.msg.constructLogOptions() |
320 | if mainui: | 334 | _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) |
321 | global _uiready | 335 | if mainui: |
322 | _uiready = _ui_handler_seq | 336 | global _uiready |
323 | return _ui_handler_seq | 337 | _uiready = _ui_handler_seq |
338 | return _ui_handler_seq | ||
324 | 339 | ||
325 | def unregister_UIHhandler(handlerNum, mainui=False): | 340 | def unregister_UIHhandler(handlerNum, mainui=False): |
326 | if mainui: | 341 | if mainui: |
327 | global _uiready | 342 | global _uiready |
328 | _uiready = False | 343 | _uiready = False |
329 | if handlerNum in _ui_handlers: | 344 | with bb.utils.lock_timeout(_thread_lock): |
330 | del _ui_handlers[handlerNum] | 345 | if handlerNum in _ui_handlers: |
346 | del _ui_handlers[handlerNum] | ||
331 | return | 347 | return |
332 | 348 | ||
333 | def get_uihandler(): | 349 | def get_uihandler(): |
@@ -482,7 +498,7 @@ class BuildCompleted(BuildBase, OperationCompleted): | |||
482 | BuildBase.__init__(self, n, p, failures) | 498 | BuildBase.__init__(self, n, p, failures) |
483 | 499 | ||
484 | class DiskFull(Event): | 500 | class DiskFull(Event): |
485 | """Disk full case build aborted""" | 501 | """Disk full case build halted""" |
486 | def __init__(self, dev, type, freespace, mountpoint): | 502 | def __init__(self, dev, type, freespace, mountpoint): |
487 | Event.__init__(self) | 503 | Event.__init__(self) |
488 | self._dev = dev | 504 | self._dev = dev |
@@ -666,6 +682,17 @@ class ReachableStamps(Event): | |||
666 | Event.__init__(self) | 682 | Event.__init__(self) |
667 | self.stamps = stamps | 683 | self.stamps = stamps |
668 | 684 | ||
685 | class StaleSetSceneTasks(Event): | ||
686 | """ | ||
687 | An event listing setscene tasks which are 'stale' and will | ||
688 | be rerun. The metadata may use to clean up stale data. | ||
689 | tasks is a mapping of tasks and matching stale stamps. | ||
690 | """ | ||
691 | |||
692 | def __init__(self, tasks): | ||
693 | Event.__init__(self) | ||
694 | self.tasks = tasks | ||
695 | |||
669 | class FilesMatchingFound(Event): | 696 | class FilesMatchingFound(Event): |
670 | """ | 697 | """ |
671 | Event when a list of files matching the supplied pattern has | 698 | Event when a list of files matching the supplied pattern has |
@@ -749,7 +776,7 @@ class LogHandler(logging.Handler): | |||
749 | class MetadataEvent(Event): | 776 | class MetadataEvent(Event): |
750 | """ | 777 | """ |
751 | Generic event that target for OE-Core classes | 778 | Generic event that target for OE-Core classes |
752 | to report information during asynchrous execution | 779 | to report information during asynchronous execution |
753 | """ | 780 | """ |
754 | def __init__(self, eventtype, eventdata): | 781 | def __init__(self, eventtype, eventdata): |
755 | Event.__init__(self) | 782 | Event.__init__(self) |
@@ -830,3 +857,19 @@ class FindSigInfoResult(Event): | |||
830 | def __init__(self, result): | 857 | def __init__(self, result): |
831 | Event.__init__(self) | 858 | Event.__init__(self) |
832 | self.result = result | 859 | self.result = result |
860 | |||
861 | class GetTaskSignatureResult(Event): | ||
862 | """ | ||
863 | Event to return results from GetTaskSignatures command | ||
864 | """ | ||
865 | def __init__(self, sig): | ||
866 | Event.__init__(self) | ||
867 | self.sig = sig | ||
868 | |||
869 | class ParseError(Event): | ||
870 | """ | ||
871 | Event to indicate parse failed | ||
872 | """ | ||
873 | def __init__(self, msg): | ||
874 | super().__init__() | ||
875 | self._msg = msg | ||
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py index ecbad59970..801db9c82f 100644 --- a/bitbake/lib/bb/exceptions.py +++ b/bitbake/lib/bb/exceptions.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/bitbake/lib/bb/fetch2/README b/bitbake/lib/bb/fetch2/README new file mode 100644 index 0000000000..67b787ef47 --- /dev/null +++ b/bitbake/lib/bb/fetch2/README | |||
@@ -0,0 +1,57 @@ | |||
1 | There are expectations of users of the fetcher code. This file attempts to document | ||
2 | some of the constraints that are present. Some are obvious, some are less so. It is | ||
3 | documented in the context of how OE uses it but the API calls are generic. | ||
4 | |||
5 | a) network access for sources is only expected to happen in the do_fetch task. | ||
6 | This is not enforced or tested but is required so that we can: | ||
7 | |||
8 | i) audit the sources used (i.e. for license/manifest reasons) | ||
9 | ii) support offline builds with a suitable cache | ||
10 | iii) allow work to continue even with downtime upstream | ||
11 | iv) allow for changes upstream in incompatible ways | ||
12 | v) allow rebuilding of the software in X years time | ||
13 | |||
14 | b) network access is not expected in do_unpack task. | ||
15 | |||
16 | c) you can take DL_DIR and use it as a mirror for offline builds. | ||
17 | |||
18 | d) access to the network is only made when explicitly configured in recipes | ||
19 | (e.g. use of AUTOREV, or use of git tags which change revision). | ||
20 | |||
21 | e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it | ||
22 | will match in future exactly in a clean build with a new DL_DIR). | ||
23 | One specific pain point example are git tags. They can be replaced and change | ||
24 | so the git fetcher has to resolve them with the network. We use git revisions | ||
25 | where possible to avoid this and ensure determinism. | ||
26 | |||
27 | f) network access is expected to work with the standard linux proxy variables | ||
28 | so that access behind firewalls works (the fetcher sets these in the | ||
29 | environment but only in the do_fetch tasks). | ||
30 | |||
31 | g) access during parsing has to be minimal, a "git ls-remote" for an AUTOREV | ||
32 | git recipe might be ok but you can't expect to checkout a git tree. | ||
33 | |||
34 | h) we need to provide revision information during parsing such that a version | ||
35 | for the recipe can be constructed. | ||
36 | |||
37 | i) versions are expected to be able to increase in a way which sorts allowing | ||
38 | package feeds to operate (see PR server required for git revisions to sort). | ||
39 | |||
40 | j) API to query for possible version upgrades of a url is highly desireable to | ||
41 | allow our automated upgrage code to function (it is implied this does always | ||
42 | have network access). | ||
43 | |||
44 | k) Where fixes or changes to behaviour in the fetcher are made, we ask that | ||
45 | test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do | ||
46 | have fairly extensive test coverage of the fetcher as it is the only way | ||
47 | to track all of its corner cases, it still doesn't give entire coverage | ||
48 | though sadly. | ||
49 | |||
50 | l) If using tools during parse time, they will have to be in ASSUME_PROVIDED | ||
51 | in OE's context as we can't build git-native, then parse a recipe and use | ||
52 | git ls-remote. | ||
53 | |||
54 | Not all fetchers support all features, autorev is optional and doesn't make | ||
55 | sense for some. Upgrade detection means different things in different contexts | ||
56 | too. | ||
57 | |||
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index 19169d780f..5bf2c4b8cf 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
@@ -113,7 +113,7 @@ class MissingParameterError(BBFetchException): | |||
113 | self.args = (missing, url) | 113 | self.args = (missing, url) |
114 | 114 | ||
115 | class ParameterError(BBFetchException): | 115 | class ParameterError(BBFetchException): |
116 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" | 116 | """Exception raised when a url cannot be processed due to invalid parameters.""" |
117 | def __init__(self, message, url): | 117 | def __init__(self, message, url): |
118 | msg = "URL: '%s' has invalid parameters. %s" % (url, message) | 118 | msg = "URL: '%s' has invalid parameters. %s" % (url, message) |
119 | self.url = url | 119 | self.url = url |
@@ -182,7 +182,7 @@ class URI(object): | |||
182 | Some notes about relative URIs: while it's specified that | 182 | Some notes about relative URIs: while it's specified that |
183 | a URI beginning with <scheme>:// should either be directly | 183 | a URI beginning with <scheme>:// should either be directly |
184 | followed by a hostname or a /, the old URI handling of the | 184 | followed by a hostname or a /, the old URI handling of the |
185 | fetch2 library did not comform to this. Therefore, this URI | 185 | fetch2 library did not conform to this. Therefore, this URI |
186 | class has some kludges to make sure that URIs are parsed in | 186 | class has some kludges to make sure that URIs are parsed in |
187 | a way comforming to bitbake's current usage. This URI class | 187 | a way comforming to bitbake's current usage. This URI class |
188 | supports the following: | 188 | supports the following: |
@@ -199,7 +199,7 @@ class URI(object): | |||
199 | file://hostname/absolute/path.diff (would be IETF compliant) | 199 | file://hostname/absolute/path.diff (would be IETF compliant) |
200 | 200 | ||
201 | Note that the last case only applies to a list of | 201 | Note that the last case only applies to a list of |
202 | "whitelisted" schemes (currently only file://), that requires | 202 | explicitly allowed schemes (currently only file://), that requires |
203 | its URIs to not have a network location. | 203 | its URIs to not have a network location. |
204 | """ | 204 | """ |
205 | 205 | ||
@@ -290,12 +290,12 @@ class URI(object): | |||
290 | 290 | ||
291 | def _param_str_split(self, string, elmdelim, kvdelim="="): | 291 | def _param_str_split(self, string, elmdelim, kvdelim="="): |
292 | ret = collections.OrderedDict() | 292 | ret = collections.OrderedDict() |
293 | for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: | 293 | for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]: |
294 | ret[k] = v | 294 | ret[k] = v |
295 | return ret | 295 | return ret |
296 | 296 | ||
297 | def _param_str_join(self, dict_, elmdelim, kvdelim="="): | 297 | def _param_str_join(self, dict_, elmdelim, kvdelim="="): |
298 | return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) | 298 | return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()]) |
299 | 299 | ||
300 | @property | 300 | @property |
301 | def hostport(self): | 301 | def hostport(self): |
@@ -388,7 +388,7 @@ def decodeurl(url): | |||
388 | if s: | 388 | if s: |
389 | if not '=' in s: | 389 | if not '=' in s: |
390 | raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) | 390 | raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) |
391 | s1, s2 = s.split('=') | 391 | s1, s2 = s.split('=', 1) |
392 | p[s1] = s2 | 392 | p[s1] = s2 |
393 | 393 | ||
394 | return type, host, urllib.parse.unquote(path), user, pswd, p | 394 | return type, host, urllib.parse.unquote(path), user, pswd, p |
@@ -402,24 +402,24 @@ def encodeurl(decoded): | |||
402 | 402 | ||
403 | if not type: | 403 | if not type: |
404 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) | 404 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) |
405 | url = '%s://' % type | 405 | url = ['%s://' % type] |
406 | if user and type != "file": | 406 | if user and type != "file": |
407 | url += "%s" % user | 407 | url.append("%s" % user) |
408 | if pswd: | 408 | if pswd: |
409 | url += ":%s" % pswd | 409 | url.append(":%s" % pswd) |
410 | url += "@" | 410 | url.append("@") |
411 | if host and type != "file": | 411 | if host and type != "file": |
412 | url += "%s" % host | 412 | url.append("%s" % host) |
413 | if path: | 413 | if path: |
414 | # Standardise path to ensure comparisons work | 414 | # Standardise path to ensure comparisons work |
415 | while '//' in path: | 415 | while '//' in path: |
416 | path = path.replace("//", "/") | 416 | path = path.replace("//", "/") |
417 | url += "%s" % urllib.parse.quote(path) | 417 | url.append("%s" % urllib.parse.quote(path)) |
418 | if p: | 418 | if p: |
419 | for parm in p: | 419 | for parm in p: |
420 | url += ";%s=%s" % (parm, p[parm]) | 420 | url.append(";%s=%s" % (parm, p[parm])) |
421 | 421 | ||
422 | return url | 422 | return "".join(url) |
423 | 423 | ||
424 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | 424 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): |
425 | if not ud.url or not uri_find or not uri_replace: | 425 | if not ud.url or not uri_find or not uri_replace: |
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
430 | uri_replace_decoded = list(decodeurl(uri_replace)) | 430 | uri_replace_decoded = list(decodeurl(uri_replace)) |
431 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) | 431 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) |
432 | result_decoded = ['', '', '', '', '', {}] | 432 | result_decoded = ['', '', '', '', '', {}] |
433 | # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params | ||
433 | for loc, i in enumerate(uri_find_decoded): | 434 | for loc, i in enumerate(uri_find_decoded): |
434 | result_decoded[loc] = uri_decoded[loc] | 435 | result_decoded[loc] = uri_decoded[loc] |
435 | regexp = i | 436 | regexp = i |
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
449 | for l in replacements: | 450 | for l in replacements: |
450 | uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) | 451 | uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) |
451 | result_decoded[loc][k] = uri_replace_decoded[loc][k] | 452 | result_decoded[loc][k] = uri_replace_decoded[loc][k] |
453 | elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]: | ||
454 | # User/password in the replacement is just a straight replacement | ||
455 | result_decoded[loc] = uri_replace_decoded[loc] | ||
452 | elif (re.match(regexp, uri_decoded[loc])): | 456 | elif (re.match(regexp, uri_decoded[loc])): |
453 | if not uri_replace_decoded[loc]: | 457 | if not uri_replace_decoded[loc]: |
454 | result_decoded[loc] = "" | 458 | result_decoded[loc] = "" |
@@ -465,10 +469,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
465 | basename = os.path.basename(mirrortarball) | 469 | basename = os.path.basename(mirrortarball) |
466 | # Kill parameters, they make no sense for mirror tarballs | 470 | # Kill parameters, they make no sense for mirror tarballs |
467 | uri_decoded[5] = {} | 471 | uri_decoded[5] = {} |
472 | uri_find_decoded[5] = {} | ||
468 | elif ud.localpath and ud.method.supports_checksum(ud): | 473 | elif ud.localpath and ud.method.supports_checksum(ud): |
469 | basename = os.path.basename(ud.localpath) | 474 | basename = os.path.basename(ud.localpath) |
470 | if basename and not result_decoded[loc].endswith(basename): | 475 | if basename: |
471 | result_decoded[loc] = os.path.join(result_decoded[loc], basename) | 476 | uri_basename = os.path.basename(uri_decoded[loc]) |
477 | # Prefix with a slash as a sentinel in case | ||
478 | # result_decoded[loc] does not contain one. | ||
479 | path = "/" + result_decoded[loc] | ||
480 | if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename): | ||
481 | result_decoded[loc] = path[1:-len(uri_basename)] + basename | ||
482 | elif not path.endswith("/" + basename): | ||
483 | result_decoded[loc] = os.path.join(path[1:], basename) | ||
472 | else: | 484 | else: |
473 | return None | 485 | return None |
474 | result = encodeurl(result_decoded) | 486 | result = encodeurl(result_decoded) |
@@ -506,7 +518,7 @@ def fetcher_init(d): | |||
506 | else: | 518 | else: |
507 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | 519 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
508 | 520 | ||
509 | _checksum_cache.init_cache(d) | 521 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) |
510 | 522 | ||
511 | for m in methods: | 523 | for m in methods: |
512 | if hasattr(m, "init"): | 524 | if hasattr(m, "init"): |
@@ -534,7 +546,7 @@ def mirror_from_string(data): | |||
534 | bb.warn('Invalid mirror data %s, should have paired members.' % data) | 546 | bb.warn('Invalid mirror data %s, should have paired members.' % data) |
535 | return list(zip(*[iter(mirrors)]*2)) | 547 | return list(zip(*[iter(mirrors)]*2)) |
536 | 548 | ||
537 | def verify_checksum(ud, d, precomputed={}): | 549 | def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True): |
538 | """ | 550 | """ |
539 | verify the MD5 and SHA256 checksum for downloaded src | 551 | verify the MD5 and SHA256 checksum for downloaded src |
540 | 552 | ||
@@ -548,20 +560,25 @@ def verify_checksum(ud, d, precomputed={}): | |||
548 | file against those in the recipe each time, rather than only after | 560 | file against those in the recipe each time, rather than only after |
549 | downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. | 561 | downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. |
550 | """ | 562 | """ |
551 | |||
552 | if ud.ignore_checksums or not ud.method.supports_checksum(ud): | 563 | if ud.ignore_checksums or not ud.method.supports_checksum(ud): |
553 | return {} | 564 | return {} |
554 | 565 | ||
566 | if localpath is None: | ||
567 | localpath = ud.localpath | ||
568 | |||
555 | def compute_checksum_info(checksum_id): | 569 | def compute_checksum_info(checksum_id): |
556 | checksum_name = getattr(ud, "%s_name" % checksum_id) | 570 | checksum_name = getattr(ud, "%s_name" % checksum_id) |
557 | 571 | ||
558 | if checksum_id in precomputed: | 572 | if checksum_id in precomputed: |
559 | checksum_data = precomputed[checksum_id] | 573 | checksum_data = precomputed[checksum_id] |
560 | else: | 574 | else: |
561 | checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) | 575 | checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath) |
562 | 576 | ||
563 | checksum_expected = getattr(ud, "%s_expected" % checksum_id) | 577 | checksum_expected = getattr(ud, "%s_expected" % checksum_id) |
564 | 578 | ||
579 | if checksum_expected == '': | ||
580 | checksum_expected = None | ||
581 | |||
565 | return { | 582 | return { |
566 | "id": checksum_id, | 583 | "id": checksum_id, |
567 | "name": checksum_name, | 584 | "name": checksum_name, |
@@ -581,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}): | |||
581 | checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] | 598 | checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] |
582 | 599 | ||
583 | # If no checksum has been provided | 600 | # If no checksum has been provided |
584 | if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): | 601 | if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): |
585 | messages = [] | 602 | messages = [] |
586 | strict = d.getVar("BB_STRICT_CHECKSUM") or "0" | 603 | strict = d.getVar("BB_STRICT_CHECKSUM") or "0" |
587 | 604 | ||
588 | # If strict checking enabled and neither sum defined, raise error | 605 | # If strict checking enabled and neither sum defined, raise error |
589 | if strict == "1": | 606 | if strict == "1": |
590 | messages.append("No checksum specified for '%s', please add at " \ | 607 | raise NoChecksumError("\n".join(checksum_lines)) |
591 | "least one to the recipe:" % ud.localpath) | ||
592 | messages.extend(checksum_lines) | ||
593 | logger.error("\n".join(messages)) | ||
594 | raise NoChecksumError("Missing SRC_URI checksum", ud.url) | ||
595 | 608 | ||
596 | bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) | 609 | bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) |
597 | 610 | ||
@@ -612,8 +625,8 @@ def verify_checksum(ud, d, precomputed={}): | |||
612 | 625 | ||
613 | for ci in checksum_infos: | 626 | for ci in checksum_infos: |
614 | if ci["expected"] and ci["expected"] != ci["data"]: | 627 | if ci["expected"] and ci["expected"] != ci["data"]: |
615 | messages.append("File: '%s' has %s checksum %s when %s was " \ | 628 | messages.append("File: '%s' has %s checksum '%s' when '%s' was " \ |
616 | "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) | 629 | "expected" % (localpath, ci["id"], ci["data"], ci["expected"])) |
617 | bad_checksum = ci["data"] | 630 | bad_checksum = ci["data"] |
618 | 631 | ||
619 | if bad_checksum: | 632 | if bad_checksum: |
@@ -731,13 +744,16 @@ def subprocess_setup(): | |||
731 | # SIGPIPE errors are known issues with gzip/bash | 744 | # SIGPIPE errors are known issues with gzip/bash |
732 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) | 745 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) |
733 | 746 | ||
734 | def get_autorev(d): | 747 | def mark_recipe_nocache(d): |
735 | # only not cache src rev in autorev case | ||
736 | if d.getVar('BB_SRCREV_POLICY') != "cache": | 748 | if d.getVar('BB_SRCREV_POLICY') != "cache": |
737 | d.setVar('BB_DONT_CACHE', '1') | 749 | d.setVar('BB_DONT_CACHE', '1') |
750 | |||
751 | def get_autorev(d): | ||
752 | mark_recipe_nocache(d) | ||
753 | d.setVar("__BBAUTOREV_SEEN", True) | ||
738 | return "AUTOINC" | 754 | return "AUTOINC" |
739 | 755 | ||
740 | def get_srcrev(d, method_name='sortable_revision'): | 756 | def _get_srcrev(d, method_name='sortable_revision'): |
741 | """ | 757 | """ |
742 | Return the revision string, usually for use in the version string (PV) of the current package | 758 | Return the revision string, usually for use in the version string (PV) of the current package |
743 | Most packages usually only have one SCM so we just pass on the call. | 759 | Most packages usually only have one SCM so we just pass on the call. |
@@ -751,23 +767,34 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
751 | that fetcher provides a method with the given name and the same signature as sortable_revision. | 767 | that fetcher provides a method with the given name and the same signature as sortable_revision. |
752 | """ | 768 | """ |
753 | 769 | ||
770 | d.setVar("__BBSRCREV_SEEN", "1") | ||
771 | recursion = d.getVar("__BBINSRCREV") | ||
772 | if recursion: | ||
773 | raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI") | ||
774 | d.setVar("__BBINSRCREV", True) | ||
775 | |||
754 | scms = [] | 776 | scms = [] |
777 | revs = [] | ||
755 | fetcher = Fetch(d.getVar('SRC_URI').split(), d) | 778 | fetcher = Fetch(d.getVar('SRC_URI').split(), d) |
756 | urldata = fetcher.ud | 779 | urldata = fetcher.ud |
757 | for u in urldata: | 780 | for u in urldata: |
758 | if urldata[u].method.supports_srcrev(): | 781 | if urldata[u].method.supports_srcrev(): |
759 | scms.append(u) | 782 | scms.append(u) |
760 | 783 | ||
761 | if len(scms) == 0: | 784 | if not scms: |
762 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") | 785 | d.delVar("__BBINSRCREV") |
786 | return "", revs | ||
787 | |||
763 | 788 | ||
764 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: | 789 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: |
765 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) | 790 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) |
791 | revs.append(rev) | ||
766 | if len(rev) > 10: | 792 | if len(rev) > 10: |
767 | rev = rev[:10] | 793 | rev = rev[:10] |
794 | d.delVar("__BBINSRCREV") | ||
768 | if autoinc: | 795 | if autoinc: |
769 | return "AUTOINC+" + rev | 796 | return "AUTOINC+" + rev, revs |
770 | return rev | 797 | return rev, revs |
771 | 798 | ||
772 | # | 799 | # |
773 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT | 800 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT |
@@ -783,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
783 | ud = urldata[scm] | 810 | ud = urldata[scm] |
784 | for name in ud.names: | 811 | for name in ud.names: |
785 | autoinc, rev = getattr(ud.method, method_name)(ud, d, name) | 812 | autoinc, rev = getattr(ud.method, method_name)(ud, d, name) |
813 | revs.append(rev) | ||
786 | seenautoinc = seenautoinc or autoinc | 814 | seenautoinc = seenautoinc or autoinc |
787 | if len(rev) > 10: | 815 | if len(rev) > 10: |
788 | rev = rev[:10] | 816 | rev = rev[:10] |
@@ -799,12 +827,70 @@ def get_srcrev(d, method_name='sortable_revision'): | |||
799 | if seenautoinc: | 827 | if seenautoinc: |
800 | format = "AUTOINC+" + format | 828 | format = "AUTOINC+" + format |
801 | 829 | ||
802 | return format | 830 | d.delVar("__BBINSRCREV") |
831 | return format, revs | ||
832 | |||
833 | def get_hashvalue(d, method_name='sortable_revision'): | ||
834 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
835 | return " ".join(revs) | ||
836 | |||
837 | def get_pkgv_string(d, method_name='sortable_revision'): | ||
838 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
839 | return pkgv | ||
840 | |||
841 | def get_srcrev(d, method_name='sortable_revision'): | ||
842 | pkgv, revs = _get_srcrev(d, method_name=method_name) | ||
843 | if not pkgv: | ||
844 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") | ||
845 | return pkgv | ||
803 | 846 | ||
804 | def localpath(url, d): | 847 | def localpath(url, d): |
805 | fetcher = bb.fetch2.Fetch([url], d) | 848 | fetcher = bb.fetch2.Fetch([url], d) |
806 | return fetcher.localpath(url) | 849 | return fetcher.localpath(url) |
807 | 850 | ||
851 | # Need to export PATH as binary could be in metadata paths | ||
852 | # rather than host provided | ||
853 | # Also include some other variables. | ||
854 | FETCH_EXPORT_VARS = ['HOME', 'PATH', | ||
855 | 'HTTP_PROXY', 'http_proxy', | ||
856 | 'HTTPS_PROXY', 'https_proxy', | ||
857 | 'FTP_PROXY', 'ftp_proxy', | ||
858 | 'FTPS_PROXY', 'ftps_proxy', | ||
859 | 'NO_PROXY', 'no_proxy', | ||
860 | 'ALL_PROXY', 'all_proxy', | ||
861 | 'GIT_PROXY_COMMAND', | ||
862 | 'GIT_SSH', | ||
863 | 'GIT_SSH_COMMAND', | ||
864 | 'GIT_SSL_CAINFO', | ||
865 | 'GIT_SMART_HTTP', | ||
866 | 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', | ||
867 | 'SOCKS5_USER', 'SOCKS5_PASSWD', | ||
868 | 'DBUS_SESSION_BUS_ADDRESS', | ||
869 | 'P4CONFIG', | ||
870 | 'SSL_CERT_FILE', | ||
871 | 'NODE_EXTRA_CA_CERTS', | ||
872 | 'AWS_PROFILE', | ||
873 | 'AWS_ACCESS_KEY_ID', | ||
874 | 'AWS_SECRET_ACCESS_KEY', | ||
875 | 'AWS_ROLE_ARN', | ||
876 | 'AWS_WEB_IDENTITY_TOKEN_FILE', | ||
877 | 'AWS_DEFAULT_REGION', | ||
878 | 'AWS_SESSION_TOKEN', | ||
879 | 'GIT_CACHE_PATH', | ||
880 | 'REMOTE_CONTAINERS_IPC', | ||
881 | 'SSL_CERT_DIR'] | ||
882 | |||
883 | def get_fetcher_environment(d): | ||
884 | newenv = {} | ||
885 | origenv = d.getVar("BB_ORIGENV") | ||
886 | for name in bb.fetch2.FETCH_EXPORT_VARS: | ||
887 | value = d.getVar(name) | ||
888 | if not value and origenv: | ||
889 | value = origenv.getVar(name) | ||
890 | if value: | ||
891 | newenv[name] = value | ||
892 | return newenv | ||
893 | |||
808 | def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | 894 | def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): |
809 | """ | 895 | """ |
810 | Run cmd returning the command output | 896 | Run cmd returning the command output |
@@ -813,25 +899,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | |||
813 | Optionally remove the files/directories listed in cleanup upon failure | 899 | Optionally remove the files/directories listed in cleanup upon failure |
814 | """ | 900 | """ |
815 | 901 | ||
816 | # Need to export PATH as binary could be in metadata paths | 902 | exportvars = FETCH_EXPORT_VARS |
817 | # rather than host provided | ||
818 | # Also include some other variables. | ||
819 | # FIXME: Should really include all export varaiables? | ||
820 | exportvars = ['HOME', 'PATH', | ||
821 | 'HTTP_PROXY', 'http_proxy', | ||
822 | 'HTTPS_PROXY', 'https_proxy', | ||
823 | 'FTP_PROXY', 'ftp_proxy', | ||
824 | 'FTPS_PROXY', 'ftps_proxy', | ||
825 | 'NO_PROXY', 'no_proxy', | ||
826 | 'ALL_PROXY', 'all_proxy', | ||
827 | 'GIT_PROXY_COMMAND', | ||
828 | 'GIT_SSH', | ||
829 | 'GIT_SSL_CAINFO', | ||
830 | 'GIT_SMART_HTTP', | ||
831 | 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', | ||
832 | 'SOCKS5_USER', 'SOCKS5_PASSWD', | ||
833 | 'DBUS_SESSION_BUS_ADDRESS', | ||
834 | 'P4CONFIG'] | ||
835 | 903 | ||
836 | if not cleanup: | 904 | if not cleanup: |
837 | cleanup = [] | 905 | cleanup = [] |
@@ -868,14 +936,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | |||
868 | (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) | 936 | (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) |
869 | success = True | 937 | success = True |
870 | except bb.process.NotFoundError as e: | 938 | except bb.process.NotFoundError as e: |
871 | error_message = "Fetch command %s" % (e.command) | 939 | error_message = "Fetch command %s not found" % (e.command) |
872 | except bb.process.ExecutionError as e: | 940 | except bb.process.ExecutionError as e: |
873 | if e.stdout: | 941 | if e.stdout: |
874 | output = "output:\n%s\n%s" % (e.stdout, e.stderr) | 942 | output = "output:\n%s\n%s" % (e.stdout, e.stderr) |
875 | elif e.stderr: | 943 | elif e.stderr: |
876 | output = "output:\n%s" % e.stderr | 944 | output = "output:\n%s" % e.stderr |
877 | else: | 945 | else: |
878 | output = "no output" | 946 | if log: |
947 | output = "see logfile for output" | ||
948 | else: | ||
949 | output = "no output" | ||
879 | error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) | 950 | error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) |
880 | except bb.process.CmdError as e: | 951 | except bb.process.CmdError as e: |
881 | error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) | 952 | error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) |
@@ -937,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld): | |||
937 | 1008 | ||
938 | try: | 1009 | try: |
939 | newud = FetchData(newuri, ld) | 1010 | newud = FetchData(newuri, ld) |
1011 | newud.ignore_checksums = True | ||
940 | newud.setup_localpath(ld) | 1012 | newud.setup_localpath(ld) |
941 | except bb.fetch2.BBFetchException as e: | 1013 | except bb.fetch2.BBFetchException as e: |
942 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) | 1014 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) |
@@ -1046,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1046 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) | 1118 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) |
1047 | logger.debug(str(e)) | 1119 | logger.debug(str(e)) |
1048 | try: | 1120 | try: |
1049 | ud.method.clean(ud, ld) | 1121 | if ud.method.cleanup_upon_failure(): |
1122 | ud.method.clean(ud, ld) | ||
1050 | except UnboundLocalError: | 1123 | except UnboundLocalError: |
1051 | pass | 1124 | pass |
1052 | return False | 1125 | return False |
@@ -1057,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1057 | 1130 | ||
1058 | def ensure_symlink(target, link_name): | 1131 | def ensure_symlink(target, link_name): |
1059 | if not os.path.exists(link_name): | 1132 | if not os.path.exists(link_name): |
1133 | dirname = os.path.dirname(link_name) | ||
1134 | bb.utils.mkdirhier(dirname) | ||
1060 | if os.path.islink(link_name): | 1135 | if os.path.islink(link_name): |
1061 | # Broken symbolic link | 1136 | # Broken symbolic link |
1062 | os.unlink(link_name) | 1137 | os.unlink(link_name) |
@@ -1140,11 +1215,11 @@ def srcrev_internal_helper(ud, d, name): | |||
1140 | pn = d.getVar("PN") | 1215 | pn = d.getVar("PN") |
1141 | attempts = [] | 1216 | attempts = [] |
1142 | if name != '' and pn: | 1217 | if name != '' and pn: |
1143 | attempts.append("SRCREV_%s_pn-%s" % (name, pn)) | 1218 | attempts.append("SRCREV_%s:pn-%s" % (name, pn)) |
1144 | if name != '': | 1219 | if name != '': |
1145 | attempts.append("SRCREV_%s" % name) | 1220 | attempts.append("SRCREV_%s" % name) |
1146 | if pn: | 1221 | if pn: |
1147 | attempts.append("SRCREV_pn-%s" % pn) | 1222 | attempts.append("SRCREV:pn-%s" % pn) |
1148 | attempts.append("SRCREV") | 1223 | attempts.append("SRCREV") |
1149 | 1224 | ||
1150 | for a in attempts: | 1225 | for a in attempts: |
@@ -1169,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name): | |||
1169 | if srcrev == "INVALID" or not srcrev: | 1244 | if srcrev == "INVALID" or not srcrev: |
1170 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) | 1245 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) |
1171 | if srcrev == "AUTOINC": | 1246 | if srcrev == "AUTOINC": |
1247 | d.setVar("__BBAUTOREV_ACTED_UPON", True) | ||
1172 | srcrev = ud.method.latest_revision(ud, d, name) | 1248 | srcrev = ud.method.latest_revision(ud, d, name) |
1173 | 1249 | ||
1174 | return srcrev | 1250 | return srcrev |
@@ -1180,23 +1256,21 @@ def get_checksum_file_list(d): | |||
1180 | SRC_URI as a space-separated string | 1256 | SRC_URI as a space-separated string |
1181 | """ | 1257 | """ |
1182 | fetch = Fetch([], d, cache = False, localonly = True) | 1258 | fetch = Fetch([], d, cache = False, localonly = True) |
1183 | |||
1184 | dl_dir = d.getVar('DL_DIR') | ||
1185 | filelist = [] | 1259 | filelist = [] |
1186 | for u in fetch.urls: | 1260 | for u in fetch.urls: |
1187 | ud = fetch.ud[u] | 1261 | ud = fetch.ud[u] |
1188 | |||
1189 | if ud and isinstance(ud.method, local.Local): | 1262 | if ud and isinstance(ud.method, local.Local): |
1190 | paths = ud.method.localpaths(ud, d) | 1263 | found = False |
1264 | paths = ud.method.localfile_searchpaths(ud, d) | ||
1191 | for f in paths: | 1265 | for f in paths: |
1192 | pth = ud.decodedurl | 1266 | pth = ud.decodedurl |
1193 | if f.startswith(dl_dir): | 1267 | if os.path.exists(f): |
1194 | # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else | 1268 | found = True |
1195 | if os.path.exists(f): | ||
1196 | bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f))) | ||
1197 | else: | ||
1198 | bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f))) | ||
1199 | filelist.append(f + ":" + str(os.path.exists(f))) | 1269 | filelist.append(f + ":" + str(os.path.exists(f))) |
1270 | if not found: | ||
1271 | bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found" | ||
1272 | "\nThe following paths were searched:" | ||
1273 | "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths))) | ||
1200 | 1274 | ||
1201 | return " ".join(filelist) | 1275 | return " ".join(filelist) |
1202 | 1276 | ||
@@ -1243,18 +1317,13 @@ class FetchData(object): | |||
1243 | 1317 | ||
1244 | if checksum_name in self.parm: | 1318 | if checksum_name in self.parm: |
1245 | checksum_expected = self.parm[checksum_name] | 1319 | checksum_expected = self.parm[checksum_name] |
1246 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: | 1320 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]: |
1247 | checksum_expected = None | 1321 | checksum_expected = None |
1248 | else: | 1322 | else: |
1249 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) | 1323 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) |
1250 | 1324 | ||
1251 | setattr(self, "%s_expected" % checksum_id, checksum_expected) | 1325 | setattr(self, "%s_expected" % checksum_id, checksum_expected) |
1252 | 1326 | ||
1253 | for checksum_id in CHECKSUM_LIST: | ||
1254 | configure_checksum(checksum_id) | ||
1255 | |||
1256 | self.ignore_checksums = False | ||
1257 | |||
1258 | self.names = self.parm.get("name",'default').split(',') | 1327 | self.names = self.parm.get("name",'default').split(',') |
1259 | 1328 | ||
1260 | self.method = None | 1329 | self.method = None |
@@ -1276,6 +1345,11 @@ class FetchData(object): | |||
1276 | if hasattr(self.method, "urldata_init"): | 1345 | if hasattr(self.method, "urldata_init"): |
1277 | self.method.urldata_init(self, d) | 1346 | self.method.urldata_init(self, d) |
1278 | 1347 | ||
1348 | for checksum_id in CHECKSUM_LIST: | ||
1349 | configure_checksum(checksum_id) | ||
1350 | |||
1351 | self.ignore_checksums = False | ||
1352 | |||
1279 | if "localpath" in self.parm: | 1353 | if "localpath" in self.parm: |
1280 | # if user sets localpath for file, use it instead. | 1354 | # if user sets localpath for file, use it instead. |
1281 | self.localpath = self.parm["localpath"] | 1355 | self.localpath = self.parm["localpath"] |
@@ -1355,6 +1429,9 @@ class FetchMethod(object): | |||
1355 | Is localpath something that can be represented by a checksum? | 1429 | Is localpath something that can be represented by a checksum? |
1356 | """ | 1430 | """ |
1357 | 1431 | ||
1432 | # We cannot compute checksums for None | ||
1433 | if urldata.localpath is None: | ||
1434 | return False | ||
1358 | # We cannot compute checksums for directories | 1435 | # We cannot compute checksums for directories |
1359 | if os.path.isdir(urldata.localpath): | 1436 | if os.path.isdir(urldata.localpath): |
1360 | return False | 1437 | return False |
@@ -1367,6 +1444,12 @@ class FetchMethod(object): | |||
1367 | """ | 1444 | """ |
1368 | return False | 1445 | return False |
1369 | 1446 | ||
1447 | def cleanup_upon_failure(self): | ||
1448 | """ | ||
1449 | When a fetch fails, should clean() be called? | ||
1450 | """ | ||
1451 | return True | ||
1452 | |||
1370 | def verify_donestamp(self, ud, d): | 1453 | def verify_donestamp(self, ud, d): |
1371 | """ | 1454 | """ |
1372 | Verify the donestamp file | 1455 | Verify the donestamp file |
@@ -1434,30 +1517,33 @@ class FetchMethod(object): | |||
1434 | cmd = None | 1517 | cmd = None |
1435 | 1518 | ||
1436 | if unpack: | 1519 | if unpack: |
1520 | tar_cmd = 'tar --extract --no-same-owner' | ||
1521 | if 'striplevel' in urldata.parm: | ||
1522 | tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel'] | ||
1437 | if file.endswith('.tar'): | 1523 | if file.endswith('.tar'): |
1438 | cmd = 'tar x --no-same-owner -f %s' % file | 1524 | cmd = '%s -f %s' % (tar_cmd, file) |
1439 | elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): | 1525 | elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): |
1440 | cmd = 'tar xz --no-same-owner -f %s' % file | 1526 | cmd = '%s -z -f %s' % (tar_cmd, file) |
1441 | elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): | 1527 | elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): |
1442 | cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file | 1528 | cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd) |
1443 | elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): | 1529 | elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): |
1444 | cmd = 'gzip -dc %s > %s' % (file, efile) | 1530 | cmd = 'gzip -dc %s > %s' % (file, efile) |
1445 | elif file.endswith('.bz2'): | 1531 | elif file.endswith('.bz2'): |
1446 | cmd = 'bzip2 -dc %s > %s' % (file, efile) | 1532 | cmd = 'bzip2 -dc %s > %s' % (file, efile) |
1447 | elif file.endswith('.txz') or file.endswith('.tar.xz'): | 1533 | elif file.endswith('.txz') or file.endswith('.tar.xz'): |
1448 | cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file | 1534 | cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd) |
1449 | elif file.endswith('.xz'): | 1535 | elif file.endswith('.xz'): |
1450 | cmd = 'xz -dc %s > %s' % (file, efile) | 1536 | cmd = 'xz -dc %s > %s' % (file, efile) |
1451 | elif file.endswith('.tar.lz'): | 1537 | elif file.endswith('.tar.lz'): |
1452 | cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file | 1538 | cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd) |
1453 | elif file.endswith('.lz'): | 1539 | elif file.endswith('.lz'): |
1454 | cmd = 'lzip -dc %s > %s' % (file, efile) | 1540 | cmd = 'lzip -dc %s > %s' % (file, efile) |
1455 | elif file.endswith('.tar.7z'): | 1541 | elif file.endswith('.tar.7z'): |
1456 | cmd = '7z x -so %s | tar x --no-same-owner -f -' % file | 1542 | cmd = '7z x -so %s | %s -f -' % (file, tar_cmd) |
1457 | elif file.endswith('.7z'): | 1543 | elif file.endswith('.7z'): |
1458 | cmd = '7za x -y %s 1>/dev/null' % file | 1544 | cmd = '7za x -y %s 1>/dev/null' % file |
1459 | elif file.endswith('.tzst') or file.endswith('.tar.zst'): | 1545 | elif file.endswith('.tzst') or file.endswith('.tar.zst'): |
1460 | cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file | 1546 | cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd) |
1461 | elif file.endswith('.zst'): | 1547 | elif file.endswith('.zst'): |
1462 | cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) | 1548 | cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) |
1463 | elif file.endswith('.zip') or file.endswith('.jar'): | 1549 | elif file.endswith('.zip') or file.endswith('.jar'): |
@@ -1490,7 +1576,7 @@ class FetchMethod(object): | |||
1490 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) | 1576 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) |
1491 | else: | 1577 | else: |
1492 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) | 1578 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) |
1493 | cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) | 1579 | cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) |
1494 | 1580 | ||
1495 | # If 'subdir' param exists, create a dir and use it as destination for unpack cmd | 1581 | # If 'subdir' param exists, create a dir and use it as destination for unpack cmd |
1496 | if 'subdir' in urldata.parm: | 1582 | if 'subdir' in urldata.parm: |
@@ -1506,6 +1592,7 @@ class FetchMethod(object): | |||
1506 | unpackdir = rootdir | 1592 | unpackdir = rootdir |
1507 | 1593 | ||
1508 | if not unpack or not cmd: | 1594 | if not unpack or not cmd: |
1595 | urldata.unpack_tracer.unpack("file-copy", unpackdir) | ||
1509 | # If file == dest, then avoid any copies, as we already put the file into dest! | 1596 | # If file == dest, then avoid any copies, as we already put the file into dest! |
1510 | dest = os.path.join(unpackdir, os.path.basename(file)) | 1597 | dest = os.path.join(unpackdir, os.path.basename(file)) |
1511 | if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): | 1598 | if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): |
@@ -1520,6 +1607,8 @@ class FetchMethod(object): | |||
1520 | destdir = urlpath.rsplit("/", 1)[0] + '/' | 1607 | destdir = urlpath.rsplit("/", 1)[0] + '/' |
1521 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) | 1608 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) |
1522 | cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) | 1609 | cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) |
1610 | else: | ||
1611 | urldata.unpack_tracer.unpack("archive-extract", unpackdir) | ||
1523 | 1612 | ||
1524 | if not cmd: | 1613 | if not cmd: |
1525 | return | 1614 | return |
@@ -1611,12 +1700,61 @@ class FetchMethod(object): | |||
1611 | """ | 1700 | """ |
1612 | return [] | 1701 | return [] |
1613 | 1702 | ||
1703 | |||
1704 | class DummyUnpackTracer(object): | ||
1705 | """ | ||
1706 | Abstract API definition for a class that traces unpacked source files back | ||
1707 | to their respective upstream SRC_URI entries, for software composition | ||
1708 | analysis, license compliance and detailed SBOM generation purposes. | ||
1709 | User may load their own unpack tracer class (instead of the dummy | ||
1710 | one) by setting the BB_UNPACK_TRACER_CLASS config parameter. | ||
1711 | """ | ||
1712 | def start(self, unpackdir, urldata_dict, d): | ||
1713 | """ | ||
1714 | Start tracing the core Fetch.unpack process, using an index to map | ||
1715 | unpacked files to each SRC_URI entry. | ||
1716 | This method is called by Fetch.unpack and it may receive nested calls by | ||
1717 | gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit | ||
1718 | URLs and by recursively calling Fetch.unpack from new (nested) Fetch | ||
1719 | instances. | ||
1720 | """ | ||
1721 | return | ||
1722 | def start_url(self, url): | ||
1723 | """Start tracing url unpack process. | ||
1724 | This method is called by Fetch.unpack before the fetcher-specific unpack | ||
1725 | method starts, and it may receive nested calls by gitsm and npmsw | ||
1726 | fetchers. | ||
1727 | """ | ||
1728 | return | ||
1729 | def unpack(self, unpack_type, destdir): | ||
1730 | """ | ||
1731 | Set unpack_type and destdir for current url. | ||
1732 | This method is called by the fetcher-specific unpack method after url | ||
1733 | tracing started. | ||
1734 | """ | ||
1735 | return | ||
1736 | def finish_url(self, url): | ||
1737 | """Finish tracing url unpack process and update the file index. | ||
1738 | This method is called by Fetch.unpack after the fetcher-specific unpack | ||
1739 | method finished its job, and it may receive nested calls by gitsm | ||
1740 | and npmsw fetchers. | ||
1741 | """ | ||
1742 | return | ||
1743 | def complete(self): | ||
1744 | """ | ||
1745 | Finish tracing the Fetch.unpack process, and check if all nested | ||
1746 | Fetch.unpack calls (if any) have been completed; if so, save collected | ||
1747 | metadata. | ||
1748 | """ | ||
1749 | return | ||
1750 | |||
1751 | |||
1614 | class Fetch(object): | 1752 | class Fetch(object): |
1615 | def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): | 1753 | def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): |
1616 | if localonly and cache: | 1754 | if localonly and cache: |
1617 | raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") | 1755 | raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") |
1618 | 1756 | ||
1619 | if len(urls) == 0: | 1757 | if not urls: |
1620 | urls = d.getVar("SRC_URI").split() | 1758 | urls = d.getVar("SRC_URI").split() |
1621 | self.urls = urls | 1759 | self.urls = urls |
1622 | self.d = d | 1760 | self.d = d |
@@ -1631,10 +1769,30 @@ class Fetch(object): | |||
1631 | if key in urldata_cache: | 1769 | if key in urldata_cache: |
1632 | self.ud = urldata_cache[key] | 1770 | self.ud = urldata_cache[key] |
1633 | 1771 | ||
1772 | # the unpack_tracer object needs to be made available to possible nested | ||
1773 | # Fetch instances (when those are created by gitsm and npmsw fetchers) | ||
1774 | # so we set it as a global variable | ||
1775 | global unpack_tracer | ||
1776 | try: | ||
1777 | unpack_tracer | ||
1778 | except NameError: | ||
1779 | class_path = d.getVar("BB_UNPACK_TRACER_CLASS") | ||
1780 | if class_path: | ||
1781 | # use user-defined unpack tracer class | ||
1782 | import importlib | ||
1783 | module_name, _, class_name = class_path.rpartition(".") | ||
1784 | module = importlib.import_module(module_name) | ||
1785 | class_ = getattr(module, class_name) | ||
1786 | unpack_tracer = class_() | ||
1787 | else: | ||
1788 | # fall back to the dummy/abstract class | ||
1789 | unpack_tracer = DummyUnpackTracer() | ||
1790 | |||
1634 | for url in urls: | 1791 | for url in urls: |
1635 | if url not in self.ud: | 1792 | if url not in self.ud: |
1636 | try: | 1793 | try: |
1637 | self.ud[url] = FetchData(url, d, localonly) | 1794 | self.ud[url] = FetchData(url, d, localonly) |
1795 | self.ud[url].unpack_tracer = unpack_tracer | ||
1638 | except NonLocalMethod: | 1796 | except NonLocalMethod: |
1639 | if localonly: | 1797 | if localonly: |
1640 | self.ud[url] = None | 1798 | self.ud[url] = None |
@@ -1673,6 +1831,7 @@ class Fetch(object): | |||
1673 | network = self.d.getVar("BB_NO_NETWORK") | 1831 | network = self.d.getVar("BB_NO_NETWORK") |
1674 | premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) | 1832 | premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) |
1675 | 1833 | ||
1834 | checksum_missing_messages = [] | ||
1676 | for u in urls: | 1835 | for u in urls: |
1677 | ud = self.ud[u] | 1836 | ud = self.ud[u] |
1678 | ud.setup_localpath(self.d) | 1837 | ud.setup_localpath(self.d) |
@@ -1684,7 +1843,6 @@ class Fetch(object): | |||
1684 | 1843 | ||
1685 | try: | 1844 | try: |
1686 | self.d.setVar("BB_NO_NETWORK", network) | 1845 | self.d.setVar("BB_NO_NETWORK", network) |
1687 | |||
1688 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): | 1846 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): |
1689 | done = True | 1847 | done = True |
1690 | elif m.try_premirror(ud, self.d): | 1848 | elif m.try_premirror(ud, self.d): |
@@ -1705,7 +1863,9 @@ class Fetch(object): | |||
1705 | self.d.setVar("BB_NO_NETWORK", "1") | 1863 | self.d.setVar("BB_NO_NETWORK", "1") |
1706 | 1864 | ||
1707 | firsterr = None | 1865 | firsterr = None |
1708 | verified_stamp = m.verify_donestamp(ud, self.d) | 1866 | verified_stamp = False |
1867 | if done: | ||
1868 | verified_stamp = m.verify_donestamp(ud, self.d) | ||
1709 | if not done and (not verified_stamp or m.need_update(ud, self.d)): | 1869 | if not done and (not verified_stamp or m.need_update(ud, self.d)): |
1710 | try: | 1870 | try: |
1711 | if not trusted_network(self.d, ud.url): | 1871 | if not trusted_network(self.d, ud.url): |
@@ -1735,7 +1895,7 @@ class Fetch(object): | |||
1735 | logger.debug(str(e)) | 1895 | logger.debug(str(e)) |
1736 | firsterr = e | 1896 | firsterr = e |
1737 | # Remove any incomplete fetch | 1897 | # Remove any incomplete fetch |
1738 | if not verified_stamp: | 1898 | if not verified_stamp and m.cleanup_upon_failure(): |
1739 | m.clean(ud, self.d) | 1899 | m.clean(ud, self.d) |
1740 | logger.debug("Trying MIRRORS") | 1900 | logger.debug("Trying MIRRORS") |
1741 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) | 1901 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) |
@@ -1754,17 +1914,28 @@ class Fetch(object): | |||
1754 | raise ChecksumError("Stale Error Detected") | 1914 | raise ChecksumError("Stale Error Detected") |
1755 | 1915 | ||
1756 | except BBFetchException as e: | 1916 | except BBFetchException as e: |
1757 | if isinstance(e, ChecksumError): | 1917 | if isinstance(e, NoChecksumError): |
1918 | (message, _) = e.args | ||
1919 | checksum_missing_messages.append(message) | ||
1920 | continue | ||
1921 | elif isinstance(e, ChecksumError): | ||
1758 | logger.error("Checksum failure fetching %s" % u) | 1922 | logger.error("Checksum failure fetching %s" % u) |
1759 | raise | 1923 | raise |
1760 | 1924 | ||
1761 | finally: | 1925 | finally: |
1762 | if ud.lockfile: | 1926 | if ud.lockfile: |
1763 | bb.utils.unlockfile(lf) | 1927 | bb.utils.unlockfile(lf) |
1928 | if checksum_missing_messages: | ||
1929 | logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages)) | ||
1930 | raise BBFetchException("There was some missing checksums in the recipe") | ||
1764 | 1931 | ||
1765 | def checkstatus(self, urls=None): | 1932 | def checkstatus(self, urls=None): |
1766 | """ | 1933 | """ |
1767 | Check all urls exist upstream | 1934 | Check all URLs exist upstream. |
1935 | |||
1936 | Returns None if the URLs exist, raises FetchError if the check wasn't | ||
1937 | successful but there wasn't an error (such as file not found), and | ||
1938 | raises other exceptions in error cases. | ||
1768 | """ | 1939 | """ |
1769 | 1940 | ||
1770 | if not urls: | 1941 | if not urls: |
@@ -1787,7 +1958,7 @@ class Fetch(object): | |||
1787 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) | 1958 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) |
1788 | 1959 | ||
1789 | if not ret: | 1960 | if not ret: |
1790 | raise FetchError("URL %s doesn't work" % u, u) | 1961 | raise FetchError("URL doesn't work", u) |
1791 | 1962 | ||
1792 | def unpack(self, root, urls=None): | 1963 | def unpack(self, root, urls=None): |
1793 | """ | 1964 | """ |
@@ -1797,6 +1968,8 @@ class Fetch(object): | |||
1797 | if not urls: | 1968 | if not urls: |
1798 | urls = self.urls | 1969 | urls = self.urls |
1799 | 1970 | ||
1971 | unpack_tracer.start(root, self.ud, self.d) | ||
1972 | |||
1800 | for u in urls: | 1973 | for u in urls: |
1801 | ud = self.ud[u] | 1974 | ud = self.ud[u] |
1802 | ud.setup_localpath(self.d) | 1975 | ud.setup_localpath(self.d) |
@@ -1804,11 +1977,15 @@ class Fetch(object): | |||
1804 | if ud.lockfile: | 1977 | if ud.lockfile: |
1805 | lf = bb.utils.lockfile(ud.lockfile) | 1978 | lf = bb.utils.lockfile(ud.lockfile) |
1806 | 1979 | ||
1980 | unpack_tracer.start_url(u) | ||
1807 | ud.method.unpack(ud, root, self.d) | 1981 | ud.method.unpack(ud, root, self.d) |
1982 | unpack_tracer.finish_url(u) | ||
1808 | 1983 | ||
1809 | if ud.lockfile: | 1984 | if ud.lockfile: |
1810 | bb.utils.unlockfile(lf) | 1985 | bb.utils.unlockfile(lf) |
1811 | 1986 | ||
1987 | unpack_tracer.complete() | ||
1988 | |||
1812 | def clean(self, urls=None): | 1989 | def clean(self, urls=None): |
1813 | """ | 1990 | """ |
1814 | Clean files that the fetcher gets or places | 1991 | Clean files that the fetcher gets or places |
@@ -1908,6 +2085,9 @@ from . import repo | |||
1908 | from . import clearcase | 2085 | from . import clearcase |
1909 | from . import npm | 2086 | from . import npm |
1910 | from . import npmsw | 2087 | from . import npmsw |
2088 | from . import az | ||
2089 | from . import crate | ||
2090 | from . import gcp | ||
1911 | 2091 | ||
1912 | methods.append(local.Local()) | 2092 | methods.append(local.Local()) |
1913 | methods.append(wget.Wget()) | 2093 | methods.append(wget.Wget()) |
@@ -1927,3 +2107,6 @@ methods.append(repo.Repo()) | |||
1927 | methods.append(clearcase.ClearCase()) | 2107 | methods.append(clearcase.ClearCase()) |
1928 | methods.append(npm.Npm()) | 2108 | methods.append(npm.Npm()) |
1929 | methods.append(npmsw.NpmShrinkWrap()) | 2109 | methods.append(npmsw.NpmShrinkWrap()) |
2110 | methods.append(az.Az()) | ||
2111 | methods.append(crate.Crate()) | ||
2112 | methods.append(gcp.GCP()) | ||
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py new file mode 100644 index 0000000000..3ccc594c22 --- /dev/null +++ b/bitbake/lib/bb/fetch2/az.py | |||
@@ -0,0 +1,93 @@ | |||
1 | """ | ||
2 | BitBake 'Fetch' Azure Storage implementation | ||
3 | |||
4 | """ | ||
5 | |||
6 | # Copyright (C) 2021 Alejandro Hernandez Samaniego | ||
7 | # | ||
8 | # Based on bb.fetch2.wget: | ||
9 | # Copyright (C) 2003, 2004 Chris Larson | ||
10 | # | ||
11 | # SPDX-License-Identifier: GPL-2.0-only | ||
12 | # | ||
13 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
14 | |||
15 | import shlex | ||
16 | import os | ||
17 | import bb | ||
18 | from bb.fetch2 import FetchError | ||
19 | from bb.fetch2 import logger | ||
20 | from bb.fetch2.wget import Wget | ||
21 | |||
22 | |||
23 | class Az(Wget): | ||
24 | |||
25 | def supports(self, ud, d): | ||
26 | """ | ||
27 | Check to see if a given url can be fetched from Azure Storage | ||
28 | """ | ||
29 | return ud.type in ['az'] | ||
30 | |||
31 | |||
32 | def checkstatus(self, fetch, ud, d, try_again=True): | ||
33 | |||
34 | # checkstatus discards parameters either way, we need to do this before adding the SAS | ||
35 | ud.url = ud.url.replace('az://','https://').split(';')[0] | ||
36 | |||
37 | az_sas = d.getVar('AZ_SAS') | ||
38 | if az_sas and az_sas not in ud.url: | ||
39 | ud.url += az_sas | ||
40 | |||
41 | return Wget.checkstatus(self, fetch, ud, d, try_again) | ||
42 | |||
43 | # Override download method, include retries | ||
44 | def download(self, ud, d, retries=3): | ||
45 | """Fetch urls""" | ||
46 | |||
47 | # If we're reaching the account transaction limit we might be refused a connection, | ||
48 | # retrying allows us to avoid false negatives since the limit changes over time | ||
49 | fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5' | ||
50 | |||
51 | # We need to provide a localpath to avoid wget using the SAS | ||
52 | # ud.localfile either has the downloadfilename or ud.path | ||
53 | localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) | ||
54 | bb.utils.mkdirhier(os.path.dirname(localpath)) | ||
55 | fetchcmd += " -O %s" % shlex.quote(localpath) | ||
56 | |||
57 | |||
58 | if ud.user and ud.pswd: | ||
59 | fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd) | ||
60 | |||
61 | # Check if a Shared Access Signature was given and use it | ||
62 | az_sas = d.getVar('AZ_SAS') | ||
63 | |||
64 | if az_sas: | ||
65 | azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas) | ||
66 | else: | ||
67 | azuri = '%s%s%s' % ('https://', ud.host, ud.path) | ||
68 | |||
69 | if os.path.exists(ud.localpath): | ||
70 | # file exists, but we didn't complete it.. trying again. | ||
71 | fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri) | ||
72 | else: | ||
73 | fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri) | ||
74 | |||
75 | try: | ||
76 | self._runwget(ud, d, fetchcmd, False) | ||
77 | except FetchError as e: | ||
78 | # Azure fails on handshake sometimes when using wget after some stress, producing a | ||
79 | # FetchError from the fetcher, if the artifact exists retrying should succeed | ||
80 | if 'Unable to establish SSL connection' in str(e): | ||
81 | logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries) | ||
82 | self.download(ud, d, retries -1) | ||
83 | |||
84 | # Sanity check since wget can pretend it succeeded when it didn't | ||
85 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
86 | if not os.path.exists(ud.localpath): | ||
87 | raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri) | ||
88 | |||
89 | if os.path.getsize(ud.localpath) == 0: | ||
90 | os.remove(ud.localpath) | ||
91 | raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri) | ||
92 | |||
93 | return True | ||
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py new file mode 100644 index 0000000000..01d49435c3 --- /dev/null +++ b/bitbake/lib/bb/fetch2/crate.py | |||
@@ -0,0 +1,141 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementation for crates.io | ||
5 | """ | ||
6 | |||
7 | # Copyright (C) 2016 Doug Goldstein | ||
8 | # | ||
9 | # SPDX-License-Identifier: GPL-2.0-only | ||
10 | # | ||
11 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
12 | |||
13 | import hashlib | ||
14 | import json | ||
15 | import os | ||
16 | import subprocess | ||
17 | import bb | ||
18 | from bb.fetch2 import logger, subprocess_setup, UnpackError | ||
19 | from bb.fetch2.wget import Wget | ||
20 | |||
21 | |||
22 | class Crate(Wget): | ||
23 | |||
24 | """Class to fetch crates via wget""" | ||
25 | |||
26 | def _cargo_bitbake_path(self, rootdir): | ||
27 | return os.path.join(rootdir, "cargo_home", "bitbake") | ||
28 | |||
29 | def supports(self, ud, d): | ||
30 | """ | ||
31 | Check to see if a given url is for this fetcher | ||
32 | """ | ||
33 | return ud.type in ['crate'] | ||
34 | |||
35 | def recommends_checksum(self, urldata): | ||
36 | return True | ||
37 | |||
38 | def urldata_init(self, ud, d): | ||
39 | """ | ||
40 | Sets up to download the respective crate from crates.io | ||
41 | """ | ||
42 | |||
43 | if ud.type == 'crate': | ||
44 | self._crate_urldata_init(ud, d) | ||
45 | |||
46 | super(Crate, self).urldata_init(ud, d) | ||
47 | |||
48 | def _crate_urldata_init(self, ud, d): | ||
49 | """ | ||
50 | Sets up the download for a crate | ||
51 | """ | ||
52 | |||
53 | # URL syntax is: crate://NAME/VERSION | ||
54 | # break the URL apart by / | ||
55 | parts = ud.url.split('/') | ||
56 | if len(parts) < 5: | ||
57 | raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url) | ||
58 | |||
59 | # version is expected to be the last token | ||
60 | # but ignore possible url parameters which will be used | ||
61 | # by the top fetcher class | ||
62 | version = parts[-1].split(";")[0] | ||
63 | # second to last field is name | ||
64 | name = parts[-2] | ||
65 | # host (this is to allow custom crate registries to be specified) | ||
66 | host = '/'.join(parts[2:-2]) | ||
67 | |||
68 | # if using upstream just fix it up nicely | ||
69 | if host == 'crates.io': | ||
70 | host = 'crates.io/api/v1/crates' | ||
71 | |||
72 | ud.url = "https://%s/%s/%s/download" % (host, name, version) | ||
73 | ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) | ||
74 | if 'name' not in ud.parm: | ||
75 | ud.parm['name'] = '%s-%s' % (name, version) | ||
76 | |||
77 | logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename'])) | ||
78 | |||
79 | def unpack(self, ud, rootdir, d): | ||
80 | """ | ||
81 | Uses the crate to build the necessary paths for cargo to utilize it | ||
82 | """ | ||
83 | if ud.type == 'crate': | ||
84 | return self._crate_unpack(ud, rootdir, d) | ||
85 | else: | ||
86 | super(Crate, self).unpack(ud, rootdir, d) | ||
87 | |||
88 | def _crate_unpack(self, ud, rootdir, d): | ||
89 | """ | ||
90 | Unpacks a crate | ||
91 | """ | ||
92 | thefile = ud.localpath | ||
93 | |||
94 | # possible metadata we need to write out | ||
95 | metadata = {} | ||
96 | |||
97 | # change to the rootdir to unpack but save the old working dir | ||
98 | save_cwd = os.getcwd() | ||
99 | os.chdir(rootdir) | ||
100 | |||
101 | bp = d.getVar('BP') | ||
102 | if bp == ud.parm.get('name'): | ||
103 | cmd = "tar -xz --no-same-owner -f %s" % thefile | ||
104 | ud.unpack_tracer.unpack("crate-extract", rootdir) | ||
105 | else: | ||
106 | cargo_bitbake = self._cargo_bitbake_path(rootdir) | ||
107 | ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake) | ||
108 | |||
109 | cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake) | ||
110 | |||
111 | # ensure we've got these paths made | ||
112 | bb.utils.mkdirhier(cargo_bitbake) | ||
113 | |||
114 | # generate metadata necessary | ||
115 | with open(thefile, 'rb') as f: | ||
116 | # get the SHA256 of the original tarball | ||
117 | tarhash = hashlib.sha256(f.read()).hexdigest() | ||
118 | |||
119 | metadata['files'] = {} | ||
120 | metadata['package'] = tarhash | ||
121 | |||
122 | path = d.getVar('PATH') | ||
123 | if path: | ||
124 | cmd = "PATH=\"%s\" %s" % (path, cmd) | ||
125 | bb.note("Unpacking %s to %s/" % (thefile, os.getcwd())) | ||
126 | |||
127 | ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) | ||
128 | |||
129 | os.chdir(save_cwd) | ||
130 | |||
131 | if ret != 0: | ||
132 | raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url) | ||
133 | |||
134 | # if we have metadata to write out.. | ||
135 | if len(metadata) > 0: | ||
136 | cratepath = os.path.splitext(os.path.basename(thefile))[0] | ||
137 | bbpath = self._cargo_bitbake_path(rootdir) | ||
138 | mdfile = '.cargo-checksum.json' | ||
139 | mdpath = os.path.join(bbpath, cratepath, mdfile) | ||
140 | with open(mdpath, "w") as f: | ||
141 | json.dump(metadata, f) | ||
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py new file mode 100644 index 0000000000..eb3e0c6a6b --- /dev/null +++ b/bitbake/lib/bb/fetch2/gcp.py | |||
@@ -0,0 +1,102 @@ | |||
1 | """ | ||
2 | BitBake 'Fetch' implementation for Google Cloud Platform Storage. | ||
3 | |||
4 | Class for fetching files from Google Cloud Storage using the | ||
5 | Google Cloud Storage Python Client. The GCS Python Client must | ||
6 | be correctly installed, configured and authenticated prior to use. | ||
7 | Additionally, gsutil must also be installed. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2023, Snap Inc. | ||
12 | # | ||
13 | # Based in part on bb.fetch2.s3: | ||
14 | # Copyright (C) 2017 Andre McCurdy | ||
15 | # | ||
16 | # SPDX-License-Identifier: GPL-2.0-only | ||
17 | # | ||
18 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
19 | |||
20 | import os | ||
21 | import bb | ||
22 | import urllib.parse, urllib.error | ||
23 | from bb.fetch2 import FetchMethod | ||
24 | from bb.fetch2 import FetchError | ||
25 | from bb.fetch2 import logger | ||
26 | from bb.fetch2 import runfetchcmd | ||
27 | |||
28 | class GCP(FetchMethod): | ||
29 | """ | ||
30 | Class to fetch urls via GCP's Python API. | ||
31 | """ | ||
32 | def __init__(self): | ||
33 | self.gcp_client = None | ||
34 | |||
35 | def supports(self, ud, d): | ||
36 | """ | ||
37 | Check to see if a given url can be fetched with GCP. | ||
38 | """ | ||
39 | return ud.type in ['gs'] | ||
40 | |||
41 | def recommends_checksum(self, urldata): | ||
42 | return True | ||
43 | |||
44 | def urldata_init(self, ud, d): | ||
45 | if 'downloadfilename' in ud.parm: | ||
46 | ud.basename = ud.parm['downloadfilename'] | ||
47 | else: | ||
48 | ud.basename = os.path.basename(ud.path) | ||
49 | |||
50 | ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) | ||
51 | ud.basecmd = "gsutil stat" | ||
52 | |||
53 | def get_gcp_client(self): | ||
54 | from google.cloud import storage | ||
55 | self.gcp_client = storage.Client(project=None) | ||
56 | |||
57 | def download(self, ud, d): | ||
58 | """ | ||
59 | Fetch urls using the GCP API. | ||
60 | Assumes localpath was called first. | ||
61 | """ | ||
62 | logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}") | ||
63 | if self.gcp_client is None: | ||
64 | self.get_gcp_client() | ||
65 | |||
66 | bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") | ||
67 | runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d) | ||
68 | |||
69 | # Path sometimes has leading slash, so strip it | ||
70 | path = ud.path.lstrip("/") | ||
71 | blob = self.gcp_client.bucket(ud.host).blob(path) | ||
72 | blob.download_to_filename(ud.localpath) | ||
73 | |||
74 | # Additional sanity checks copied from the wget class (although there | ||
75 | # are no known issues which mean these are required, treat the GCP API | ||
76 | # tool with a little healthy suspicion). | ||
77 | if not os.path.exists(ud.localpath): | ||
78 | raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!") | ||
79 | |||
80 | if os.path.getsize(ud.localpath) == 0: | ||
81 | os.remove(ud.localpath) | ||
82 | raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.") | ||
83 | |||
84 | return True | ||
85 | |||
86 | def checkstatus(self, fetch, ud, d): | ||
87 | """ | ||
88 | Check the status of a URL. | ||
89 | """ | ||
90 | logger.debug2(f"Checking status of gs://{ud.host}{ud.path}") | ||
91 | if self.gcp_client is None: | ||
92 | self.get_gcp_client() | ||
93 | |||
94 | bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") | ||
95 | runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d) | ||
96 | |||
97 | # Path sometimes has leading slash, so strip it | ||
98 | path = ud.path.lstrip("/") | ||
99 | if self.gcp_client.bucket(ud.host).blob(path).exists() == False: | ||
100 | raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist") | ||
101 | else: | ||
102 | return True | ||
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py index e3ba80a3f5..c7ff769fdf 100644 --- a/bitbake/lib/bb/fetch2/git.py +++ b/bitbake/lib/bb/fetch2/git.py | |||
@@ -44,13 +44,27 @@ Supported SRC_URI options are: | |||
44 | 44 | ||
45 | - nobranch | 45 | - nobranch |
46 | Don't check the SHA validation for branch. set this option for the recipe | 46 | Don't check the SHA validation for branch. set this option for the recipe |
47 | referring to commit which is valid in tag instead of branch. | 47 | referring to commit which is valid in any namespace (branch, tag, ...) |
48 | instead of branch. | ||
48 | The default is "0", set nobranch=1 if needed. | 49 | The default is "0", set nobranch=1 if needed. |
49 | 50 | ||
51 | - subpath | ||
52 | Limit the checkout to a specific subpath of the tree. | ||
53 | By default, checkout the whole tree, set subpath=<path> if needed | ||
54 | |||
55 | - destsuffix | ||
56 | The name of the path in which to place the checkout. | ||
57 | By default, the path is git/, set destsuffix=<suffix> if needed | ||
58 | |||
50 | - usehead | 59 | - usehead |
51 | For local git:// urls to use the current branch HEAD as the revision for use with | 60 | For local git:// urls to use the current branch HEAD as the revision for use with |
52 | AUTOREV. Implies nobranch. | 61 | AUTOREV. Implies nobranch. |
53 | 62 | ||
63 | - lfs | ||
64 | Enable the checkout to use LFS for large files. This will download all LFS files | ||
65 | in the download step, as the unpack step does not have network access. | ||
66 | The default is "1", set lfs=0 to skip. | ||
67 | |||
54 | """ | 68 | """ |
55 | 69 | ||
56 | # Copyright (C) 2005 Richard Purdie | 70 | # Copyright (C) 2005 Richard Purdie |
@@ -64,14 +78,20 @@ import fnmatch | |||
64 | import os | 78 | import os |
65 | import re | 79 | import re |
66 | import shlex | 80 | import shlex |
81 | import shutil | ||
67 | import subprocess | 82 | import subprocess |
68 | import tempfile | 83 | import tempfile |
69 | import bb | 84 | import bb |
70 | import bb.progress | 85 | import bb.progress |
86 | from contextlib import contextmanager | ||
71 | from bb.fetch2 import FetchMethod | 87 | from bb.fetch2 import FetchMethod |
72 | from bb.fetch2 import runfetchcmd | 88 | from bb.fetch2 import runfetchcmd |
73 | from bb.fetch2 import logger | 89 | from bb.fetch2 import logger |
90 | from bb.fetch2 import trusted_network | ||
91 | |||
74 | 92 | ||
93 | sha1_re = re.compile(r'^[0-9a-f]{40}$') | ||
94 | slash_re = re.compile(r"/+") | ||
75 | 95 | ||
76 | class GitProgressHandler(bb.progress.LineFilterProgressHandler): | 96 | class GitProgressHandler(bb.progress.LineFilterProgressHandler): |
77 | """Extract progress information from git output""" | 97 | """Extract progress information from git output""" |
@@ -130,6 +150,9 @@ class Git(FetchMethod): | |||
130 | def supports_checksum(self, urldata): | 150 | def supports_checksum(self, urldata): |
131 | return False | 151 | return False |
132 | 152 | ||
153 | def cleanup_upon_failure(self): | ||
154 | return False | ||
155 | |||
133 | def urldata_init(self, ud, d): | 156 | def urldata_init(self, ud, d): |
134 | """ | 157 | """ |
135 | init git specific variable within url data | 158 | init git specific variable within url data |
@@ -141,6 +164,11 @@ class Git(FetchMethod): | |||
141 | ud.proto = 'file' | 164 | ud.proto = 'file' |
142 | else: | 165 | else: |
143 | ud.proto = "git" | 166 | ud.proto = "git" |
167 | if ud.host == "github.com" and ud.proto == "git": | ||
168 | # github stopped supporting git protocol | ||
169 | # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git | ||
170 | ud.proto = "https" | ||
171 | bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url) | ||
144 | 172 | ||
145 | if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'): | 173 | if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'): |
146 | raise bb.fetch2.ParameterError("Invalid protocol type", ud.url) | 174 | raise bb.fetch2.ParameterError("Invalid protocol type", ud.url) |
@@ -164,11 +192,18 @@ class Git(FetchMethod): | |||
164 | ud.nocheckout = 1 | 192 | ud.nocheckout = 1 |
165 | 193 | ||
166 | ud.unresolvedrev = {} | 194 | ud.unresolvedrev = {} |
167 | branches = ud.parm.get("branch", "master").split(',') | 195 | branches = ud.parm.get("branch", "").split(',') |
196 | if branches == [""] and not ud.nobranch: | ||
197 | bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url) | ||
198 | branches = ["master"] | ||
168 | if len(branches) != len(ud.names): | 199 | if len(branches) != len(ud.names): |
169 | raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) | 200 | raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) |
170 | 201 | ||
171 | ud.cloneflags = "-s -n" | 202 | ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" |
203 | |||
204 | ud.cloneflags = "-n" | ||
205 | if not ud.noshared: | ||
206 | ud.cloneflags += " -s" | ||
172 | if ud.bareclone: | 207 | if ud.bareclone: |
173 | ud.cloneflags += " --mirror" | 208 | ud.cloneflags += " --mirror" |
174 | 209 | ||
@@ -227,7 +262,7 @@ class Git(FetchMethod): | |||
227 | for name in ud.names: | 262 | for name in ud.names: |
228 | ud.unresolvedrev[name] = 'HEAD' | 263 | ud.unresolvedrev[name] = 'HEAD' |
229 | 264 | ||
230 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" | 265 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all" |
231 | 266 | ||
232 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" | 267 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" |
233 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable | 268 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable |
@@ -236,20 +271,20 @@ class Git(FetchMethod): | |||
236 | ud.setup_revisions(d) | 271 | ud.setup_revisions(d) |
237 | 272 | ||
238 | for name in ud.names: | 273 | for name in ud.names: |
239 | # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one | 274 | # Ensure any revision that doesn't look like a SHA-1 is translated into one |
240 | if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]): | 275 | if not sha1_re.match(ud.revisions[name] or ''): |
241 | if ud.revisions[name]: | 276 | if ud.revisions[name]: |
242 | ud.unresolvedrev[name] = ud.revisions[name] | 277 | ud.unresolvedrev[name] = ud.revisions[name] |
243 | ud.revisions[name] = self.latest_revision(ud, d, name) | 278 | ud.revisions[name] = self.latest_revision(ud, d, name) |
244 | 279 | ||
245 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_')) | 280 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) |
246 | if gitsrcname.startswith('.'): | 281 | if gitsrcname.startswith('.'): |
247 | gitsrcname = gitsrcname[1:] | 282 | gitsrcname = gitsrcname[1:] |
248 | 283 | ||
249 | # for rebaseable git repo, it is necessary to keep mirror tar ball | 284 | # For a rebaseable git repo, it is necessary to keep a mirror tar ball |
250 | # per revision, so that even the revision disappears from the | 285 | # per revision, so that even if the revision disappears from the |
251 | # upstream repo in the future, the mirror will remain intact and still | 286 | # upstream repo in the future, the mirror will remain intact and still |
252 | # contains the revision | 287 | # contain the revision |
253 | if ud.rebaseable: | 288 | if ud.rebaseable: |
254 | for name in ud.names: | 289 | for name in ud.names: |
255 | gitsrcname = gitsrcname + '_' + ud.revisions[name] | 290 | gitsrcname = gitsrcname + '_' + ud.revisions[name] |
@@ -293,7 +328,10 @@ class Git(FetchMethod): | |||
293 | return ud.clonedir | 328 | return ud.clonedir |
294 | 329 | ||
295 | def need_update(self, ud, d): | 330 | def need_update(self, ud, d): |
296 | return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud) | 331 | return self.clonedir_need_update(ud, d) \ |
332 | or self.shallow_tarball_need_update(ud) \ | ||
333 | or self.tarball_need_update(ud) \ | ||
334 | or self.lfs_need_update(ud, d) | ||
297 | 335 | ||
298 | def clonedir_need_update(self, ud, d): | 336 | def clonedir_need_update(self, ud, d): |
299 | if not os.path.exists(ud.clonedir): | 337 | if not os.path.exists(ud.clonedir): |
@@ -305,6 +343,15 @@ class Git(FetchMethod): | |||
305 | return True | 343 | return True |
306 | return False | 344 | return False |
307 | 345 | ||
346 | def lfs_need_update(self, ud, d): | ||
347 | if self.clonedir_need_update(ud, d): | ||
348 | return True | ||
349 | |||
350 | for name in ud.names: | ||
351 | if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir): | ||
352 | return True | ||
353 | return False | ||
354 | |||
308 | def clonedir_need_shallow_revs(self, ud, d): | 355 | def clonedir_need_shallow_revs(self, ud, d): |
309 | for rev in ud.shallow_revs: | 356 | for rev in ud.shallow_revs: |
310 | try: | 357 | try: |
@@ -324,6 +371,16 @@ class Git(FetchMethod): | |||
324 | # is not possible | 371 | # is not possible |
325 | if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")): | 372 | if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")): |
326 | return True | 373 | return True |
374 | # If the url is not in trusted network, that is, BB_NO_NETWORK is set to 0 | ||
375 | # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then | ||
376 | # we need to try premirrors first as using upstream is destined to fail. | ||
377 | if not trusted_network(d, ud.url): | ||
378 | return True | ||
379 | # the following check is to ensure incremental fetch in downloads, this is | ||
380 | # because the premirror might be old and does not contain the new rev required, | ||
381 | # and this will cause a total removal and new clone. So if we can reach to | ||
382 | # network, we prefer upstream over premirror, though the premirror might contain | ||
383 | # the new rev. | ||
327 | if os.path.exists(ud.clonedir): | 384 | if os.path.exists(ud.clonedir): |
328 | return False | 385 | return False |
329 | return True | 386 | return True |
@@ -337,17 +394,54 @@ class Git(FetchMethod): | |||
337 | if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): | 394 | if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): |
338 | ud.localpath = ud.fullshallow | 395 | ud.localpath = ud.fullshallow |
339 | return | 396 | return |
340 | elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir): | 397 | elif os.path.exists(ud.fullmirror) and self.need_update(ud, d): |
341 | bb.utils.mkdirhier(ud.clonedir) | 398 | if not os.path.exists(ud.clonedir): |
342 | runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) | 399 | bb.utils.mkdirhier(ud.clonedir) |
343 | 400 | runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) | |
401 | else: | ||
402 | tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | ||
403 | runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir) | ||
404 | output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir) | ||
405 | if 'mirror' in output: | ||
406 | runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir) | ||
407 | runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir) | ||
408 | fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd) | ||
409 | runfetchcmd(fetch_cmd, d, workdir=ud.clonedir) | ||
344 | repourl = self._get_repo_url(ud) | 410 | repourl = self._get_repo_url(ud) |
345 | 411 | ||
412 | needs_clone = False | ||
413 | if os.path.exists(ud.clonedir): | ||
414 | # The directory may exist, but not be the top level of a bare git | ||
415 | # repository in which case it needs to be deleted and re-cloned. | ||
416 | try: | ||
417 | # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel | ||
418 | output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir) | ||
419 | toplevel = output.rstrip() | ||
420 | |||
421 | if not bb.utils.path_is_descendant(toplevel, ud.clonedir): | ||
422 | logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir) | ||
423 | needs_clone = True | ||
424 | except bb.fetch2.FetchError as e: | ||
425 | logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e) | ||
426 | needs_clone = True | ||
427 | except FileNotFoundError as e: | ||
428 | logger.warning("%s", e) | ||
429 | needs_clone = True | ||
430 | |||
431 | if needs_clone: | ||
432 | shutil.rmtree(ud.clonedir) | ||
433 | else: | ||
434 | needs_clone = True | ||
435 | |||
346 | # If the repo still doesn't exist, fallback to cloning it | 436 | # If the repo still doesn't exist, fallback to cloning it |
347 | if not os.path.exists(ud.clonedir): | 437 | if needs_clone: |
348 | # We do this since git will use a "-l" option automatically for local urls where possible | 438 | # We do this since git will use a "-l" option automatically for local urls where possible, |
439 | # but it doesn't work when git/objects is a symlink, only works when it is a directory. | ||
349 | if repourl.startswith("file://"): | 440 | if repourl.startswith("file://"): |
350 | repourl = repourl[7:] | 441 | repourl_path = repourl[7:] |
442 | objects = os.path.join(repourl_path, 'objects') | ||
443 | if os.path.isdir(objects) and not os.path.islink(objects): | ||
444 | repourl = repourl_path | ||
351 | clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir) | 445 | clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir) |
352 | if ud.proto.lower() != 'file': | 446 | if ud.proto.lower() != 'file': |
353 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) | 447 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) |
@@ -361,7 +455,11 @@ class Git(FetchMethod): | |||
361 | runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir) | 455 | runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir) |
362 | 456 | ||
363 | runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir) | 457 | runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir) |
364 | fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl)) | 458 | |
459 | if ud.nobranch: | ||
460 | fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl)) | ||
461 | else: | ||
462 | fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl)) | ||
365 | if ud.proto.lower() != 'file': | 463 | if ud.proto.lower() != 'file': |
366 | bb.fetch2.check_network_access(d, fetch_cmd, ud.url) | 464 | bb.fetch2.check_network_access(d, fetch_cmd, ud.url) |
367 | progresshandler = GitProgressHandler(d) | 465 | progresshandler = GitProgressHandler(d) |
@@ -384,17 +482,16 @@ class Git(FetchMethod): | |||
384 | if missing_rev: | 482 | if missing_rev: |
385 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) | 483 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) |
386 | 484 | ||
387 | if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud): | 485 | if self.lfs_need_update(ud, d): |
388 | # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching | 486 | # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching |
389 | # of all LFS blobs needed at the the srcrev. | 487 | # of all LFS blobs needed at the srcrev. |
390 | # | 488 | # |
391 | # It would be nice to just do this inline here by running 'git-lfs fetch' | 489 | # It would be nice to just do this inline here by running 'git-lfs fetch' |
392 | # on the bare clonedir, but that operation requires a working copy on some | 490 | # on the bare clonedir, but that operation requires a working copy on some |
393 | # releases of Git LFS. | 491 | # releases of Git LFS. |
394 | tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | 492 | with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir: |
395 | try: | ||
396 | # Do the checkout. This implicitly involves a Git LFS fetch. | 493 | # Do the checkout. This implicitly involves a Git LFS fetch. |
397 | self.unpack(ud, tmpdir, d) | 494 | Git.unpack(self, ud, tmpdir, d) |
398 | 495 | ||
399 | # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into | 496 | # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into |
400 | # the bare clonedir. | 497 | # the bare clonedir. |
@@ -408,12 +505,24 @@ class Git(FetchMethod): | |||
408 | # Only do this if the unpack resulted in a .git/lfs directory being | 505 | # Only do this if the unpack resulted in a .git/lfs directory being |
409 | # created; this only happens if at least one blob needed to be | 506 | # created; this only happens if at least one blob needed to be |
410 | # downloaded. | 507 | # downloaded. |
411 | if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")): | 508 | if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")): |
412 | runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir) | 509 | runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir) |
413 | finally: | ||
414 | bb.utils.remove(tmpdir, recurse=True) | ||
415 | 510 | ||
416 | def build_mirror_data(self, ud, d): | 511 | def build_mirror_data(self, ud, d): |
512 | |||
513 | # Create as a temp file and move atomically into position to avoid races | ||
514 | @contextmanager | ||
515 | def create_atomic(filename): | ||
516 | fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) | ||
517 | try: | ||
518 | yield tfile | ||
519 | umask = os.umask(0o666) | ||
520 | os.umask(umask) | ||
521 | os.chmod(tfile, (0o666 & ~umask)) | ||
522 | os.rename(tfile, filename) | ||
523 | finally: | ||
524 | os.close(fd) | ||
525 | |||
417 | if ud.shallow and ud.write_shallow_tarballs: | 526 | if ud.shallow and ud.write_shallow_tarballs: |
418 | if not os.path.exists(ud.fullshallow): | 527 | if not os.path.exists(ud.fullshallow): |
419 | if os.path.islink(ud.fullshallow): | 528 | if os.path.islink(ud.fullshallow): |
@@ -424,7 +533,8 @@ class Git(FetchMethod): | |||
424 | self.clone_shallow_local(ud, shallowclone, d) | 533 | self.clone_shallow_local(ud, shallowclone, d) |
425 | 534 | ||
426 | logger.info("Creating tarball of git repository") | 535 | logger.info("Creating tarball of git repository") |
427 | runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone) | 536 | with create_atomic(ud.fullshallow) as tfile: |
537 | runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone) | ||
428 | runfetchcmd("touch %s.done" % ud.fullshallow, d) | 538 | runfetchcmd("touch %s.done" % ud.fullshallow, d) |
429 | finally: | 539 | finally: |
430 | bb.utils.remove(tempdir, recurse=True) | 540 | bb.utils.remove(tempdir, recurse=True) |
@@ -433,7 +543,11 @@ class Git(FetchMethod): | |||
433 | os.unlink(ud.fullmirror) | 543 | os.unlink(ud.fullmirror) |
434 | 544 | ||
435 | logger.info("Creating tarball of git repository") | 545 | logger.info("Creating tarball of git repository") |
436 | runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir) | 546 | with create_atomic(ud.fullmirror) as tfile: |
547 | mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, | ||
548 | quiet=True, workdir=ud.clonedir) | ||
549 | runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." | ||
550 | % (tfile, mtime), d, workdir=ud.clonedir) | ||
437 | runfetchcmd("touch %s.done" % ud.fullmirror, d) | 551 | runfetchcmd("touch %s.done" % ud.fullmirror, d) |
438 | 552 | ||
439 | def clone_shallow_local(self, ud, dest, d): | 553 | def clone_shallow_local(self, ud, dest, d): |
@@ -495,18 +609,31 @@ class Git(FetchMethod): | |||
495 | def unpack(self, ud, destdir, d): | 609 | def unpack(self, ud, destdir, d): |
496 | """ unpack the downloaded src to destdir""" | 610 | """ unpack the downloaded src to destdir""" |
497 | 611 | ||
498 | subdir = ud.parm.get("subpath", "") | 612 | subdir = ud.parm.get("subdir") |
499 | if subdir != "": | 613 | subpath = ud.parm.get("subpath") |
500 | readpathspec = ":%s" % subdir | 614 | readpathspec = "" |
501 | def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/')) | 615 | def_destsuffix = "git/" |
502 | else: | 616 | |
503 | readpathspec = "" | 617 | if subpath: |
504 | def_destsuffix = "git/" | 618 | readpathspec = ":%s" % subpath |
619 | def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/')) | ||
620 | |||
621 | if subdir: | ||
622 | # If 'subdir' param exists, create a dir and use it as destination for unpack cmd | ||
623 | if os.path.isabs(subdir): | ||
624 | if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)): | ||
625 | raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url) | ||
626 | destdir = subdir | ||
627 | else: | ||
628 | destdir = os.path.join(destdir, subdir) | ||
629 | def_destsuffix = "" | ||
505 | 630 | ||
506 | destsuffix = ud.parm.get("destsuffix", def_destsuffix) | 631 | destsuffix = ud.parm.get("destsuffix", def_destsuffix) |
507 | destdir = ud.destdir = os.path.join(destdir, destsuffix) | 632 | destdir = ud.destdir = os.path.join(destdir, destsuffix) |
508 | if os.path.exists(destdir): | 633 | if os.path.exists(destdir): |
509 | bb.utils.prunedir(destdir) | 634 | bb.utils.prunedir(destdir) |
635 | if not ud.bareclone: | ||
636 | ud.unpack_tracer.unpack("git", destdir) | ||
510 | 637 | ||
511 | need_lfs = self._need_lfs(ud) | 638 | need_lfs = self._need_lfs(ud) |
512 | 639 | ||
@@ -516,13 +643,12 @@ class Git(FetchMethod): | |||
516 | source_found = False | 643 | source_found = False |
517 | source_error = [] | 644 | source_error = [] |
518 | 645 | ||
519 | if not source_found: | 646 | clonedir_is_up_to_date = not self.clonedir_need_update(ud, d) |
520 | clonedir_is_up_to_date = not self.clonedir_need_update(ud, d) | 647 | if clonedir_is_up_to_date: |
521 | if clonedir_is_up_to_date: | 648 | runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d) |
522 | runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d) | 649 | source_found = True |
523 | source_found = True | 650 | else: |
524 | else: | 651 | source_error.append("clone directory not available or not up to date: " + ud.clonedir) |
525 | source_error.append("clone directory not available or not up to date: " + ud.clonedir) | ||
526 | 652 | ||
527 | if not source_found: | 653 | if not source_found: |
528 | if ud.shallow: | 654 | if ud.shallow: |
@@ -546,9 +672,11 @@ class Git(FetchMethod): | |||
546 | raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl)) | 672 | raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl)) |
547 | elif not need_lfs: | 673 | elif not need_lfs: |
548 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) | 674 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) |
675 | else: | ||
676 | runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) | ||
549 | 677 | ||
550 | if not ud.nocheckout: | 678 | if not ud.nocheckout: |
551 | if subdir != "": | 679 | if subpath: |
552 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, | 680 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, |
553 | workdir=destdir) | 681 | workdir=destdir) |
554 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) | 682 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) |
@@ -597,6 +725,35 @@ class Git(FetchMethod): | |||
597 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) | 725 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) |
598 | return output.split()[0] != "0" | 726 | return output.split()[0] != "0" |
599 | 727 | ||
728 | def _lfs_objects_downloaded(self, ud, d, name, wd): | ||
729 | """ | ||
730 | Verifies whether the LFS objects for requested revisions have already been downloaded | ||
731 | """ | ||
732 | # Bail out early if this repository doesn't use LFS | ||
733 | if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd): | ||
734 | return True | ||
735 | |||
736 | # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file | ||
737 | # existence. | ||
738 | # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git | ||
739 | cmd = "%s lfs ls-files -l %s" \ | ||
740 | % (ud.basecmd, ud.revisions[name]) | ||
741 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() | ||
742 | # Do not do any further matching if no objects are managed by LFS | ||
743 | if not output: | ||
744 | return True | ||
745 | |||
746 | # Match all lines beginning with the hexadecimal OID | ||
747 | oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)") | ||
748 | for line in output.split("\n"): | ||
749 | oid = re.search(oid_regex, line) | ||
750 | if not oid: | ||
751 | bb.warn("git lfs ls-files output '%s' did not match expected format." % line) | ||
752 | if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))): | ||
753 | return False | ||
754 | |||
755 | return True | ||
756 | |||
600 | def _need_lfs(self, ud): | 757 | def _need_lfs(self, ud): |
601 | return ud.parm.get("lfs", "1") == "1" | 758 | return ud.parm.get("lfs", "1") == "1" |
602 | 759 | ||
@@ -605,13 +762,11 @@ class Git(FetchMethod): | |||
605 | Check if the repository has 'lfs' (large file) content | 762 | Check if the repository has 'lfs' (large file) content |
606 | """ | 763 | """ |
607 | 764 | ||
608 | if not ud.nobranch: | 765 | if ud.nobranch: |
609 | branchname = ud.branches[ud.names[0]] | 766 | # If no branch is specified, use the current git commit |
610 | else: | 767 | refname = self._build_revision(ud, d, ud.names[0]) |
611 | branchname = "master" | 768 | elif wd == ud.clonedir: |
612 | 769 | # The bare clonedir doesn't use the remote names; it has the branch immediately. | |
613 | # The bare clonedir doesn't use the remote names; it has the branch immediately. | ||
614 | if wd == ud.clonedir: | ||
615 | refname = ud.branches[ud.names[0]] | 770 | refname = ud.branches[ud.names[0]] |
616 | else: | 771 | else: |
617 | refname = "origin/%s" % ud.branches[ud.names[0]] | 772 | refname = "origin/%s" % ud.branches[ud.names[0]] |
@@ -654,7 +809,6 @@ class Git(FetchMethod): | |||
654 | Return a unique key for the url | 809 | Return a unique key for the url |
655 | """ | 810 | """ |
656 | # Collapse adjacent slashes | 811 | # Collapse adjacent slashes |
657 | slash_re = re.compile(r"/+") | ||
658 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] | 812 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] |
659 | 813 | ||
660 | def _lsremote(self, ud, d, search): | 814 | def _lsremote(self, ud, d, search): |
@@ -687,6 +841,12 @@ class Git(FetchMethod): | |||
687 | """ | 841 | """ |
688 | Compute the HEAD revision for the url | 842 | Compute the HEAD revision for the url |
689 | """ | 843 | """ |
844 | if not d.getVar("__BBSRCREV_SEEN"): | ||
845 | raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path)) | ||
846 | |||
847 | # Ensure we mark as not cached | ||
848 | bb.fetch2.mark_recipe_nocache(d) | ||
849 | |||
690 | output = self._lsremote(ud, d, "") | 850 | output = self._lsremote(ud, d, "") |
691 | # Tags of the form ^{} may not work, need to fallback to other form | 851 | # Tags of the form ^{} may not work, need to fallback to other form |
692 | if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: | 852 | if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: |
@@ -711,38 +871,42 @@ class Git(FetchMethod): | |||
711 | """ | 871 | """ |
712 | pupver = ('', '') | 872 | pupver = ('', '') |
713 | 873 | ||
714 | tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)") | ||
715 | try: | 874 | try: |
716 | output = self._lsremote(ud, d, "refs/tags/*") | 875 | output = self._lsremote(ud, d, "refs/tags/*") |
717 | except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e: | 876 | except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e: |
718 | bb.note("Could not list remote: %s" % str(e)) | 877 | bb.note("Could not list remote: %s" % str(e)) |
719 | return pupver | 878 | return pupver |
720 | 879 | ||
880 | rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)") | ||
881 | pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)") | ||
882 | nonrel_re = re.compile(r"(alpha|beta|rc|final)+") | ||
883 | |||
721 | verstring = "" | 884 | verstring = "" |
722 | revision = "" | ||
723 | for line in output.split("\n"): | 885 | for line in output.split("\n"): |
724 | if not line: | 886 | if not line: |
725 | break | 887 | break |
726 | 888 | ||
727 | tag_head = line.split("/")[-1] | 889 | m = rev_tag_re.match(line) |
890 | if not m: | ||
891 | continue | ||
892 | |||
893 | (revision, tag) = m.groups() | ||
894 | |||
728 | # Ignore non-released branches | 895 | # Ignore non-released branches |
729 | m = re.search(r"(alpha|beta|rc|final)+", tag_head) | 896 | if nonrel_re.search(tag): |
730 | if m: | ||
731 | continue | 897 | continue |
732 | 898 | ||
733 | # search for version in the line | 899 | # search for version in the line |
734 | tag = tagregex.search(tag_head) | 900 | m = pver_re.search(tag) |
735 | if tag is None: | 901 | if not m: |
736 | continue | 902 | continue |
737 | 903 | ||
738 | tag = tag.group('pver') | 904 | pver = m.group('pver').replace("_", ".") |
739 | tag = tag.replace("_", ".") | ||
740 | 905 | ||
741 | if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0: | 906 | if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0: |
742 | continue | 907 | continue |
743 | 908 | ||
744 | verstring = tag | 909 | verstring = pver |
745 | revision = line.split()[0] | ||
746 | pupver = (verstring, revision) | 910 | pupver = (verstring, revision) |
747 | 911 | ||
748 | return pupver | 912 | return pupver |
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py index a4527bf364..f7f3af7212 100644 --- a/bitbake/lib/bb/fetch2/gitsm.py +++ b/bitbake/lib/bb/fetch2/gitsm.py | |||
@@ -88,9 +88,9 @@ class GitSM(Git): | |||
88 | subrevision[m] = module_hash.split()[2] | 88 | subrevision[m] = module_hash.split()[2] |
89 | 89 | ||
90 | # Convert relative to absolute uri based on parent uri | 90 | # Convert relative to absolute uri based on parent uri |
91 | if uris[m].startswith('..'): | 91 | if uris[m].startswith('..') or uris[m].startswith('./'): |
92 | newud = copy.copy(ud) | 92 | newud = copy.copy(ud) |
93 | newud.path = os.path.realpath(os.path.join(newud.path, uris[m])) | 93 | newud.path = os.path.normpath(os.path.join(newud.path, uris[m])) |
94 | uris[m] = Git._get_repo_url(self, newud) | 94 | uris[m] = Git._get_repo_url(self, newud) |
95 | 95 | ||
96 | for module in submodules: | 96 | for module in submodules: |
@@ -115,10 +115,21 @@ class GitSM(Git): | |||
115 | # This has to be a file reference | 115 | # This has to be a file reference |
116 | proto = "file" | 116 | proto = "file" |
117 | url = "gitsm://" + uris[module] | 117 | url = "gitsm://" + uris[module] |
118 | if url.endswith("{}{}".format(ud.host, ud.path)): | ||
119 | raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause deadlock situation in current version of Bitbake." \ | ||
120 | "Consider using git fetcher instead.") | ||
118 | 121 | ||
119 | url += ';protocol=%s' % proto | 122 | url += ';protocol=%s' % proto |
120 | url += ";name=%s" % module | 123 | url += ";name=%s" % module |
121 | url += ";subpath=%s" % module | 124 | url += ";subpath=%s" % module |
125 | url += ";nobranch=1" | ||
126 | url += ";lfs=%s" % self._need_lfs(ud) | ||
127 | # Note that adding "user=" here to give credentials to the | ||
128 | # submodule is not supported. Since using SRC_URI to give git:// | ||
129 | # URL a password is not supported, one have to use one of the | ||
130 | # recommended way (eg. ~/.netrc or SSH config) which does specify | ||
131 | # the user (See comment in git.py). | ||
132 | # So, we will not take patches adding "user=" support here. | ||
122 | 133 | ||
123 | ld = d.createCopy() | 134 | ld = d.createCopy() |
124 | # Not necessary to set SRC_URI, since we're passing the URI to | 135 | # Not necessary to set SRC_URI, since we're passing the URI to |
@@ -140,16 +151,6 @@ class GitSM(Git): | |||
140 | if Git.need_update(self, ud, d): | 151 | if Git.need_update(self, ud, d): |
141 | return True | 152 | return True |
142 | 153 | ||
143 | try: | ||
144 | # Check for the nugget dropped by the download operation | ||
145 | known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \ | ||
146 | (ud.basecmd), d, workdir=ud.clonedir) | ||
147 | |||
148 | if ud.revisions[ud.names[0]] in known_srcrevs.split(): | ||
149 | return False | ||
150 | except bb.fetch2.FetchError: | ||
151 | pass | ||
152 | |||
153 | need_update_list = [] | 154 | need_update_list = [] |
154 | def need_update_submodule(ud, url, module, modpath, workdir, d): | 155 | def need_update_submodule(ud, url, module, modpath, workdir, d): |
155 | url += ";bareclone=1;nobranch=1" | 156 | url += ";bareclone=1;nobranch=1" |
@@ -172,13 +173,8 @@ class GitSM(Git): | |||
172 | shutil.rmtree(tmpdir) | 173 | shutil.rmtree(tmpdir) |
173 | else: | 174 | else: |
174 | self.process_submodules(ud, ud.clonedir, need_update_submodule, d) | 175 | self.process_submodules(ud, ud.clonedir, need_update_submodule, d) |
175 | if len(need_update_list) == 0: | ||
176 | # We already have the required commits of all submodules. Drop | ||
177 | # a nugget so we don't need to check again. | ||
178 | runfetchcmd("%s config --add bitbake.srcrev %s" % \ | ||
179 | (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) | ||
180 | 176 | ||
181 | if len(need_update_list) > 0: | 177 | if need_update_list: |
182 | logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) | 178 | logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) |
183 | return True | 179 | return True |
184 | 180 | ||
@@ -209,9 +205,6 @@ class GitSM(Git): | |||
209 | shutil.rmtree(tmpdir) | 205 | shutil.rmtree(tmpdir) |
210 | else: | 206 | else: |
211 | self.process_submodules(ud, ud.clonedir, download_submodule, d) | 207 | self.process_submodules(ud, ud.clonedir, download_submodule, d) |
212 | # Drop a nugget for the srcrev we've fetched (used by need_update) | ||
213 | runfetchcmd("%s config --add bitbake.srcrev %s" % \ | ||
214 | (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) | ||
215 | 208 | ||
216 | def unpack(self, ud, destdir, d): | 209 | def unpack(self, ud, destdir, d): |
217 | def unpack_submodules(ud, url, module, modpath, workdir, d): | 210 | def unpack_submodules(ud, url, module, modpath, workdir, d): |
@@ -225,6 +218,10 @@ class GitSM(Git): | |||
225 | 218 | ||
226 | try: | 219 | try: |
227 | newfetch = Fetch([url], d, cache=False) | 220 | newfetch = Fetch([url], d, cache=False) |
221 | # modpath is needed by unpack tracer to calculate submodule | ||
222 | # checkout dir | ||
223 | new_ud = newfetch.ud[url] | ||
224 | new_ud.modpath = modpath | ||
228 | newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module))) | 225 | newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module))) |
229 | except Exception as e: | 226 | except Exception as e: |
230 | logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e))) | 227 | logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e))) |
@@ -250,10 +247,12 @@ class GitSM(Git): | |||
250 | ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) | 247 | ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) |
251 | 248 | ||
252 | if not ud.bareclone and ret: | 249 | if not ud.bareclone and ret: |
253 | # All submodules should already be downloaded and configured in the tree. This simply sets | 250 | # All submodules should already be downloaded and configured in the tree. This simply |
254 | # up the configuration and checks out the files. The main project config should remain | 251 | # sets up the configuration and checks out the files. The main project config should |
255 | # unmodified, and no download from the internet should occur. | 252 | # remain unmodified, and no download from the internet should occur. As such, lfs smudge |
256 | runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir) | 253 | # should also be skipped as these files were already smudged in the fetch stage if lfs |
254 | # was enabled. | ||
255 | runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir) | ||
257 | 256 | ||
258 | def implicit_urldata(self, ud, d): | 257 | def implicit_urldata(self, ud, d): |
259 | import shutil, subprocess, tempfile | 258 | import shutil, subprocess, tempfile |
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py index 063e13008a..cbff8c490c 100644 --- a/bitbake/lib/bb/fetch2/hg.py +++ b/bitbake/lib/bb/fetch2/hg.py | |||
@@ -242,6 +242,7 @@ class Hg(FetchMethod): | |||
242 | revflag = "-r %s" % ud.revision | 242 | revflag = "-r %s" % ud.revision |
243 | subdir = ud.parm.get("destsuffix", ud.module) | 243 | subdir = ud.parm.get("destsuffix", ud.module) |
244 | codir = "%s/%s" % (destdir, subdir) | 244 | codir = "%s/%s" % (destdir, subdir) |
245 | ud.unpack_tracer.unpack("hg", codir) | ||
245 | 246 | ||
246 | scmdata = ud.parm.get("scmdata", "") | 247 | scmdata = ud.parm.get("scmdata", "") |
247 | if scmdata != "nokeep": | 248 | if scmdata != "nokeep": |
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py index e7d1c8c58f..7d7668110e 100644 --- a/bitbake/lib/bb/fetch2/local.py +++ b/bitbake/lib/bb/fetch2/local.py | |||
@@ -41,9 +41,9 @@ class Local(FetchMethod): | |||
41 | """ | 41 | """ |
42 | Return the local filename of a given url assuming a successful fetch. | 42 | Return the local filename of a given url assuming a successful fetch. |
43 | """ | 43 | """ |
44 | return self.localpaths(urldata, d)[-1] | 44 | return self.localfile_searchpaths(urldata, d)[-1] |
45 | 45 | ||
46 | def localpaths(self, urldata, d): | 46 | def localfile_searchpaths(self, urldata, d): |
47 | """ | 47 | """ |
48 | Return the local filename of a given url assuming a successful fetch. | 48 | Return the local filename of a given url assuming a successful fetch. |
49 | """ | 49 | """ |
@@ -51,18 +51,14 @@ class Local(FetchMethod): | |||
51 | path = urldata.decodedurl | 51 | path = urldata.decodedurl |
52 | newpath = path | 52 | newpath = path |
53 | if path[0] == "/": | 53 | if path[0] == "/": |
54 | logger.debug2("Using absolute %s" % (path)) | ||
54 | return [path] | 55 | return [path] |
55 | filespath = d.getVar('FILESPATH') | 56 | filespath = d.getVar('FILESPATH') |
56 | if filespath: | 57 | if filespath: |
57 | logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) | 58 | logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) |
58 | newpath, hist = bb.utils.which(filespath, path, history=True) | 59 | newpath, hist = bb.utils.which(filespath, path, history=True) |
60 | logger.debug2("Using %s for %s" % (newpath, path)) | ||
59 | searched.extend(hist) | 61 | searched.extend(hist) |
60 | if not os.path.exists(newpath): | ||
61 | dldirfile = os.path.join(d.getVar("DL_DIR"), path) | ||
62 | logger.debug2("Defaulting to %s for %s" % (dldirfile, path)) | ||
63 | bb.utils.mkdirhier(os.path.dirname(dldirfile)) | ||
64 | searched.append(dldirfile) | ||
65 | return searched | ||
66 | return searched | 62 | return searched |
67 | 63 | ||
68 | def need_update(self, ud, d): | 64 | def need_update(self, ud, d): |
@@ -78,9 +74,7 @@ class Local(FetchMethod): | |||
78 | filespath = d.getVar('FILESPATH') | 74 | filespath = d.getVar('FILESPATH') |
79 | if filespath: | 75 | if filespath: |
80 | locations = filespath.split(":") | 76 | locations = filespath.split(":") |
81 | locations.append(d.getVar("DL_DIR")) | 77 | msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n " + "\n ".join(locations) |
82 | |||
83 | msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) | ||
84 | raise FetchError(msg) | 78 | raise FetchError(msg) |
85 | 79 | ||
86 | return True | 80 | return True |
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py index 47898509ff..15f3f19bc8 100644 --- a/bitbake/lib/bb/fetch2/npm.py +++ b/bitbake/lib/bb/fetch2/npm.py | |||
@@ -44,17 +44,24 @@ def npm_package(package): | |||
44 | """Convert the npm package name to remove unsupported character""" | 44 | """Convert the npm package name to remove unsupported character""" |
45 | # Scoped package names (with the @) use the same naming convention | 45 | # Scoped package names (with the @) use the same naming convention |
46 | # as the 'npm pack' command. | 46 | # as the 'npm pack' command. |
47 | if package.startswith("@"): | 47 | name = re.sub("/", "-", package) |
48 | return re.sub("/", "-", package[1:]) | 48 | name = name.lower() |
49 | return package | 49 | name = re.sub(r"[^\-a-z0-9]", "", name) |
50 | name = name.strip("-") | ||
51 | return name | ||
52 | |||
50 | 53 | ||
51 | def npm_filename(package, version): | 54 | def npm_filename(package, version): |
52 | """Get the filename of a npm package""" | 55 | """Get the filename of a npm package""" |
53 | return npm_package(package) + "-" + version + ".tgz" | 56 | return npm_package(package) + "-" + version + ".tgz" |
54 | 57 | ||
55 | def npm_localfile(package, version): | 58 | def npm_localfile(package, version=None): |
56 | """Get the local filename of a npm package""" | 59 | """Get the local filename of a npm package""" |
57 | return os.path.join("npm2", npm_filename(package, version)) | 60 | if version is not None: |
61 | filename = npm_filename(package, version) | ||
62 | else: | ||
63 | filename = package | ||
64 | return os.path.join("npm2", filename) | ||
58 | 65 | ||
59 | def npm_integrity(integrity): | 66 | def npm_integrity(integrity): |
60 | """ | 67 | """ |
@@ -69,41 +76,52 @@ def npm_unpack(tarball, destdir, d): | |||
69 | bb.utils.mkdirhier(destdir) | 76 | bb.utils.mkdirhier(destdir) |
70 | cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball) | 77 | cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball) |
71 | cmd += " --no-same-owner" | 78 | cmd += " --no-same-owner" |
79 | cmd += " --delay-directory-restore" | ||
72 | cmd += " --strip-components=1" | 80 | cmd += " --strip-components=1" |
73 | runfetchcmd(cmd, d, workdir=destdir) | 81 | runfetchcmd(cmd, d, workdir=destdir) |
82 | runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir) | ||
74 | 83 | ||
75 | class NpmEnvironment(object): | 84 | class NpmEnvironment(object): |
76 | """ | 85 | """ |
77 | Using a npm config file seems more reliable than using cli arguments. | 86 | Using a npm config file seems more reliable than using cli arguments. |
78 | This class allows to create a controlled environment for npm commands. | 87 | This class allows to create a controlled environment for npm commands. |
79 | """ | 88 | """ |
80 | def __init__(self, d, configs=None): | 89 | def __init__(self, d, configs=[], npmrc=None): |
81 | self.d = d | 90 | self.d = d |
82 | self.configs = configs | 91 | |
92 | self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) | ||
93 | for key, value in configs: | ||
94 | self.user_config.write("%s=%s\n" % (key, value)) | ||
95 | |||
96 | if npmrc: | ||
97 | self.global_config_name = npmrc | ||
98 | else: | ||
99 | self.global_config_name = "/dev/null" | ||
100 | |||
101 | def __del__(self): | ||
102 | if self.user_config: | ||
103 | self.user_config.close() | ||
83 | 104 | ||
84 | def run(self, cmd, args=None, configs=None, workdir=None): | 105 | def run(self, cmd, args=None, configs=None, workdir=None): |
85 | """Run npm command in a controlled environment""" | 106 | """Run npm command in a controlled environment""" |
86 | with tempfile.TemporaryDirectory() as tmpdir: | 107 | with tempfile.TemporaryDirectory() as tmpdir: |
87 | d = bb.data.createCopy(self.d) | 108 | d = bb.data.createCopy(self.d) |
109 | d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching | ||
88 | d.setVar("HOME", tmpdir) | 110 | d.setVar("HOME", tmpdir) |
89 | 111 | ||
90 | cfgfile = os.path.join(tmpdir, "npmrc") | ||
91 | |||
92 | if not workdir: | 112 | if not workdir: |
93 | workdir = tmpdir | 113 | workdir = tmpdir |
94 | 114 | ||
95 | def _run(cmd): | 115 | def _run(cmd): |
96 | cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd | 116 | cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd |
97 | cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd | 117 | cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd |
98 | return runfetchcmd(cmd, d, workdir=workdir) | 118 | return runfetchcmd(cmd, d, workdir=workdir) |
99 | 119 | ||
100 | if self.configs: | ||
101 | for key, value in self.configs: | ||
102 | _run("npm config set %s %s" % (key, shlex.quote(value))) | ||
103 | |||
104 | if configs: | 120 | if configs: |
121 | bb.warn("Use of configs argument of NpmEnvironment.run() function" | ||
122 | " is deprecated. Please use args argument instead.") | ||
105 | for key, value in configs: | 123 | for key, value in configs: |
106 | _run("npm config set %s %s" % (key, shlex.quote(value))) | 124 | cmd += " --%s=%s" % (key, shlex.quote(value)) |
107 | 125 | ||
108 | if args: | 126 | if args: |
109 | for key, value in args: | 127 | for key, value in args: |
@@ -142,12 +160,12 @@ class Npm(FetchMethod): | |||
142 | raise ParameterError("Invalid 'version' parameter", ud.url) | 160 | raise ParameterError("Invalid 'version' parameter", ud.url) |
143 | 161 | ||
144 | # Extract the 'registry' part of the url | 162 | # Extract the 'registry' part of the url |
145 | ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0]) | 163 | ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0]) |
146 | 164 | ||
147 | # Using the 'downloadfilename' parameter as local filename | 165 | # Using the 'downloadfilename' parameter as local filename |
148 | # or the npm package name. | 166 | # or the npm package name. |
149 | if "downloadfilename" in ud.parm: | 167 | if "downloadfilename" in ud.parm: |
150 | ud.localfile = d.expand(ud.parm["downloadfilename"]) | 168 | ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"])) |
151 | else: | 169 | else: |
152 | ud.localfile = npm_localfile(ud.package, ud.version) | 170 | ud.localfile = npm_localfile(ud.package, ud.version) |
153 | 171 | ||
@@ -165,14 +183,14 @@ class Npm(FetchMethod): | |||
165 | 183 | ||
166 | def _resolve_proxy_url(self, ud, d): | 184 | def _resolve_proxy_url(self, ud, d): |
167 | def _npm_view(): | 185 | def _npm_view(): |
168 | configs = [] | 186 | args = [] |
169 | configs.append(("json", "true")) | 187 | args.append(("json", "true")) |
170 | configs.append(("registry", ud.registry)) | 188 | args.append(("registry", ud.registry)) |
171 | pkgver = shlex.quote(ud.package + "@" + ud.version) | 189 | pkgver = shlex.quote(ud.package + "@" + ud.version) |
172 | cmd = ud.basecmd + " view %s" % pkgver | 190 | cmd = ud.basecmd + " view %s" % pkgver |
173 | env = NpmEnvironment(d) | 191 | env = NpmEnvironment(d) |
174 | check_network_access(d, cmd, ud.registry) | 192 | check_network_access(d, cmd, ud.registry) |
175 | view_string = env.run(cmd, configs=configs) | 193 | view_string = env.run(cmd, args=args) |
176 | 194 | ||
177 | if not view_string: | 195 | if not view_string: |
178 | raise FetchError("Unavailable package %s" % pkgver, ud.url) | 196 | raise FetchError("Unavailable package %s" % pkgver, ud.url) |
@@ -280,6 +298,7 @@ class Npm(FetchMethod): | |||
280 | destsuffix = ud.parm.get("destsuffix", "npm") | 298 | destsuffix = ud.parm.get("destsuffix", "npm") |
281 | destdir = os.path.join(rootdir, destsuffix) | 299 | destdir = os.path.join(rootdir, destsuffix) |
282 | npm_unpack(ud.localpath, destdir, d) | 300 | npm_unpack(ud.localpath, destdir, d) |
301 | ud.unpack_tracer.unpack("npm", destdir) | ||
283 | 302 | ||
284 | def clean(self, ud, d): | 303 | def clean(self, ud, d): |
285 | """Clean any existing full or partial download""" | 304 | """Clean any existing full or partial download""" |
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py index 0c3511d8ab..ff5f8dc755 100644 --- a/bitbake/lib/bb/fetch2/npmsw.py +++ b/bitbake/lib/bb/fetch2/npmsw.py | |||
@@ -24,11 +24,14 @@ import bb | |||
24 | from bb.fetch2 import Fetch | 24 | from bb.fetch2 import Fetch |
25 | from bb.fetch2 import FetchMethod | 25 | from bb.fetch2 import FetchMethod |
26 | from bb.fetch2 import ParameterError | 26 | from bb.fetch2 import ParameterError |
27 | from bb.fetch2 import runfetchcmd | ||
27 | from bb.fetch2 import URI | 28 | from bb.fetch2 import URI |
28 | from bb.fetch2.npm import npm_integrity | 29 | from bb.fetch2.npm import npm_integrity |
29 | from bb.fetch2.npm import npm_localfile | 30 | from bb.fetch2.npm import npm_localfile |
30 | from bb.fetch2.npm import npm_unpack | 31 | from bb.fetch2.npm import npm_unpack |
31 | from bb.utils import is_semver | 32 | from bb.utils import is_semver |
33 | from bb.utils import lockfile | ||
34 | from bb.utils import unlockfile | ||
32 | 35 | ||
33 | def foreach_dependencies(shrinkwrap, callback=None, dev=False): | 36 | def foreach_dependencies(shrinkwrap, callback=None, dev=False): |
34 | """ | 37 | """ |
@@ -38,8 +41,9 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False): | |||
38 | with: | 41 | with: |
39 | name = the package name (string) | 42 | name = the package name (string) |
40 | params = the package parameters (dictionary) | 43 | params = the package parameters (dictionary) |
41 | deptree = the package dependency tree (array of strings) | 44 | destdir = the destination of the package (string) |
42 | """ | 45 | """ |
46 | # For handling old style dependencies entries in shinkwrap files | ||
43 | def _walk_deps(deps, deptree): | 47 | def _walk_deps(deps, deptree): |
44 | for name in deps: | 48 | for name in deps: |
45 | subtree = [*deptree, name] | 49 | subtree = [*deptree, name] |
@@ -49,9 +53,22 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False): | |||
49 | continue | 53 | continue |
50 | elif deps[name].get("bundled", False): | 54 | elif deps[name].get("bundled", False): |
51 | continue | 55 | continue |
52 | callback(name, deps[name], subtree) | 56 | destsubdirs = [os.path.join("node_modules", dep) for dep in subtree] |
53 | 57 | destsuffix = os.path.join(*destsubdirs) | |
54 | _walk_deps(shrinkwrap.get("dependencies", {}), []) | 58 | callback(name, deps[name], destsuffix) |
59 | |||
60 | # packages entry means new style shrinkwrap file, else use dependencies | ||
61 | packages = shrinkwrap.get("packages", None) | ||
62 | if packages is not None: | ||
63 | for package in packages: | ||
64 | if package != "": | ||
65 | name = package.split('node_modules/')[-1] | ||
66 | package_infos = packages.get(package, {}) | ||
67 | if dev == False and package_infos.get("dev", False): | ||
68 | continue | ||
69 | callback(name, package_infos, package) | ||
70 | else: | ||
71 | _walk_deps(shrinkwrap.get("dependencies", {}), []) | ||
55 | 72 | ||
56 | class NpmShrinkWrap(FetchMethod): | 73 | class NpmShrinkWrap(FetchMethod): |
57 | """Class to fetch all package from a shrinkwrap file""" | 74 | """Class to fetch all package from a shrinkwrap file""" |
@@ -72,19 +89,22 @@ class NpmShrinkWrap(FetchMethod): | |||
72 | # Resolve the dependencies | 89 | # Resolve the dependencies |
73 | ud.deps = [] | 90 | ud.deps = [] |
74 | 91 | ||
75 | def _resolve_dependency(name, params, deptree): | 92 | def _resolve_dependency(name, params, destsuffix): |
76 | url = None | 93 | url = None |
77 | localpath = None | 94 | localpath = None |
78 | extrapaths = [] | 95 | extrapaths = [] |
79 | destsubdirs = [os.path.join("node_modules", dep) for dep in deptree] | 96 | unpack = True |
80 | destsuffix = os.path.join(*destsubdirs) | ||
81 | 97 | ||
82 | integrity = params.get("integrity", None) | 98 | integrity = params.get("integrity", None) |
83 | resolved = params.get("resolved", None) | 99 | resolved = params.get("resolved", None) |
84 | version = params.get("version", None) | 100 | version = params.get("version", None) |
85 | 101 | ||
86 | # Handle registry sources | 102 | # Handle registry sources |
87 | if is_semver(version) and resolved and integrity: | 103 | if is_semver(version) and integrity: |
104 | # Handle duplicate dependencies without url | ||
105 | if not resolved: | ||
106 | return | ||
107 | |||
88 | localfile = npm_localfile(name, version) | 108 | localfile = npm_localfile(name, version) |
89 | 109 | ||
90 | uri = URI(resolved) | 110 | uri = URI(resolved) |
@@ -109,7 +129,7 @@ class NpmShrinkWrap(FetchMethod): | |||
109 | 129 | ||
110 | # Handle http tarball sources | 130 | # Handle http tarball sources |
111 | elif version.startswith("http") and integrity: | 131 | elif version.startswith("http") and integrity: |
112 | localfile = os.path.join("npm2", os.path.basename(version)) | 132 | localfile = npm_localfile(os.path.basename(version)) |
113 | 133 | ||
114 | uri = URI(version) | 134 | uri = URI(version) |
115 | uri.params["downloadfilename"] = localfile | 135 | uri.params["downloadfilename"] = localfile |
@@ -121,8 +141,28 @@ class NpmShrinkWrap(FetchMethod): | |||
121 | 141 | ||
122 | localpath = os.path.join(d.getVar("DL_DIR"), localfile) | 142 | localpath = os.path.join(d.getVar("DL_DIR"), localfile) |
123 | 143 | ||
144 | # Handle local tarball and link sources | ||
145 | elif version.startswith("file"): | ||
146 | localpath = version[5:] | ||
147 | if not version.endswith(".tgz"): | ||
148 | unpack = False | ||
149 | |||
124 | # Handle git sources | 150 | # Handle git sources |
125 | elif version.startswith("git"): | 151 | elif version.startswith(("git", "bitbucket","gist")) or ( |
152 | not version.endswith((".tgz", ".tar", ".tar.gz")) | ||
153 | and not version.startswith((".", "@", "/")) | ||
154 | and "/" in version | ||
155 | ): | ||
156 | if version.startswith("github:"): | ||
157 | version = "git+https://github.com/" + version[len("github:"):] | ||
158 | elif version.startswith("gist:"): | ||
159 | version = "git+https://gist.github.com/" + version[len("gist:"):] | ||
160 | elif version.startswith("bitbucket:"): | ||
161 | version = "git+https://bitbucket.org/" + version[len("bitbucket:"):] | ||
162 | elif version.startswith("gitlab:"): | ||
163 | version = "git+https://gitlab.com/" + version[len("gitlab:"):] | ||
164 | elif not version.startswith(("git+","git:")): | ||
165 | version = "git+https://github.com/" + version | ||
126 | regex = re.compile(r""" | 166 | regex = re.compile(r""" |
127 | ^ | 167 | ^ |
128 | git\+ | 168 | git\+ |
@@ -148,15 +188,17 @@ class NpmShrinkWrap(FetchMethod): | |||
148 | 188 | ||
149 | url = str(uri) | 189 | url = str(uri) |
150 | 190 | ||
151 | # local tarball sources and local link sources are unsupported | ||
152 | else: | 191 | else: |
153 | raise ParameterError("Unsupported dependency: %s" % name, ud.url) | 192 | raise ParameterError("Unsupported dependency: %s" % name, ud.url) |
154 | 193 | ||
194 | # name is needed by unpack tracer for module mapping | ||
155 | ud.deps.append({ | 195 | ud.deps.append({ |
196 | "name": name, | ||
156 | "url": url, | 197 | "url": url, |
157 | "localpath": localpath, | 198 | "localpath": localpath, |
158 | "extrapaths": extrapaths, | 199 | "extrapaths": extrapaths, |
159 | "destsuffix": destsuffix, | 200 | "destsuffix": destsuffix, |
201 | "unpack": unpack, | ||
160 | }) | 202 | }) |
161 | 203 | ||
162 | try: | 204 | try: |
@@ -177,17 +219,23 @@ class NpmShrinkWrap(FetchMethod): | |||
177 | # This fetcher resolves multiple URIs from a shrinkwrap file and then | 219 | # This fetcher resolves multiple URIs from a shrinkwrap file and then |
178 | # forwards it to a proxy fetcher. The management of the donestamp file, | 220 | # forwards it to a proxy fetcher. The management of the donestamp file, |
179 | # the lockfile and the checksums are forwarded to the proxy fetcher. | 221 | # the lockfile and the checksums are forwarded to the proxy fetcher. |
180 | ud.proxy = Fetch([dep["url"] for dep in ud.deps], data) | 222 | shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]] |
223 | if shrinkwrap_urls: | ||
224 | ud.proxy = Fetch(shrinkwrap_urls, data) | ||
181 | ud.needdonestamp = False | 225 | ud.needdonestamp = False |
182 | 226 | ||
183 | @staticmethod | 227 | @staticmethod |
184 | def _foreach_proxy_method(ud, handle): | 228 | def _foreach_proxy_method(ud, handle): |
185 | returns = [] | 229 | returns = [] |
186 | for proxy_url in ud.proxy.urls: | 230 | #Check if there are dependencies before try to fetch them |
187 | proxy_ud = ud.proxy.ud[proxy_url] | 231 | if len(ud.deps) > 0: |
188 | proxy_d = ud.proxy.d | 232 | for proxy_url in ud.proxy.urls: |
189 | proxy_ud.setup_localpath(proxy_d) | 233 | proxy_ud = ud.proxy.ud[proxy_url] |
190 | returns.append(handle(proxy_ud.method, proxy_ud, proxy_d)) | 234 | proxy_d = ud.proxy.d |
235 | proxy_ud.setup_localpath(proxy_d) | ||
236 | lf = lockfile(proxy_ud.lockfile) | ||
237 | returns.append(handle(proxy_ud.method, proxy_ud, proxy_d)) | ||
238 | unlockfile(lf) | ||
191 | return returns | 239 | return returns |
192 | 240 | ||
193 | def verify_donestamp(self, ud, d): | 241 | def verify_donestamp(self, ud, d): |
@@ -224,6 +272,7 @@ class NpmShrinkWrap(FetchMethod): | |||
224 | destsuffix = ud.parm.get("destsuffix") | 272 | destsuffix = ud.parm.get("destsuffix") |
225 | if destsuffix: | 273 | if destsuffix: |
226 | destdir = os.path.join(rootdir, destsuffix) | 274 | destdir = os.path.join(rootdir, destsuffix) |
275 | ud.unpack_tracer.unpack("npm-shrinkwrap", destdir) | ||
227 | 276 | ||
228 | bb.utils.mkdirhier(destdir) | 277 | bb.utils.mkdirhier(destdir) |
229 | bb.utils.copyfile(ud.shrinkwrap_file, | 278 | bb.utils.copyfile(ud.shrinkwrap_file, |
@@ -237,7 +286,16 @@ class NpmShrinkWrap(FetchMethod): | |||
237 | 286 | ||
238 | for dep in manual: | 287 | for dep in manual: |
239 | depdestdir = os.path.join(destdir, dep["destsuffix"]) | 288 | depdestdir = os.path.join(destdir, dep["destsuffix"]) |
240 | npm_unpack(dep["localpath"], depdestdir, d) | 289 | if dep["url"]: |
290 | npm_unpack(dep["localpath"], depdestdir, d) | ||
291 | else: | ||
292 | depsrcdir= os.path.join(destdir, dep["localpath"]) | ||
293 | if dep["unpack"]: | ||
294 | npm_unpack(depsrcdir, depdestdir, d) | ||
295 | else: | ||
296 | bb.utils.mkdirhier(depdestdir) | ||
297 | cmd = 'cp -fpPRH "%s/." .' % (depsrcdir) | ||
298 | runfetchcmd(cmd, d, workdir=depdestdir) | ||
241 | 299 | ||
242 | def clean(self, ud, d): | 300 | def clean(self, ud, d): |
243 | """Clean any existing full or partial download""" | 301 | """Clean any existing full or partial download""" |
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py index d9ce44390c..495ac8a30a 100644 --- a/bitbake/lib/bb/fetch2/osc.py +++ b/bitbake/lib/bb/fetch2/osc.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | """ | 6 | """ |
@@ -9,6 +11,7 @@ Based on the svn "Fetch" implementation. | |||
9 | 11 | ||
10 | import logging | 12 | import logging |
11 | import os | 13 | import os |
14 | import re | ||
12 | import bb | 15 | import bb |
13 | from bb.fetch2 import FetchMethod | 16 | from bb.fetch2 import FetchMethod |
14 | from bb.fetch2 import FetchError | 17 | from bb.fetch2 import FetchError |
@@ -36,6 +39,7 @@ class Osc(FetchMethod): | |||
36 | # Create paths to osc checkouts | 39 | # Create paths to osc checkouts |
37 | oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc") | 40 | oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc") |
38 | relpath = self._strip_leading_slashes(ud.path) | 41 | relpath = self._strip_leading_slashes(ud.path) |
42 | ud.oscdir = oscdir | ||
39 | ud.pkgdir = os.path.join(oscdir, ud.host) | 43 | ud.pkgdir = os.path.join(oscdir, ud.host) |
40 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) | 44 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) |
41 | 45 | ||
@@ -43,13 +47,13 @@ class Osc(FetchMethod): | |||
43 | ud.revision = ud.parm['rev'] | 47 | ud.revision = ud.parm['rev'] |
44 | else: | 48 | else: |
45 | pv = d.getVar("PV", False) | 49 | pv = d.getVar("PV", False) |
46 | rev = bb.fetch2.srcrev_internal_helper(ud, d) | 50 | rev = bb.fetch2.srcrev_internal_helper(ud, d, '') |
47 | if rev: | 51 | if rev: |
48 | ud.revision = rev | 52 | ud.revision = rev |
49 | else: | 53 | else: |
50 | ud.revision = "" | 54 | ud.revision = "" |
51 | 55 | ||
52 | ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision)) | 56 | ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), relpath.replace('/', '.'), ud.revision)) |
53 | 57 | ||
54 | def _buildosccommand(self, ud, d, command): | 58 | def _buildosccommand(self, ud, d, command): |
55 | """ | 59 | """ |
@@ -59,26 +63,49 @@ class Osc(FetchMethod): | |||
59 | 63 | ||
60 | basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc" | 64 | basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc" |
61 | 65 | ||
62 | proto = ud.parm.get('protocol', 'ocs') | 66 | proto = ud.parm.get('protocol', 'https') |
63 | 67 | ||
64 | options = [] | 68 | options = [] |
65 | 69 | ||
66 | config = "-c %s" % self.generate_config(ud, d) | 70 | config = "-c %s" % self.generate_config(ud, d) |
67 | 71 | ||
68 | if ud.revision: | 72 | if getattr(ud, 'revision', ''): |
69 | options.append("-r %s" % ud.revision) | 73 | options.append("-r %s" % ud.revision) |
70 | 74 | ||
71 | coroot = self._strip_leading_slashes(ud.path) | 75 | coroot = self._strip_leading_slashes(ud.path) |
72 | 76 | ||
73 | if command == "fetch": | 77 | if command == "fetch": |
74 | osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) | 78 | osccmd = "%s %s -A %s://%s co %s/%s %s" % (basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options)) |
75 | elif command == "update": | 79 | elif command == "update": |
76 | osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) | 80 | osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options)) |
81 | elif command == "api_source": | ||
82 | osccmd = "%s %s -A %s://%s api source/%s/%s" % (basecmd, config, proto, ud.host, coroot, ud.module) | ||
77 | else: | 83 | else: |
78 | raise FetchError("Invalid osc command %s" % command, ud.url) | 84 | raise FetchError("Invalid osc command %s" % command, ud.url) |
79 | 85 | ||
80 | return osccmd | 86 | return osccmd |
81 | 87 | ||
88 | def _latest_revision(self, ud, d, name): | ||
89 | """ | ||
90 | Fetch latest revision for the given package | ||
91 | """ | ||
92 | api_source_cmd = self._buildosccommand(ud, d, "api_source") | ||
93 | |||
94 | output = runfetchcmd(api_source_cmd, d) | ||
95 | match = re.match(r'<directory ?.* rev="(\d+)".*>', output) | ||
96 | if match is None: | ||
97 | raise FetchError("Unable to parse osc response", ud.url) | ||
98 | return match.groups()[0] | ||
99 | |||
100 | def _revision_key(self, ud, d, name): | ||
101 | """ | ||
102 | Return a unique key for the url | ||
103 | """ | ||
104 | # Collapse adjacent slashes | ||
105 | slash_re = re.compile(r"/+") | ||
106 | rev = getattr(ud, 'revision', "latest") | ||
107 | return "osc:%s%s.%s.%s" % (ud.host, slash_re.sub(".", ud.path), name, rev) | ||
108 | |||
82 | def download(self, ud, d): | 109 | def download(self, ud, d): |
83 | """ | 110 | """ |
84 | Fetch url | 111 | Fetch url |
@@ -86,7 +113,7 @@ class Osc(FetchMethod): | |||
86 | 113 | ||
87 | logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") | 114 | logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") |
88 | 115 | ||
89 | if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): | 116 | if os.access(ud.moddir, os.R_OK): |
90 | oscupdatecmd = self._buildosccommand(ud, d, "update") | 117 | oscupdatecmd = self._buildosccommand(ud, d, "update") |
91 | logger.info("Update "+ ud.url) | 118 | logger.info("Update "+ ud.url) |
92 | # update sources there | 119 | # update sources there |
@@ -114,20 +141,23 @@ class Osc(FetchMethod): | |||
114 | Generate a .oscrc to be used for this run. | 141 | Generate a .oscrc to be used for this run. |
115 | """ | 142 | """ |
116 | 143 | ||
117 | config_path = os.path.join(d.getVar('OSCDIR'), "oscrc") | 144 | config_path = os.path.join(ud.oscdir, "oscrc") |
145 | if not os.path.exists(ud.oscdir): | ||
146 | bb.utils.mkdirhier(ud.oscdir) | ||
147 | |||
118 | if (os.path.exists(config_path)): | 148 | if (os.path.exists(config_path)): |
119 | os.remove(config_path) | 149 | os.remove(config_path) |
120 | 150 | ||
121 | f = open(config_path, 'w') | 151 | f = open(config_path, 'w') |
152 | proto = ud.parm.get('protocol', 'https') | ||
122 | f.write("[general]\n") | 153 | f.write("[general]\n") |
123 | f.write("apisrv = %s\n" % ud.host) | 154 | f.write("apiurl = %s://%s\n" % (proto, ud.host)) |
124 | f.write("scheme = http\n") | ||
125 | f.write("su-wrapper = su -c\n") | 155 | f.write("su-wrapper = su -c\n") |
126 | f.write("build-root = %s\n" % d.getVar('WORKDIR')) | 156 | f.write("build-root = %s\n" % d.getVar('WORKDIR')) |
127 | f.write("urllist = %s\n" % d.getVar("OSCURLLIST")) | 157 | f.write("urllist = %s\n" % d.getVar("OSCURLLIST")) |
128 | f.write("extra-pkgs = gzip\n") | 158 | f.write("extra-pkgs = gzip\n") |
129 | f.write("\n") | 159 | f.write("\n") |
130 | f.write("[%s]\n" % ud.host) | 160 | f.write("[%s://%s]\n" % (proto, ud.host)) |
131 | f.write("user = %s\n" % ud.parm["user"]) | 161 | f.write("user = %s\n" % ud.parm["user"]) |
132 | f.write("pass = %s\n" % ud.parm["pswd"]) | 162 | f.write("pass = %s\n" % ud.parm["pswd"]) |
133 | f.close() | 163 | f.close() |
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py index e2a41a4a12..3b6fa4b1ec 100644 --- a/bitbake/lib/bb/fetch2/perforce.py +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
@@ -134,7 +134,7 @@ class Perforce(FetchMethod): | |||
134 | 134 | ||
135 | ud.setup_revisions(d) | 135 | ud.setup_revisions(d) |
136 | 136 | ||
137 | ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision)) | 137 | ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision)) |
138 | 138 | ||
139 | def _buildp4command(self, ud, d, command, depot_filename=None): | 139 | def _buildp4command(self, ud, d, command, depot_filename=None): |
140 | """ | 140 | """ |
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py index ffca73c8e4..6b8ffd5359 100644 --- a/bitbake/lib/bb/fetch2/s3.py +++ b/bitbake/lib/bb/fetch2/s3.py | |||
@@ -18,10 +18,47 @@ The aws tool must be correctly installed and configured prior to use. | |||
18 | import os | 18 | import os |
19 | import bb | 19 | import bb |
20 | import urllib.request, urllib.parse, urllib.error | 20 | import urllib.request, urllib.parse, urllib.error |
21 | import re | ||
21 | from bb.fetch2 import FetchMethod | 22 | from bb.fetch2 import FetchMethod |
22 | from bb.fetch2 import FetchError | 23 | from bb.fetch2 import FetchError |
23 | from bb.fetch2 import runfetchcmd | 24 | from bb.fetch2 import runfetchcmd |
24 | 25 | ||
26 | def convertToBytes(value, unit): | ||
27 | value = float(value) | ||
28 | if (unit == "KiB"): | ||
29 | value = value*1024.0; | ||
30 | elif (unit == "MiB"): | ||
31 | value = value*1024.0*1024.0; | ||
32 | elif (unit == "GiB"): | ||
33 | value = value*1024.0*1024.0*1024.0; | ||
34 | return value | ||
35 | |||
36 | class S3ProgressHandler(bb.progress.LineFilterProgressHandler): | ||
37 | """ | ||
38 | Extract progress information from s3 cp output, e.g.: | ||
39 | Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining | ||
40 | """ | ||
41 | def __init__(self, d): | ||
42 | super(S3ProgressHandler, self).__init__(d) | ||
43 | # Send an initial progress event so the bar gets shown | ||
44 | self._fire_progress(0) | ||
45 | |||
46 | def writeline(self, line): | ||
47 | percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line) | ||
48 | if percs: | ||
49 | completed = (percs[-1][0]) | ||
50 | completedUnit = (percs[-1][1]) | ||
51 | total = (percs[-1][2]) | ||
52 | totalUnit = (percs[-1][3]) | ||
53 | completed = convertToBytes(completed, completedUnit) | ||
54 | total = convertToBytes(total, totalUnit) | ||
55 | progress = (completed/total)*100.0 | ||
56 | rate = percs[-1][4] | ||
57 | self.update(progress, rate) | ||
58 | return False | ||
59 | return True | ||
60 | |||
61 | |||
25 | class S3(FetchMethod): | 62 | class S3(FetchMethod): |
26 | """Class to fetch urls via 'aws s3'""" | 63 | """Class to fetch urls via 'aws s3'""" |
27 | 64 | ||
@@ -52,7 +89,9 @@ class S3(FetchMethod): | |||
52 | 89 | ||
53 | cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath) | 90 | cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath) |
54 | bb.fetch2.check_network_access(d, cmd, ud.url) | 91 | bb.fetch2.check_network_access(d, cmd, ud.url) |
55 | runfetchcmd(cmd, d) | 92 | |
93 | progresshandler = S3ProgressHandler(d) | ||
94 | runfetchcmd(cmd, d, False, log=progresshandler) | ||
56 | 95 | ||
57 | # Additional sanity checks copied from the wget class (although there | 96 | # Additional sanity checks copied from the wget class (although there |
58 | # are no known issues which mean these are required, treat the aws cli | 97 | # are no known issues which mean these are required, treat the aws cli |
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py index f87f292e5d..7884cce949 100644 --- a/bitbake/lib/bb/fetch2/sftp.py +++ b/bitbake/lib/bb/fetch2/sftp.py | |||
@@ -103,7 +103,7 @@ class SFTP(FetchMethod): | |||
103 | if path[:3] == '/~/': | 103 | if path[:3] == '/~/': |
104 | path = path[3:] | 104 | path = path[3:] |
105 | 105 | ||
106 | remote = '%s%s:%s' % (user, urlo.hostname, path) | 106 | remote = '"%s%s:%s"' % (user, urlo.hostname, path) |
107 | 107 | ||
108 | cmd = '%s %s %s %s' % (basecmd, port, remote, lpath) | 108 | cmd = '%s %s %s %s' % (basecmd, port, remote, lpath) |
109 | 109 | ||
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py index 2c8557e1f8..0cbb2a6f25 100644 --- a/bitbake/lib/bb/fetch2/ssh.py +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
@@ -32,6 +32,7 @@ IETF secsh internet draft: | |||
32 | 32 | ||
33 | import re, os | 33 | import re, os |
34 | from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd | 34 | from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd |
35 | import urllib | ||
35 | 36 | ||
36 | 37 | ||
37 | __pattern__ = re.compile(r''' | 38 | __pattern__ = re.compile(r''' |
@@ -40,9 +41,9 @@ __pattern__ = re.compile(r''' | |||
40 | ( # Optional username/password block | 41 | ( # Optional username/password block |
41 | (?P<user>\S+) # username | 42 | (?P<user>\S+) # username |
42 | (:(?P<pass>\S+))? # colon followed by the password (optional) | 43 | (:(?P<pass>\S+))? # colon followed by the password (optional) |
43 | )? | ||
44 | (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) | 44 | (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) |
45 | @ | 45 | @ |
46 | )? | ||
46 | (?P<host>\S+?) # non-greedy match of the host | 47 | (?P<host>\S+?) # non-greedy match of the host |
47 | (:(?P<port>[0-9]+))? # colon followed by the port (optional) | 48 | (:(?P<port>[0-9]+))? # colon followed by the port (optional) |
48 | / | 49 | / |
@@ -70,6 +71,7 @@ class SSH(FetchMethod): | |||
70 | "git:// prefix with protocol=ssh", urldata.url) | 71 | "git:// prefix with protocol=ssh", urldata.url) |
71 | m = __pattern__.match(urldata.url) | 72 | m = __pattern__.match(urldata.url) |
72 | path = m.group('path') | 73 | path = m.group('path') |
74 | path = urllib.parse.unquote(path) | ||
73 | host = m.group('host') | 75 | host = m.group('host') |
74 | urldata.localpath = os.path.join(d.getVar('DL_DIR'), | 76 | urldata.localpath = os.path.join(d.getVar('DL_DIR'), |
75 | os.path.basename(os.path.normpath(path))) | 77 | os.path.basename(os.path.normpath(path))) |
@@ -96,6 +98,11 @@ class SSH(FetchMethod): | |||
96 | fr += '@%s' % host | 98 | fr += '@%s' % host |
97 | else: | 99 | else: |
98 | fr = host | 100 | fr = host |
101 | |||
102 | if path[0] != '~': | ||
103 | path = '/%s' % path | ||
104 | path = urllib.parse.unquote(path) | ||
105 | |||
99 | fr += ':%s' % path | 106 | fr += ':%s' % path |
100 | 107 | ||
101 | cmd = 'scp -B -r %s %s %s/' % ( | 108 | cmd = 'scp -B -r %s %s %s/' % ( |
@@ -108,3 +115,41 @@ class SSH(FetchMethod): | |||
108 | 115 | ||
109 | runfetchcmd(cmd, d) | 116 | runfetchcmd(cmd, d) |
110 | 117 | ||
118 | def checkstatus(self, fetch, urldata, d): | ||
119 | """ | ||
120 | Check the status of the url | ||
121 | """ | ||
122 | m = __pattern__.match(urldata.url) | ||
123 | path = m.group('path') | ||
124 | host = m.group('host') | ||
125 | port = m.group('port') | ||
126 | user = m.group('user') | ||
127 | password = m.group('pass') | ||
128 | |||
129 | if port: | ||
130 | portarg = '-P %s' % port | ||
131 | else: | ||
132 | portarg = '' | ||
133 | |||
134 | if user: | ||
135 | fr = user | ||
136 | if password: | ||
137 | fr += ':%s' % password | ||
138 | fr += '@%s' % host | ||
139 | else: | ||
140 | fr = host | ||
141 | |||
142 | if path[0] != '~': | ||
143 | path = '/%s' % path | ||
144 | path = urllib.parse.unquote(path) | ||
145 | |||
146 | cmd = 'ssh -o BatchMode=true %s %s [ -f %s ]' % ( | ||
147 | portarg, | ||
148 | fr, | ||
149 | path | ||
150 | ) | ||
151 | |||
152 | check_network_access(d, cmd, urldata.url) | ||
153 | runfetchcmd(cmd, d) | ||
154 | |||
155 | return True | ||
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py index 8856ef1c62..d40e4d2909 100644 --- a/bitbake/lib/bb/fetch2/svn.py +++ b/bitbake/lib/bb/fetch2/svn.py | |||
@@ -57,7 +57,12 @@ class Svn(FetchMethod): | |||
57 | if 'rev' in ud.parm: | 57 | if 'rev' in ud.parm: |
58 | ud.revision = ud.parm['rev'] | 58 | ud.revision = ud.parm['rev'] |
59 | 59 | ||
60 | ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision)) | 60 | # Whether to use the @REV peg-revision syntax in the svn command or not |
61 | ud.pegrevision = True | ||
62 | if 'nopegrevision' in ud.parm: | ||
63 | ud.pegrevision = False | ||
64 | |||
65 | ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision])) | ||
61 | 66 | ||
62 | def _buildsvncommand(self, ud, d, command): | 67 | def _buildsvncommand(self, ud, d, command): |
63 | """ | 68 | """ |
@@ -86,7 +91,7 @@ class Svn(FetchMethod): | |||
86 | if command == "info": | 91 | if command == "info": |
87 | svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) | 92 | svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) |
88 | elif command == "log1": | 93 | elif command == "log1": |
89 | svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) | 94 | svncmd = "%s log --limit 1 --quiet %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) |
90 | else: | 95 | else: |
91 | suffix = "" | 96 | suffix = "" |
92 | 97 | ||
@@ -98,7 +103,8 @@ class Svn(FetchMethod): | |||
98 | 103 | ||
99 | if ud.revision: | 104 | if ud.revision: |
100 | options.append("-r %s" % ud.revision) | 105 | options.append("-r %s" % ud.revision) |
101 | suffix = "@%s" % (ud.revision) | 106 | if ud.pegrevision: |
107 | suffix = "@%s" % (ud.revision) | ||
102 | 108 | ||
103 | if command == "fetch": | 109 | if command == "fetch": |
104 | transportuser = ud.parm.get("transportuser", "") | 110 | transportuser = ud.parm.get("transportuser", "") |
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index 6d82f3af07..fbfa6938ac 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod | |||
26 | from bb.fetch2 import FetchError | 26 | from bb.fetch2 import FetchError |
27 | from bb.fetch2 import logger | 27 | from bb.fetch2 import logger |
28 | from bb.fetch2 import runfetchcmd | 28 | from bb.fetch2 import runfetchcmd |
29 | from bb.utils import export_proxies | ||
30 | from bs4 import BeautifulSoup | 29 | from bs4 import BeautifulSoup |
31 | from bs4 import SoupStrainer | 30 | from bs4 import SoupStrainer |
32 | 31 | ||
@@ -52,18 +51,24 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler): | |||
52 | 51 | ||
53 | 52 | ||
54 | class Wget(FetchMethod): | 53 | class Wget(FetchMethod): |
54 | """Class to fetch urls via 'wget'""" | ||
55 | 55 | ||
56 | # CDNs like CloudFlare may do a 'browser integrity test' which can fail | 56 | # CDNs like CloudFlare may do a 'browser integrity test' which can fail |
57 | # with the standard wget/urllib User-Agent, so pretend to be a modern | 57 | # with the standard wget/urllib User-Agent, so pretend to be a modern |
58 | # browser. | 58 | # browser. |
59 | user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0" | 59 | user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0" |
60 | 60 | ||
61 | """Class to fetch urls via 'wget'""" | 61 | def check_certs(self, d): |
62 | """ | ||
63 | Should certificates be checked? | ||
64 | """ | ||
65 | return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0" | ||
66 | |||
62 | def supports(self, ud, d): | 67 | def supports(self, ud, d): |
63 | """ | 68 | """ |
64 | Check to see if a given url can be fetched with wget. | 69 | Check to see if a given url can be fetched with wget. |
65 | """ | 70 | """ |
66 | return ud.type in ['http', 'https', 'ftp'] | 71 | return ud.type in ['http', 'https', 'ftp', 'ftps'] |
67 | 72 | ||
68 | def recommends_checksum(self, urldata): | 73 | def recommends_checksum(self, urldata): |
69 | return True | 74 | return True |
@@ -82,7 +87,13 @@ class Wget(FetchMethod): | |||
82 | if not ud.localfile: | 87 | if not ud.localfile: |
83 | ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) | 88 | ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) |
84 | 89 | ||
85 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" | 90 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30" |
91 | |||
92 | if ud.type == 'ftp' or ud.type == 'ftps': | ||
93 | self.basecmd += " --passive-ftp" | ||
94 | |||
95 | if not self.check_certs(d): | ||
96 | self.basecmd += " --no-check-certificate" | ||
86 | 97 | ||
87 | def _runwget(self, ud, d, command, quiet, workdir=None): | 98 | def _runwget(self, ud, d, command, quiet, workdir=None): |
88 | 99 | ||
@@ -97,13 +108,22 @@ class Wget(FetchMethod): | |||
97 | 108 | ||
98 | fetchcmd = self.basecmd | 109 | fetchcmd = self.basecmd |
99 | 110 | ||
100 | if 'downloadfilename' in ud.parm: | 111 | localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp" |
101 | localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) | 112 | bb.utils.mkdirhier(os.path.dirname(localpath)) |
102 | bb.utils.mkdirhier(os.path.dirname(localpath)) | 113 | fetchcmd += " -O %s" % shlex.quote(localpath) |
103 | fetchcmd += " -O %s" % shlex.quote(localpath) | ||
104 | 114 | ||
105 | if ud.user and ud.pswd: | 115 | if ud.user and ud.pswd: |
106 | fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd) | 116 | fetchcmd += " --auth-no-challenge" |
117 | if ud.parm.get("redirectauth", "1") == "1": | ||
118 | # An undocumented feature of wget is that if the | ||
119 | # username/password are specified on the URI, wget will only | ||
120 | # send the Authorization header to the first host and not to | ||
121 | # any hosts that it is redirected to. With the increasing | ||
122 | # usage of temporary AWS URLs, this difference now matters as | ||
123 | # AWS will reject any request that has authentication both in | ||
124 | # the query parameters (from the redirect) and in the | ||
125 | # Authorization header. | ||
126 | fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd) | ||
107 | 127 | ||
108 | uri = ud.url.split(";")[0] | 128 | uri = ud.url.split(";")[0] |
109 | if os.path.exists(ud.localpath): | 129 | if os.path.exists(ud.localpath): |
@@ -114,6 +134,15 @@ class Wget(FetchMethod): | |||
114 | 134 | ||
115 | self._runwget(ud, d, fetchcmd, False) | 135 | self._runwget(ud, d, fetchcmd, False) |
116 | 136 | ||
137 | # Try and verify any checksum now, meaning if it isn't correct, we don't remove the | ||
138 | # original file, which might be a race (imagine two recipes referencing the same | ||
139 | # source, one with an incorrect checksum) | ||
140 | bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False) | ||
141 | |||
142 | # Remove the ".tmp" and move the file into position atomically | ||
143 | # Our lock prevents multiple writers but mirroring code may grab incomplete files | ||
144 | os.rename(localpath, localpath[:-4]) | ||
145 | |||
117 | # Sanity check since wget can pretend it succeed when it didn't | 146 | # Sanity check since wget can pretend it succeed when it didn't |
118 | # Also, this used to happen if sourceforge sent us to the mirror page | 147 | # Also, this used to happen if sourceforge sent us to the mirror page |
119 | if not os.path.exists(ud.localpath): | 148 | if not os.path.exists(ud.localpath): |
@@ -209,7 +238,7 @@ class Wget(FetchMethod): | |||
209 | # We let the request fail and expect it to be | 238 | # We let the request fail and expect it to be |
210 | # tried once more ("try_again" in check_status()), | 239 | # tried once more ("try_again" in check_status()), |
211 | # with the dead connection removed from the cache. | 240 | # with the dead connection removed from the cache. |
212 | # If it still fails, we give up, which can happend for bad | 241 | # If it still fails, we give up, which can happen for bad |
213 | # HTTP proxy settings. | 242 | # HTTP proxy settings. |
214 | fetch.connection_cache.remove_connection(h.host, h.port) | 243 | fetch.connection_cache.remove_connection(h.host, h.port) |
215 | raise urllib.error.URLError(err) | 244 | raise urllib.error.URLError(err) |
@@ -282,64 +311,76 @@ class Wget(FetchMethod): | |||
282 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) | 311 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) |
283 | newreq.get_method = req.get_method | 312 | newreq.get_method = req.get_method |
284 | return newreq | 313 | return newreq |
285 | exported_proxies = export_proxies(d) | ||
286 | |||
287 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] | ||
288 | if exported_proxies: | ||
289 | handlers.append(urllib.request.ProxyHandler()) | ||
290 | handlers.append(CacheHTTPHandler()) | ||
291 | # Since Python 2.7.9 ssl cert validation is enabled by default | ||
292 | # see PEP-0476, this causes verification errors on some https servers | ||
293 | # so disable by default. | ||
294 | import ssl | ||
295 | if hasattr(ssl, '_create_unverified_context'): | ||
296 | handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) | ||
297 | opener = urllib.request.build_opener(*handlers) | ||
298 | |||
299 | try: | ||
300 | uri = ud.url.split(";")[0] | ||
301 | r = urllib.request.Request(uri) | ||
302 | r.get_method = lambda: "HEAD" | ||
303 | # Some servers (FusionForge, as used on Alioth) require that the | ||
304 | # optional Accept header is set. | ||
305 | r.add_header("Accept", "*/*") | ||
306 | r.add_header("User-Agent", self.user_agent) | ||
307 | def add_basic_auth(login_str, request): | ||
308 | '''Adds Basic auth to http request, pass in login:password as string''' | ||
309 | import base64 | ||
310 | encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8") | ||
311 | authheader = "Basic %s" % encodeuser | ||
312 | r.add_header("Authorization", authheader) | ||
313 | |||
314 | if ud.user and ud.pswd: | ||
315 | add_basic_auth(ud.user + ':' + ud.pswd, r) | ||
316 | 314 | ||
317 | try: | 315 | # We need to update the environment here as both the proxy and HTTPS |
318 | import netrc | 316 | # handlers need variables set. The proxy needs http_proxy and friends to |
319 | n = netrc.netrc() | 317 | # be set, and HTTPSHandler ends up calling into openssl to load the |
320 | login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname) | 318 | # certificates. In buildtools configurations this will be looking at the |
321 | add_basic_auth("%s:%s" % (login, password), r) | 319 | # wrong place for certificates by default: we set SSL_CERT_FILE to the |
322 | except (TypeError, ImportError, IOError, netrc.NetrcParseError): | 320 | # right location in the buildtools environment script but as BitBake |
323 | pass | 321 | # prunes prunes the environment this is lost. When binaries are executed |
324 | 322 | # runfetchcmd ensures these values are in the environment, but this is | |
325 | with opener.open(r) as response: | 323 | # pure Python so we need to update the environment. |
326 | pass | 324 | # |
327 | except urllib.error.URLError as e: | 325 | # Avoid tramping the environment too much by using bb.utils.environment |
328 | if try_again: | 326 | # to scope the changes to the build_opener request, which is when the |
329 | logger.debug2("checkstatus: trying again") | 327 | # environment lookups happen. |
330 | return self.checkstatus(fetch, ud, d, False) | 328 | newenv = bb.fetch2.get_fetcher_environment(d) |
329 | |||
330 | with bb.utils.environment(**newenv): | ||
331 | import ssl | ||
332 | |||
333 | if self.check_certs(d): | ||
334 | context = ssl.create_default_context() | ||
331 | else: | 335 | else: |
332 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 336 | context = ssl._create_unverified_context() |
333 | logger.debug2("checkstatus() urlopen failed: %s" % e) | 337 | |
334 | return False | 338 | handlers = [FixedHTTPRedirectHandler, |
335 | except ConnectionResetError as e: | 339 | HTTPMethodFallback, |
336 | if try_again: | 340 | urllib.request.ProxyHandler(), |
337 | logger.debug2("checkstatus: trying again") | 341 | CacheHTTPHandler(), |
338 | return self.checkstatus(fetch, ud, d, False) | 342 | urllib.request.HTTPSHandler(context=context)] |
339 | else: | 343 | opener = urllib.request.build_opener(*handlers) |
340 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 344 | |
341 | logger.debug2("checkstatus() urlopen failed: %s" % e) | 345 | try: |
342 | return False | 346 | uri_base = ud.url.split(";")[0] |
347 | uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path) | ||
348 | r = urllib.request.Request(uri) | ||
349 | r.get_method = lambda: "HEAD" | ||
350 | # Some servers (FusionForge, as used on Alioth) require that the | ||
351 | # optional Accept header is set. | ||
352 | r.add_header("Accept", "*/*") | ||
353 | r.add_header("User-Agent", self.user_agent) | ||
354 | def add_basic_auth(login_str, request): | ||
355 | '''Adds Basic auth to http request, pass in login:password as string''' | ||
356 | import base64 | ||
357 | encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8") | ||
358 | authheader = "Basic %s" % encodeuser | ||
359 | r.add_header("Authorization", authheader) | ||
360 | |||
361 | if ud.user and ud.pswd: | ||
362 | add_basic_auth(ud.user + ':' + ud.pswd, r) | ||
363 | |||
364 | try: | ||
365 | import netrc | ||
366 | auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname) | ||
367 | if auth_data: | ||
368 | login, _, password = auth_data | ||
369 | add_basic_auth("%s:%s" % (login, password), r) | ||
370 | except (FileNotFoundError, netrc.NetrcParseError): | ||
371 | pass | ||
372 | |||
373 | with opener.open(r, timeout=30) as response: | ||
374 | pass | ||
375 | except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e: | ||
376 | if try_again: | ||
377 | logger.debug2("checkstatus: trying again") | ||
378 | return self.checkstatus(fetch, ud, d, False) | ||
379 | else: | ||
380 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | ||
381 | logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri,e)) | ||
382 | return False | ||
383 | |||
343 | return True | 384 | return True |
344 | 385 | ||
345 | def _parse_path(self, regex, s): | 386 | def _parse_path(self, regex, s): |
@@ -472,7 +513,7 @@ class Wget(FetchMethod): | |||
472 | version_dir = ['', '', ''] | 513 | version_dir = ['', '', ''] |
473 | version = ['', '', ''] | 514 | version = ['', '', ''] |
474 | 515 | ||
475 | dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))") | 516 | dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))") |
476 | s = dirver_regex.search(dirver) | 517 | s = dirver_regex.search(dirver) |
477 | if s: | 518 | if s: |
478 | version_dir[1] = s.group('ver') | 519 | version_dir[1] = s.group('ver') |
@@ -548,7 +589,7 @@ class Wget(FetchMethod): | |||
548 | 589 | ||
549 | # src.rpm extension was added only for rpm package. Can be removed if the rpm | 590 | # src.rpm extension was added only for rpm package. Can be removed if the rpm |
550 | # packaged will always be considered as having to be manually upgraded | 591 | # packaged will always be considered as having to be manually upgraded |
551 | psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" | 592 | psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)" |
552 | 593 | ||
553 | # match name, version and archive type of a package | 594 | # match name, version and archive type of a package |
554 | package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)" | 595 | package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)" |
@@ -599,10 +640,10 @@ class Wget(FetchMethod): | |||
599 | # search for version matches on folders inside the path, like: | 640 | # search for version matches on folders inside the path, like: |
600 | # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz | 641 | # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz |
601 | dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") | 642 | dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") |
602 | m = dirver_regex.search(path) | 643 | m = dirver_regex.findall(path) |
603 | if m: | 644 | if m: |
604 | pn = d.getVar('PN') | 645 | pn = d.getVar('PN') |
605 | dirver = m.group('dirver') | 646 | dirver = m[-1][0] |
606 | 647 | ||
607 | dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn))) | 648 | dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn))) |
608 | if not dirver_pn_regex.search(dirver): | 649 | if not dirver_pn_regex.search(dirver): |
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py index 06bad495ac..bca8ebfa09 100755 --- a/bitbake/lib/bb/main.py +++ b/bitbake/lib/bb/main.py | |||
@@ -12,11 +12,12 @@ | |||
12 | import os | 12 | import os |
13 | import sys | 13 | import sys |
14 | import logging | 14 | import logging |
15 | import optparse | 15 | import argparse |
16 | import warnings | 16 | import warnings |
17 | import fcntl | 17 | import fcntl |
18 | import time | 18 | import time |
19 | import traceback | 19 | import traceback |
20 | import datetime | ||
20 | 21 | ||
21 | import bb | 22 | import bb |
22 | from bb import event | 23 | from bb import event |
@@ -43,18 +44,18 @@ def present_options(optionlist): | |||
43 | else: | 44 | else: |
44 | return optionlist[0] | 45 | return optionlist[0] |
45 | 46 | ||
46 | class BitbakeHelpFormatter(optparse.IndentedHelpFormatter): | 47 | class BitbakeHelpFormatter(argparse.HelpFormatter): |
47 | def format_option(self, option): | 48 | def _get_help_string(self, action): |
48 | # We need to do this here rather than in the text we supply to | 49 | # We need to do this here rather than in the text we supply to |
49 | # add_option() because we don't want to call list_extension_modules() | 50 | # add_option() because we don't want to call list_extension_modules() |
50 | # on every execution (since it imports all of the modules) | 51 | # on every execution (since it imports all of the modules) |
51 | # Note also that we modify option.help rather than the returned text | 52 | # Note also that we modify option.help rather than the returned text |
52 | # - this is so that we don't have to re-format the text ourselves | 53 | # - this is so that we don't have to re-format the text ourselves |
53 | if option.dest == 'ui': | 54 | if action.dest == 'ui': |
54 | valid_uis = list_extension_modules(bb.ui, 'main') | 55 | valid_uis = list_extension_modules(bb.ui, 'main') |
55 | option.help = option.help.replace('@CHOICES@', present_options(valid_uis)) | 56 | return action.help.replace('@CHOICES@', present_options(valid_uis)) |
56 | 57 | ||
57 | return optparse.IndentedHelpFormatter.format_option(self, option) | 58 | return action.help |
58 | 59 | ||
59 | def list_extension_modules(pkg, checkattr): | 60 | def list_extension_modules(pkg, checkattr): |
60 | """ | 61 | """ |
@@ -112,189 +113,209 @@ def _showwarning(message, category, filename, lineno, file=None, line=None): | |||
112 | warnlog.warning(s) | 113 | warnlog.warning(s) |
113 | 114 | ||
114 | warnings.showwarning = _showwarning | 115 | warnings.showwarning = _showwarning |
115 | warnings.filterwarnings("ignore") | ||
116 | warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)") | ||
117 | warnings.filterwarnings("ignore", category=PendingDeprecationWarning) | ||
118 | warnings.filterwarnings("ignore", category=ImportWarning) | ||
119 | warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$") | ||
120 | warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers") | ||
121 | |||
122 | 116 | ||
123 | def create_bitbake_parser(): | 117 | def create_bitbake_parser(): |
124 | parser = optparse.OptionParser( | 118 | parser = argparse.ArgumentParser( |
125 | formatter=BitbakeHelpFormatter(), | 119 | description="""\ |
126 | version="BitBake Build Tool Core version %s" % bb.__version__, | 120 | It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which |
127 | usage="""%prog [options] [recipename/target recipe:do_task ...] | 121 | will provide the layer, BBFILES and other configuration information. |
128 | 122 | """, | |
129 | Executes the specified task (default is 'build') for a given set of target recipes (.bb files). | 123 | formatter_class=BitbakeHelpFormatter, |
130 | It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which | 124 | allow_abbrev=False, |
131 | will provide the layer, BBFILES and other configuration information.""") | 125 | add_help=False, # help is manually added below in a specific argument group |
132 | 126 | ) | |
133 | parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None, | 127 | |
134 | help="Execute tasks from a specific .bb recipe directly. WARNING: Does " | 128 | general_group = parser.add_argument_group('General options') |
135 | "not handle any dependencies from other recipes.") | 129 | task_group = parser.add_argument_group('Task control options') |
136 | 130 | exec_group = parser.add_argument_group('Execution control options') | |
137 | parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True, | 131 | logging_group = parser.add_argument_group('Logging/output control options') |
138 | help="Continue as much as possible after an error. While the target that " | 132 | server_group = parser.add_argument_group('Server options') |
139 | "failed and anything depending on it cannot be built, as much as " | 133 | config_group = parser.add_argument_group('Configuration options') |
140 | "possible will be built before stopping.") | 134 | |
141 | 135 | general_group.add_argument("targets", nargs="*", metavar="recipename/target", | |
142 | parser.add_option("-f", "--force", action="store_true", dest="force", default=False, | 136 | help="Execute the specified task (default is 'build') for these target " |
143 | help="Force the specified targets/task to run (invalidating any " | 137 | "recipes (.bb files).") |
144 | "existing stamp file).") | 138 | |
145 | 139 | general_group.add_argument("-s", "--show-versions", action="store_true", | |
146 | parser.add_option("-c", "--cmd", action="store", dest="cmd", | 140 | help="Show current and preferred versions of all recipes.") |
147 | help="Specify the task to execute. The exact options available " | 141 | |
148 | "depend on the metadata. Some examples might be 'compile'" | 142 | general_group.add_argument("-e", "--environment", action="store_true", |
149 | " or 'populate_sysroot' or 'listtasks' may give a list of " | 143 | dest="show_environment", |
150 | "the tasks available.") | 144 | help="Show the global or per-recipe environment complete with information" |
151 | 145 | " about where variables were set/changed.") | |
152 | parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp", | 146 | |
153 | help="Invalidate the stamp for the specified task such as 'compile' " | 147 | general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph", |
154 | "and then run the default task for the specified target(s).") | 148 | help="Save dependency tree information for the specified " |
155 | 149 | "targets in the dot syntax.") | |
156 | parser.add_option("-r", "--read", action="append", dest="prefile", default=[], | ||
157 | help="Read the specified file before bitbake.conf.") | ||
158 | |||
159 | parser.add_option("-R", "--postread", action="append", dest="postfile", default=[], | ||
160 | help="Read the specified file after bitbake.conf.") | ||
161 | |||
162 | parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, | ||
163 | help="Enable tracing of shell tasks (with 'set -x'). " | ||
164 | "Also print bb.note(...) messages to stdout (in " | ||
165 | "addition to writing them to ${T}/log.do_<task>).") | ||
166 | |||
167 | parser.add_option("-D", "--debug", action="count", dest="debug", default=0, | ||
168 | help="Increase the debug level. You can specify this " | ||
169 | "more than once. -D sets the debug level to 1, " | ||
170 | "where only bb.debug(1, ...) messages are printed " | ||
171 | "to stdout; -DD sets the debug level to 2, where " | ||
172 | "both bb.debug(1, ...) and bb.debug(2, ...) " | ||
173 | "messages are printed; etc. Without -D, no debug " | ||
174 | "messages are printed. Note that -D only affects " | ||
175 | "output to stdout. All debug messages are written " | ||
176 | "to ${T}/log.do_taskname, regardless of the debug " | ||
177 | "level.") | ||
178 | |||
179 | parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0, | ||
180 | help="Output less log message data to the terminal. You can specify this more than once.") | ||
181 | |||
182 | parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False, | ||
183 | help="Don't execute, just go through the motions.") | ||
184 | |||
185 | parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures", | ||
186 | default=[], metavar="SIGNATURE_HANDLER", | ||
187 | help="Dump out the signature construction information, with no task " | ||
188 | "execution. The SIGNATURE_HANDLER parameter is passed to the " | ||
189 | "handler. Two common values are none and printdiff but the handler " | ||
190 | "may define more/less. none means only dump the signature, printdiff" | ||
191 | " means compare the dumped signature with the cached one.") | ||
192 | |||
193 | parser.add_option("-p", "--parse-only", action="store_true", | ||
194 | dest="parse_only", default=False, | ||
195 | help="Quit after parsing the BB recipes.") | ||
196 | |||
197 | parser.add_option("-s", "--show-versions", action="store_true", | ||
198 | dest="show_versions", default=False, | ||
199 | help="Show current and preferred versions of all recipes.") | ||
200 | |||
201 | parser.add_option("-e", "--environment", action="store_true", | ||
202 | dest="show_environment", default=False, | ||
203 | help="Show the global or per-recipe environment complete with information" | ||
204 | " about where variables were set/changed.") | ||
205 | |||
206 | parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False, | ||
207 | help="Save dependency tree information for the specified " | ||
208 | "targets in the dot syntax.") | ||
209 | |||
210 | parser.add_option("-I", "--ignore-deps", action="append", | ||
211 | dest="extra_assume_provided", default=[], | ||
212 | help="Assume these dependencies don't exist and are already provided " | ||
213 | "(equivalent to ASSUME_PROVIDED). Useful to make dependency " | ||
214 | "graphs more appealing") | ||
215 | |||
216 | parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[], | ||
217 | help="Show debug logging for the specified logging domains") | ||
218 | |||
219 | parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False, | ||
220 | help="Profile the command and save reports.") | ||
221 | 150 | ||
222 | # @CHOICES@ is substituted out by BitbakeHelpFormatter above | 151 | # @CHOICES@ is substituted out by BitbakeHelpFormatter above |
223 | parser.add_option("-u", "--ui", action="store", dest="ui", | 152 | general_group.add_argument("-u", "--ui", |
224 | default=os.environ.get('BITBAKE_UI', 'knotty'), | 153 | default=os.environ.get('BITBAKE_UI', 'knotty'), |
225 | help="The user interface to use (@CHOICES@ - default %default).") | 154 | help="The user interface to use (@CHOICES@ - default %(default)s).") |
226 | 155 | ||
227 | parser.add_option("", "--token", action="store", dest="xmlrpctoken", | 156 | general_group.add_argument("--version", action="store_true", |
228 | default=os.environ.get("BBTOKEN"), | 157 | help="Show programs version and exit.") |
229 | help="Specify the connection token to be used when connecting " | 158 | |
230 | "to a remote server.") | 159 | general_group.add_argument('-h', '--help', action='help', |
231 | 160 | help='Show this help message and exit.') | |
232 | parser.add_option("", "--revisions-changed", action="store_true", | 161 | |
233 | dest="revisions_changed", default=False, | 162 | |
234 | help="Set the exit code depending on whether upstream floating " | 163 | task_group.add_argument("-f", "--force", action="store_true", |
235 | "revisions have changed or not.") | 164 | help="Force the specified targets/task to run (invalidating any " |
236 | 165 | "existing stamp file).") | |
237 | parser.add_option("", "--server-only", action="store_true", | 166 | |
238 | dest="server_only", default=False, | 167 | task_group.add_argument("-c", "--cmd", |
239 | help="Run bitbake without a UI, only starting a server " | 168 | help="Specify the task to execute. The exact options available " |
240 | "(cooker) process.") | 169 | "depend on the metadata. Some examples might be 'compile'" |
241 | 170 | " or 'populate_sysroot' or 'listtasks' may give a list of " | |
242 | parser.add_option("-B", "--bind", action="store", dest="bind", default=False, | 171 | "the tasks available.") |
243 | help="The name/address for the bitbake xmlrpc server to bind to.") | 172 | |
244 | 173 | task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp", | |
245 | parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout", | 174 | help="Invalidate the stamp for the specified task such as 'compile' " |
246 | default=os.getenv("BB_SERVER_TIMEOUT"), | 175 | "and then run the default task for the specified target(s).") |
247 | help="Set timeout to unload bitbake server due to inactivity, " | 176 | |
248 | "set to -1 means no unload, " | 177 | task_group.add_argument("--runall", action="append", default=[], |
249 | "default: Environment variable BB_SERVER_TIMEOUT.") | 178 | help="Run the specified task for any recipe in the taskgraph of the " |
250 | 179 | "specified target (even if it wouldn't otherwise have run).") | |
251 | parser.add_option("", "--no-setscene", action="store_true", | 180 | |
252 | dest="nosetscene", default=False, | 181 | task_group.add_argument("--runonly", action="append", |
253 | help="Do not run any setscene tasks. sstate will be ignored and " | 182 | help="Run only the specified task within the taskgraph of the " |
254 | "everything needed, built.") | 183 | "specified targets (and any task dependencies those tasks may have).") |
255 | 184 | ||
256 | parser.add_option("", "--skip-setscene", action="store_true", | 185 | task_group.add_argument("--no-setscene", action="store_true", |
257 | dest="skipsetscene", default=False, | 186 | dest="nosetscene", |
258 | help="Skip setscene tasks if they would be executed. Tasks previously " | 187 | help="Do not run any setscene tasks. sstate will be ignored and " |
259 | "restored from sstate will be kept, unlike --no-setscene") | 188 | "everything needed, built.") |
260 | 189 | ||
261 | parser.add_option("", "--setscene-only", action="store_true", | 190 | task_group.add_argument("--skip-setscene", action="store_true", |
262 | dest="setsceneonly", default=False, | 191 | dest="skipsetscene", |
263 | help="Only run setscene tasks, don't run any real tasks.") | 192 | help="Skip setscene tasks if they would be executed. Tasks previously " |
264 | 193 | "restored from sstate will be kept, unlike --no-setscene.") | |
265 | parser.add_option("", "--remote-server", action="store", dest="remote_server", | 194 | |
266 | default=os.environ.get("BBSERVER"), | 195 | task_group.add_argument("--setscene-only", action="store_true", |
267 | help="Connect to the specified server.") | 196 | dest="setsceneonly", |
268 | 197 | help="Only run setscene tasks, don't run any real tasks.") | |
269 | parser.add_option("-m", "--kill-server", action="store_true", | 198 | |
270 | dest="kill_server", default=False, | 199 | |
271 | help="Terminate any running bitbake server.") | 200 | exec_group.add_argument("-n", "--dry-run", action="store_true", |
272 | 201 | help="Don't execute, just go through the motions.") | |
273 | parser.add_option("", "--observe-only", action="store_true", | 202 | |
274 | dest="observe_only", default=False, | 203 | exec_group.add_argument("-p", "--parse-only", action="store_true", |
275 | help="Connect to a server as an observing-only client.") | 204 | help="Quit after parsing the BB recipes.") |
276 | 205 | ||
277 | parser.add_option("", "--status-only", action="store_true", | 206 | exec_group.add_argument("-k", "--continue", action="store_false", dest="halt", |
278 | dest="status_only", default=False, | 207 | help="Continue as much as possible after an error. While the target that " |
279 | help="Check the status of the remote bitbake server.") | 208 | "failed and anything depending on it cannot be built, as much as " |
280 | 209 | "possible will be built before stopping.") | |
281 | parser.add_option("-w", "--write-log", action="store", dest="writeeventlog", | 210 | |
282 | default=os.environ.get("BBEVENTLOG"), | 211 | exec_group.add_argument("-P", "--profile", action="store_true", |
283 | help="Writes the event log of the build to a bitbake event json file. " | 212 | help="Profile the command and save reports.") |
284 | "Use '' (empty string) to assign the name automatically.") | 213 | |
285 | 214 | exec_group.add_argument("-S", "--dump-signatures", action="append", | |
286 | parser.add_option("", "--runall", action="append", dest="runall", | 215 | default=[], metavar="SIGNATURE_HANDLER", |
287 | help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).") | 216 | help="Dump out the signature construction information, with no task " |
288 | 217 | "execution. The SIGNATURE_HANDLER parameter is passed to the " | |
289 | parser.add_option("", "--runonly", action="append", dest="runonly", | 218 | "handler. Two common values are none and printdiff but the handler " |
290 | help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).") | 219 | "may define more/less. none means only dump the signature, printdiff" |
220 | " means recursively compare the dumped signature with the most recent" | ||
221 | " one in a local build or sstate cache (can be used to find out why tasks re-run" | ||
222 | " when that is not expected)") | ||
223 | |||
224 | exec_group.add_argument("--revisions-changed", action="store_true", | ||
225 | help="Set the exit code depending on whether upstream floating " | ||
226 | "revisions have changed or not.") | ||
227 | |||
228 | exec_group.add_argument("-b", "--buildfile", | ||
229 | help="Execute tasks from a specific .bb recipe directly. WARNING: Does " | ||
230 | "not handle any dependencies from other recipes.") | ||
231 | |||
232 | logging_group.add_argument("-D", "--debug", action="count", default=0, | ||
233 | help="Increase the debug level. You can specify this " | ||
234 | "more than once. -D sets the debug level to 1, " | ||
235 | "where only bb.debug(1, ...) messages are printed " | ||
236 | "to stdout; -DD sets the debug level to 2, where " | ||
237 | "both bb.debug(1, ...) and bb.debug(2, ...) " | ||
238 | "messages are printed; etc. Without -D, no debug " | ||
239 | "messages are printed. Note that -D only affects " | ||
240 | "output to stdout. All debug messages are written " | ||
241 | "to ${T}/log.do_taskname, regardless of the debug " | ||
242 | "level.") | ||
243 | |||
244 | logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains", | ||
245 | default=[], | ||
246 | help="Show debug logging for the specified logging domains.") | ||
247 | |||
248 | logging_group.add_argument("-v", "--verbose", action="store_true", | ||
249 | help="Enable tracing of shell tasks (with 'set -x'). " | ||
250 | "Also print bb.note(...) messages to stdout (in " | ||
251 | "addition to writing them to ${T}/log.do_<task>).") | ||
252 | |||
253 | logging_group.add_argument("-q", "--quiet", action="count", default=0, | ||
254 | help="Output less log message data to the terminal. You can specify this " | ||
255 | "more than once.") | ||
256 | |||
257 | logging_group.add_argument("-w", "--write-log", dest="writeeventlog", | ||
258 | default=os.environ.get("BBEVENTLOG"), | ||
259 | help="Writes the event log of the build to a bitbake event json file. " | ||
260 | "Use '' (empty string) to assign the name automatically.") | ||
261 | |||
262 | |||
263 | server_group.add_argument("-B", "--bind", default=False, | ||
264 | help="The name/address for the bitbake xmlrpc server to bind to.") | ||
265 | |||
266 | server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout", | ||
267 | default=os.getenv("BB_SERVER_TIMEOUT"), | ||
268 | help="Set timeout to unload bitbake server due to inactivity, " | ||
269 | "set to -1 means no unload, " | ||
270 | "default: Environment variable BB_SERVER_TIMEOUT.") | ||
271 | |||
272 | server_group.add_argument("--remote-server", | ||
273 | default=os.environ.get("BBSERVER"), | ||
274 | help="Connect to the specified server.") | ||
275 | |||
276 | server_group.add_argument("-m", "--kill-server", action="store_true", | ||
277 | help="Terminate any running bitbake server.") | ||
278 | |||
279 | server_group.add_argument("--token", dest="xmlrpctoken", | ||
280 | default=os.environ.get("BBTOKEN"), | ||
281 | help="Specify the connection token to be used when connecting " | ||
282 | "to a remote server.") | ||
283 | |||
284 | server_group.add_argument("--observe-only", action="store_true", | ||
285 | help="Connect to a server as an observing-only client.") | ||
286 | |||
287 | server_group.add_argument("--status-only", action="store_true", | ||
288 | help="Check the status of the remote bitbake server.") | ||
289 | |||
290 | server_group.add_argument("--server-only", action="store_true", | ||
291 | help="Run bitbake without a UI, only starting a server " | ||
292 | "(cooker) process.") | ||
293 | |||
294 | |||
295 | config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[], | ||
296 | help="Read the specified file before bitbake.conf.") | ||
297 | |||
298 | config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[], | ||
299 | help="Read the specified file after bitbake.conf.") | ||
300 | |||
301 | |||
302 | config_group.add_argument("-I", "--ignore-deps", action="append", | ||
303 | dest="extra_assume_provided", default=[], | ||
304 | help="Assume these dependencies don't exist and are already provided " | ||
305 | "(equivalent to ASSUME_PROVIDED). Useful to make dependency " | ||
306 | "graphs more appealing.") | ||
307 | |||
291 | return parser | 308 | return parser |
292 | 309 | ||
293 | 310 | ||
294 | class BitBakeConfigParameters(cookerdata.ConfigParameters): | 311 | class BitBakeConfigParameters(cookerdata.ConfigParameters): |
295 | def parseCommandLine(self, argv=sys.argv): | 312 | def parseCommandLine(self, argv=sys.argv): |
296 | parser = create_bitbake_parser() | 313 | parser = create_bitbake_parser() |
297 | options, targets = parser.parse_args(argv) | 314 | options = parser.parse_intermixed_args(argv[1:]) |
315 | |||
316 | if options.version: | ||
317 | print("BitBake Build Tool Core version %s" % bb.__version__) | ||
318 | sys.exit(0) | ||
298 | 319 | ||
299 | if options.quiet and options.verbose: | 320 | if options.quiet and options.verbose: |
300 | parser.error("options --quiet and --verbose are mutually exclusive") | 321 | parser.error("options --quiet and --verbose are mutually exclusive") |
@@ -326,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters): | |||
326 | else: | 347 | else: |
327 | options.xmlrpcinterface = (None, 0) | 348 | options.xmlrpcinterface = (None, 0) |
328 | 349 | ||
329 | return options, targets[1:] | 350 | return options, options.targets |
330 | 351 | ||
331 | 352 | ||
332 | def bitbake_main(configParams, configuration): | 353 | def bitbake_main(configParams, configuration): |
@@ -391,6 +412,9 @@ def bitbake_main(configParams, configuration): | |||
391 | 412 | ||
392 | return 1 | 413 | return 1 |
393 | 414 | ||
415 | def timestamp(): | ||
416 | return datetime.datetime.now().strftime('%H:%M:%S.%f') | ||
417 | |||
394 | def setup_bitbake(configParams, extrafeatures=None): | 418 | def setup_bitbake(configParams, extrafeatures=None): |
395 | # Ensure logging messages get sent to the UI as events | 419 | # Ensure logging messages get sent to the UI as events |
396 | handler = bb.event.LogHandler() | 420 | handler = bb.event.LogHandler() |
@@ -398,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None): | |||
398 | # In status only mode there are no logs and no UI | 422 | # In status only mode there are no logs and no UI |
399 | logger.addHandler(handler) | 423 | logger.addHandler(handler) |
400 | 424 | ||
425 | if configParams.dump_signatures: | ||
426 | if extrafeatures is None: | ||
427 | extrafeatures = [] | ||
428 | extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO) | ||
429 | |||
401 | if configParams.server_only: | 430 | if configParams.server_only: |
402 | featureset = [] | 431 | featureset = [] |
403 | ui_module = None | 432 | ui_module = None |
@@ -425,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None): | |||
425 | retries = 8 | 454 | retries = 8 |
426 | while retries: | 455 | while retries: |
427 | try: | 456 | try: |
428 | topdir, lock = lockBitbake() | 457 | topdir, lock, lockfile = lockBitbake() |
429 | sockname = topdir + "/bitbake.sock" | 458 | sockname = topdir + "/bitbake.sock" |
430 | if lock: | 459 | if lock: |
431 | if configParams.status_only or configParams.kill_server: | 460 | if configParams.status_only or configParams.kill_server: |
@@ -436,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None): | |||
436 | logger.info("Starting bitbake server...") | 465 | logger.info("Starting bitbake server...") |
437 | # Clear the event queue since we already displayed messages | 466 | # Clear the event queue since we already displayed messages |
438 | bb.event.ui_queue = [] | 467 | bb.event.ui_queue = [] |
439 | server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface) | 468 | server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile) |
440 | 469 | ||
441 | else: | 470 | else: |
442 | logger.info("Reconnecting to bitbake server...") | 471 | logger.info("Reconnecting to bitbake server...") |
443 | if not os.path.exists(sockname): | 472 | if not os.path.exists(sockname): |
444 | logger.info("Previous bitbake instance shutting down?, waiting to retry...") | 473 | logger.info("Previous bitbake instance shutting down?, waiting to retry... (%s)" % timestamp()) |
474 | procs = bb.server.process.get_lockfile_process_msg(lockfile) | ||
475 | if procs: | ||
476 | logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs)) | ||
477 | logger.info("Directory listing: %s" % (str(os.listdir(topdir)))) | ||
445 | i = 0 | 478 | i = 0 |
446 | lock = None | 479 | lock = None |
447 | # Wait for 5s or until we can get the lock | 480 | # Wait for 5s or until we can get the lock |
448 | while not lock and i < 50: | 481 | while not lock and i < 50: |
449 | time.sleep(0.1) | 482 | time.sleep(0.1) |
450 | _, lock = lockBitbake() | 483 | _, lock, _ = lockBitbake() |
451 | i += 1 | 484 | i += 1 |
452 | if lock: | 485 | if lock: |
453 | bb.utils.unlockfile(lock) | 486 | bb.utils.unlockfile(lock) |
@@ -466,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None): | |||
466 | retries -= 1 | 499 | retries -= 1 |
467 | tryno = 8 - retries | 500 | tryno = 8 - retries |
468 | if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)): | 501 | if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)): |
469 | logger.info("Retrying server connection (#%d)..." % tryno) | 502 | logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp())) |
470 | else: | 503 | else: |
471 | logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc())) | 504 | logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp())) |
472 | 505 | ||
473 | if not retries: | 506 | if not retries: |
474 | bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).") | 507 | bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).") |
@@ -497,5 +530,5 @@ def lockBitbake(): | |||
497 | bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?") | 530 | bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?") |
498 | raise BBMainFatal | 531 | raise BBMainFatal |
499 | lockfile = topdir + "/bitbake.lock" | 532 | lockfile = topdir + "/bitbake.lock" |
500 | return topdir, bb.utils.lockfile(lockfile, False, False) | 533 | return topdir, bb.utils.lockfile(lockfile, False, False), lockfile |
501 | 534 | ||
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py index 98f2109ed2..f928210351 100644 --- a/bitbake/lib/bb/monitordisk.py +++ b/bitbake/lib/bb/monitordisk.py | |||
@@ -76,7 +76,12 @@ def getDiskData(BBDirs): | |||
76 | return None | 76 | return None |
77 | 77 | ||
78 | action = pathSpaceInodeRe.group(1) | 78 | action = pathSpaceInodeRe.group(1) |
79 | if action not in ("ABORT", "STOPTASKS", "WARN"): | 79 | if action == "ABORT": |
80 | # Emit a deprecation warning | ||
81 | logger.warnonce("The BB_DISKMON_DIRS \"ABORT\" action has been renamed to \"HALT\", update configuration") | ||
82 | action = "HALT" | ||
83 | |||
84 | if action not in ("HALT", "STOPTASKS", "WARN"): | ||
80 | printErr("Unknown disk space monitor action: %s" % action) | 85 | printErr("Unknown disk space monitor action: %s" % action) |
81 | return None | 86 | return None |
82 | 87 | ||
@@ -177,7 +182,7 @@ class diskMonitor: | |||
177 | # use them to avoid printing too many warning messages | 182 | # use them to avoid printing too many warning messages |
178 | self.preFreeS = {} | 183 | self.preFreeS = {} |
179 | self.preFreeI = {} | 184 | self.preFreeI = {} |
180 | # This is for STOPTASKS and ABORT, to avoid printing the message | 185 | # This is for STOPTASKS and HALT, to avoid printing the message |
181 | # repeatedly while waiting for the tasks to finish | 186 | # repeatedly while waiting for the tasks to finish |
182 | self.checked = {} | 187 | self.checked = {} |
183 | for k in self.devDict: | 188 | for k in self.devDict: |
@@ -219,8 +224,8 @@ class diskMonitor: | |||
219 | self.checked[k] = True | 224 | self.checked[k] = True |
220 | rq.finish_runqueue(False) | 225 | rq.finish_runqueue(False) |
221 | bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) | 226 | bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) |
222 | elif action == "ABORT" and not self.checked[k]: | 227 | elif action == "HALT" and not self.checked[k]: |
223 | logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") | 228 | logger.error("Immediately halt since the disk space monitor action is \"HALT\"!") |
224 | self.checked[k] = True | 229 | self.checked[k] = True |
225 | rq.finish_runqueue(True) | 230 | rq.finish_runqueue(True) |
226 | bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) | 231 | bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) |
@@ -229,9 +234,10 @@ class diskMonitor: | |||
229 | freeInode = st.f_favail | 234 | freeInode = st.f_favail |
230 | 235 | ||
231 | if minInode and freeInode < minInode: | 236 | if minInode and freeInode < minInode: |
232 | # Some filesystems use dynamic inodes so can't run out | 237 | # Some filesystems use dynamic inodes so can't run out. |
233 | # (e.g. btrfs). This is reported by the inode count being 0. | 238 | # This is reported by the inode count being 0 (btrfs) or the free |
234 | if st.f_files == 0: | 239 | # inode count being -1 (cephfs). |
240 | if st.f_files == 0 or st.f_favail == -1: | ||
235 | self.devDict[k][2] = None | 241 | self.devDict[k][2] = None |
236 | continue | 242 | continue |
237 | # Always show warning, the self.checked would always be False if the action is WARN | 243 | # Always show warning, the self.checked would always be False if the action is WARN |
@@ -245,8 +251,8 @@ class diskMonitor: | |||
245 | self.checked[k] = True | 251 | self.checked[k] = True |
246 | rq.finish_runqueue(False) | 252 | rq.finish_runqueue(False) |
247 | bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) | 253 | bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) |
248 | elif action == "ABORT" and not self.checked[k]: | 254 | elif action == "HALT" and not self.checked[k]: |
249 | logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") | 255 | logger.error("Immediately halt since the disk space monitor action is \"HALT\"!") |
250 | self.checked[k] = True | 256 | self.checked[k] = True |
251 | rq.finish_runqueue(True) | 257 | rq.finish_runqueue(True) |
252 | bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) | 258 | bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) |
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py index 291b38ff7f..3e18596faa 100644 --- a/bitbake/lib/bb/msg.py +++ b/bitbake/lib/bb/msg.py | |||
@@ -30,7 +30,9 @@ class BBLogFormatter(logging.Formatter): | |||
30 | PLAIN = logging.INFO + 1 | 30 | PLAIN = logging.INFO + 1 |
31 | VERBNOTE = logging.INFO + 2 | 31 | VERBNOTE = logging.INFO + 2 |
32 | ERROR = logging.ERROR | 32 | ERROR = logging.ERROR |
33 | ERRORONCE = logging.ERROR - 1 | ||
33 | WARNING = logging.WARNING | 34 | WARNING = logging.WARNING |
35 | WARNONCE = logging.WARNING - 1 | ||
34 | CRITICAL = logging.CRITICAL | 36 | CRITICAL = logging.CRITICAL |
35 | 37 | ||
36 | levelnames = { | 38 | levelnames = { |
@@ -42,7 +44,9 @@ class BBLogFormatter(logging.Formatter): | |||
42 | PLAIN : '', | 44 | PLAIN : '', |
43 | VERBNOTE: 'NOTE', | 45 | VERBNOTE: 'NOTE', |
44 | WARNING : 'WARNING', | 46 | WARNING : 'WARNING', |
47 | WARNONCE : 'WARNING', | ||
45 | ERROR : 'ERROR', | 48 | ERROR : 'ERROR', |
49 | ERRORONCE : 'ERROR', | ||
46 | CRITICAL: 'ERROR', | 50 | CRITICAL: 'ERROR', |
47 | } | 51 | } |
48 | 52 | ||
@@ -58,7 +62,9 @@ class BBLogFormatter(logging.Formatter): | |||
58 | PLAIN : BASECOLOR, | 62 | PLAIN : BASECOLOR, |
59 | VERBNOTE: BASECOLOR, | 63 | VERBNOTE: BASECOLOR, |
60 | WARNING : YELLOW, | 64 | WARNING : YELLOW, |
65 | WARNONCE : YELLOW, | ||
61 | ERROR : RED, | 66 | ERROR : RED, |
67 | ERRORONCE : RED, | ||
62 | CRITICAL: RED, | 68 | CRITICAL: RED, |
63 | } | 69 | } |
64 | 70 | ||
@@ -121,6 +127,22 @@ class BBLogFilter(object): | |||
121 | return True | 127 | return True |
122 | return False | 128 | return False |
123 | 129 | ||
130 | class LogFilterShowOnce(logging.Filter): | ||
131 | def __init__(self): | ||
132 | self.seen_warnings = set() | ||
133 | self.seen_errors = set() | ||
134 | |||
135 | def filter(self, record): | ||
136 | if record.levelno == bb.msg.BBLogFormatter.WARNONCE: | ||
137 | if record.msg in self.seen_warnings: | ||
138 | return False | ||
139 | self.seen_warnings.add(record.msg) | ||
140 | if record.levelno == bb.msg.BBLogFormatter.ERRORONCE: | ||
141 | if record.msg in self.seen_errors: | ||
142 | return False | ||
143 | self.seen_errors.add(record.msg) | ||
144 | return True | ||
145 | |||
124 | class LogFilterGEQLevel(logging.Filter): | 146 | class LogFilterGEQLevel(logging.Filter): |
125 | def __init__(self, level): | 147 | def __init__(self, level): |
126 | self.strlevel = str(level) | 148 | self.strlevel = str(level) |
@@ -206,8 +228,9 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers | |||
206 | """Standalone logger creation function""" | 228 | """Standalone logger creation function""" |
207 | logger = logging.getLogger(name) | 229 | logger = logging.getLogger(name) |
208 | console = logging.StreamHandler(output) | 230 | console = logging.StreamHandler(output) |
231 | console.addFilter(bb.msg.LogFilterShowOnce()) | ||
209 | format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") | 232 | format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") |
210 | if color == 'always' or (color == 'auto' and output.isatty()): | 233 | if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''): |
211 | format.enable_color() | 234 | format.enable_color() |
212 | console.setFormatter(format) | 235 | console.setFormatter(format) |
213 | if preserve_handlers: | 236 | if preserve_handlers: |
@@ -293,10 +316,17 @@ def setLoggingConfig(defaultconfig, userconfigfile=None): | |||
293 | 316 | ||
294 | # Convert all level parameters to integers in case users want to use the | 317 | # Convert all level parameters to integers in case users want to use the |
295 | # bitbake defined level names | 318 | # bitbake defined level names |
296 | for h in logconfig["handlers"].values(): | 319 | for name, h in logconfig["handlers"].items(): |
297 | if "level" in h: | 320 | if "level" in h: |
298 | h["level"] = bb.msg.stringToLevel(h["level"]) | 321 | h["level"] = bb.msg.stringToLevel(h["level"]) |
299 | 322 | ||
323 | # Every handler needs its own instance of the once filter. | ||
324 | once_filter_name = name + ".showonceFilter" | ||
325 | logconfig.setdefault("filters", {})[once_filter_name] = { | ||
326 | "()": "bb.msg.LogFilterShowOnce", | ||
327 | } | ||
328 | h.setdefault("filters", []).append(once_filter_name) | ||
329 | |||
300 | for l in logconfig["loggers"].values(): | 330 | for l in logconfig["loggers"].values(): |
301 | if "level" in l: | 331 | if "level" in l: |
302 | l["level"] = bb.msg.stringToLevel(l["level"]) | 332 | l["level"] = bb.msg.stringToLevel(l["level"]) |
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index c01807ba87..a4358f1374 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
@@ -60,6 +60,14 @@ def cached_mtime_noerror(f): | |||
60 | return 0 | 60 | return 0 |
61 | return __mtime_cache[f] | 61 | return __mtime_cache[f] |
62 | 62 | ||
63 | def check_mtime(f, mtime): | ||
64 | try: | ||
65 | current_mtime = os.stat(f)[stat.ST_MTIME] | ||
66 | __mtime_cache[f] = current_mtime | ||
67 | except OSError: | ||
68 | current_mtime = 0 | ||
69 | return current_mtime == mtime | ||
70 | |||
63 | def update_mtime(f): | 71 | def update_mtime(f): |
64 | try: | 72 | try: |
65 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 73 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] |
@@ -99,12 +107,12 @@ def supports(fn, data): | |||
99 | return 1 | 107 | return 1 |
100 | return 0 | 108 | return 0 |
101 | 109 | ||
102 | def handle(fn, data, include = 0): | 110 | def handle(fn, data, include=0, baseconfig=False): |
103 | """Call the handler that is appropriate for this file""" | 111 | """Call the handler that is appropriate for this file""" |
104 | for h in handlers: | 112 | for h in handlers: |
105 | if h['supports'](fn, data): | 113 | if h['supports'](fn, data): |
106 | with data.inchistory.include(fn): | 114 | with data.inchistory.include(fn): |
107 | return h['handle'](fn, data, include) | 115 | return h['handle'](fn, data, include, baseconfig) |
108 | raise ParseError("not a BitBake file", fn) | 116 | raise ParseError("not a BitBake file", fn) |
109 | 117 | ||
110 | def init(fn, data): | 118 | def init(fn, data): |
@@ -113,6 +121,8 @@ def init(fn, data): | |||
113 | return h['init'](data) | 121 | return h['init'](data) |
114 | 122 | ||
115 | def init_parser(d): | 123 | def init_parser(d): |
124 | if hasattr(bb.parse, "siggen"): | ||
125 | bb.parse.siggen.exit() | ||
116 | bb.parse.siggen = bb.siggen.init(d) | 126 | bb.parse.siggen = bb.siggen.init(d) |
117 | 127 | ||
118 | def resolve_file(fn, d): | 128 | def resolve_file(fn, d): |
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index 50a88f7da7..7581d003fd 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
@@ -9,6 +9,7 @@ | |||
9 | # SPDX-License-Identifier: GPL-2.0-only | 9 | # SPDX-License-Identifier: GPL-2.0-only |
10 | # | 10 | # |
11 | 11 | ||
12 | import sys | ||
12 | import bb | 13 | import bb |
13 | from bb import methodpool | 14 | from bb import methodpool |
14 | from bb.parse import logger | 15 | from bb.parse import logger |
@@ -130,6 +131,10 @@ class DataNode(AstNode): | |||
130 | else: | 131 | else: |
131 | val = groupd["value"] | 132 | val = groupd["value"] |
132 | 133 | ||
134 | if ":append" in key or ":remove" in key or ":prepend" in key: | ||
135 | if op in ["append", "prepend", "postdot", "predot", "ques"]: | ||
136 | bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.") | ||
137 | |||
133 | flag = None | 138 | flag = None |
134 | if 'flag' in groupd and groupd['flag'] is not None: | 139 | if 'flag' in groupd and groupd['flag'] is not None: |
135 | flag = groupd['flag'] | 140 | flag = groupd['flag'] |
@@ -145,7 +150,7 @@ class DataNode(AstNode): | |||
145 | data.setVar(key, val, parsing=True, **loginfo) | 150 | data.setVar(key, val, parsing=True, **loginfo) |
146 | 151 | ||
147 | class MethodNode(AstNode): | 152 | class MethodNode(AstNode): |
148 | tr_tbl = str.maketrans('/.+-@%&', '_______') | 153 | tr_tbl = str.maketrans('/.+-@%&~', '________') |
149 | 154 | ||
150 | def __init__(self, filename, lineno, func_name, body, python, fakeroot): | 155 | def __init__(self, filename, lineno, func_name, body, python, fakeroot): |
151 | AstNode.__init__(self, filename, lineno) | 156 | AstNode.__init__(self, filename, lineno) |
@@ -206,10 +211,12 @@ class ExportFuncsNode(AstNode): | |||
206 | 211 | ||
207 | def eval(self, data): | 212 | def eval(self, data): |
208 | 213 | ||
214 | sentinel = " # Export function set\n" | ||
209 | for func in self.n: | 215 | for func in self.n: |
210 | calledfunc = self.classname + "_" + func | 216 | calledfunc = self.classname + "_" + func |
211 | 217 | ||
212 | if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False): | 218 | basevar = data.getVar(func, False) |
219 | if basevar and sentinel not in basevar: | ||
213 | continue | 220 | continue |
214 | 221 | ||
215 | if data.getVar(func, False): | 222 | if data.getVar(func, False): |
@@ -219,19 +226,18 @@ class ExportFuncsNode(AstNode): | |||
219 | for flag in [ "func", "python" ]: | 226 | for flag in [ "func", "python" ]: |
220 | if data.getVarFlag(calledfunc, flag, False): | 227 | if data.getVarFlag(calledfunc, flag, False): |
221 | data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False)) | 228 | data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False)) |
222 | for flag in [ "dirs" ]: | 229 | for flag in ["dirs", "cleandirs", "fakeroot"]: |
223 | if data.getVarFlag(func, flag, False): | 230 | if data.getVarFlag(func, flag, False): |
224 | data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False)) | 231 | data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False)) |
225 | data.setVarFlag(func, "filename", "autogenerated") | 232 | data.setVarFlag(func, "filename", "autogenerated") |
226 | data.setVarFlag(func, "lineno", 1) | 233 | data.setVarFlag(func, "lineno", 1) |
227 | 234 | ||
228 | if data.getVarFlag(calledfunc, "python", False): | 235 | if data.getVarFlag(calledfunc, "python", False): |
229 | data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True) | 236 | data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True) |
230 | else: | 237 | else: |
231 | if "-" in self.classname: | 238 | if "-" in self.classname: |
232 | bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc)) | 239 | bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc)) |
233 | data.setVar(func, " " + calledfunc + "\n", parsing=True) | 240 | data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True) |
234 | data.setVarFlag(func, 'export_func', '1') | ||
235 | 241 | ||
236 | class AddTaskNode(AstNode): | 242 | class AddTaskNode(AstNode): |
237 | def __init__(self, filename, lineno, func, before, after): | 243 | def __init__(self, filename, lineno, func, before, after): |
@@ -265,6 +271,41 @@ class BBHandlerNode(AstNode): | |||
265 | data.setVarFlag(h, "handler", 1) | 271 | data.setVarFlag(h, "handler", 1) |
266 | data.setVar('__BBHANDLERS', bbhands) | 272 | data.setVar('__BBHANDLERS', bbhands) |
267 | 273 | ||
274 | class PyLibNode(AstNode): | ||
275 | def __init__(self, filename, lineno, libdir, namespace): | ||
276 | AstNode.__init__(self, filename, lineno) | ||
277 | self.libdir = libdir | ||
278 | self.namespace = namespace | ||
279 | |||
280 | def eval(self, data): | ||
281 | global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split() | ||
282 | for m in global_mods: | ||
283 | if m not in bb.utils._context: | ||
284 | bb.utils._context[m] = __import__(m) | ||
285 | |||
286 | libdir = data.expand(self.libdir) | ||
287 | if libdir not in sys.path: | ||
288 | sys.path.append(libdir) | ||
289 | try: | ||
290 | bb.utils._context[self.namespace] = __import__(self.namespace) | ||
291 | toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", []) | ||
292 | for i in toimport: | ||
293 | bb.utils._context[self.namespace] = __import__(self.namespace + "." + i) | ||
294 | mod = getattr(bb.utils._context[self.namespace], i) | ||
295 | fn = getattr(mod, "__file__") | ||
296 | funcs = {} | ||
297 | for f in dir(mod): | ||
298 | if f.startswith("_"): | ||
299 | continue | ||
300 | fcall = getattr(mod, f) | ||
301 | if not callable(fcall): | ||
302 | continue | ||
303 | funcs[f] = fcall | ||
304 | bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i)) | ||
305 | |||
306 | except AttributeError as e: | ||
307 | bb.error("Error importing OE modules: %s" % str(e)) | ||
308 | |||
268 | class InheritNode(AstNode): | 309 | class InheritNode(AstNode): |
269 | def __init__(self, filename, lineno, classes): | 310 | def __init__(self, filename, lineno, classes): |
270 | AstNode.__init__(self, filename, lineno) | 311 | AstNode.__init__(self, filename, lineno) |
@@ -273,6 +314,16 @@ class InheritNode(AstNode): | |||
273 | def eval(self, data): | 314 | def eval(self, data): |
274 | bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data) | 315 | bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data) |
275 | 316 | ||
317 | class InheritDeferredNode(AstNode): | ||
318 | def __init__(self, filename, lineno, classes): | ||
319 | AstNode.__init__(self, filename, lineno) | ||
320 | self.inherit = (classes, filename, lineno) | ||
321 | |||
322 | def eval(self, data): | ||
323 | inherits = data.getVar('__BBDEFINHERITS', False) or [] | ||
324 | inherits.append(self.inherit) | ||
325 | data.setVar('__BBDEFINHERITS', inherits) | ||
326 | |||
276 | def handleInclude(statements, filename, lineno, m, force): | 327 | def handleInclude(statements, filename, lineno, m, force): |
277 | statements.append(IncludeNode(filename, lineno, m.group(1), force)) | 328 | statements.append(IncludeNode(filename, lineno, m.group(1), force)) |
278 | 329 | ||
@@ -316,10 +367,17 @@ def handleDelTask(statements, filename, lineno, m): | |||
316 | def handleBBHandlers(statements, filename, lineno, m): | 367 | def handleBBHandlers(statements, filename, lineno, m): |
317 | statements.append(BBHandlerNode(filename, lineno, m.group(1))) | 368 | statements.append(BBHandlerNode(filename, lineno, m.group(1))) |
318 | 369 | ||
370 | def handlePyLib(statements, filename, lineno, m): | ||
371 | statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2))) | ||
372 | |||
319 | def handleInherit(statements, filename, lineno, m): | 373 | def handleInherit(statements, filename, lineno, m): |
320 | classes = m.group(1) | 374 | classes = m.group(1) |
321 | statements.append(InheritNode(filename, lineno, classes)) | 375 | statements.append(InheritNode(filename, lineno, classes)) |
322 | 376 | ||
377 | def handleInheritDeferred(statements, filename, lineno, m): | ||
378 | classes = m.group(1) | ||
379 | statements.append(InheritDeferredNode(filename, lineno, classes)) | ||
380 | |||
323 | def runAnonFuncs(d): | 381 | def runAnonFuncs(d): |
324 | code = [] | 382 | code = [] |
325 | for funcname in d.getVar("__BBANONFUNCS", False) or []: | 383 | for funcname in d.getVar("__BBANONFUNCS", False) or []: |
@@ -329,6 +387,10 @@ def runAnonFuncs(d): | |||
329 | def finalize(fn, d, variant = None): | 387 | def finalize(fn, d, variant = None): |
330 | saved_handlers = bb.event.get_handlers().copy() | 388 | saved_handlers = bb.event.get_handlers().copy() |
331 | try: | 389 | try: |
390 | # Found renamed variables. Exit immediately | ||
391 | if d.getVar("_FAILPARSINGERRORHANDLED", False) == True: | ||
392 | raise bb.BBHandledException() | ||
393 | |||
332 | for var in d.getVar('__BBHANDLERS', False) or []: | 394 | for var in d.getVar('__BBHANDLERS', False) or []: |
333 | # try to add the handler | 395 | # try to add the handler |
334 | handlerfn = d.getVarFlag(var, "filename", False) | 396 | handlerfn = d.getVarFlag(var, "filename", False) |
@@ -353,6 +415,9 @@ def finalize(fn, d, variant = None): | |||
353 | 415 | ||
354 | d.setVar('BBINCLUDED', bb.parse.get_file_depends(d)) | 416 | d.setVar('BBINCLUDED', bb.parse.get_file_depends(d)) |
355 | 417 | ||
418 | if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"): | ||
419 | bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.") | ||
420 | |||
356 | bb.event.fire(bb.event.RecipeParsed(fn), d) | 421 | bb.event.fire(bb.event.RecipeParsed(fn), d) |
357 | finally: | 422 | finally: |
358 | bb.event.set_handlers(saved_handlers) | 423 | bb.event.set_handlers(saved_handlers) |
@@ -379,6 +444,14 @@ def multi_finalize(fn, d): | |||
379 | logger.debug("Appending .bbappend file %s to %s", append, fn) | 444 | logger.debug("Appending .bbappend file %s to %s", append, fn) |
380 | bb.parse.BBHandler.handle(append, d, True) | 445 | bb.parse.BBHandler.handle(append, d, True) |
381 | 446 | ||
447 | while True: | ||
448 | inherits = d.getVar('__BBDEFINHERITS', False) or [] | ||
449 | if not inherits: | ||
450 | break | ||
451 | inherit, filename, lineno = inherits.pop(0) | ||
452 | d.setVar('__BBDEFINHERITS', inherits) | ||
453 | bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True) | ||
454 | |||
382 | onlyfinalise = d.getVar("__ONLYFINALISE", False) | 455 | onlyfinalise = d.getVar("__ONLYFINALISE", False) |
383 | 456 | ||
384 | safe_d = d | 457 | safe_d = d |
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index f8988b8631..c13e4b9755 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
@@ -19,11 +19,9 @@ from . import ConfHandler | |||
19 | from .. import resolve_file, ast, logger, ParseError | 19 | from .. import resolve_file, ast, logger, ParseError |
20 | from .ConfHandler import include, init | 20 | from .ConfHandler import include, init |
21 | 21 | ||
22 | # For compatibility | 22 | __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" ) |
23 | bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"]) | ||
24 | |||
25 | __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" ) | ||
26 | __inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) | 23 | __inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) |
24 | __inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" ) | ||
27 | __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) | 25 | __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) |
28 | __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") | 26 | __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") |
29 | __deltask_regexp__ = re.compile(r"deltask\s+(.+)") | 27 | __deltask_regexp__ = re.compile(r"deltask\s+(.+)") |
@@ -36,6 +34,7 @@ __infunc__ = [] | |||
36 | __inpython__ = False | 34 | __inpython__ = False |
37 | __body__ = [] | 35 | __body__ = [] |
38 | __classname__ = "" | 36 | __classname__ = "" |
37 | __residue__ = [] | ||
39 | 38 | ||
40 | cached_statements = {} | 39 | cached_statements = {} |
41 | 40 | ||
@@ -43,31 +42,46 @@ def supports(fn, d): | |||
43 | """Return True if fn has a supported extension""" | 42 | """Return True if fn has a supported extension""" |
44 | return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] | 43 | return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] |
45 | 44 | ||
46 | def inherit(files, fn, lineno, d): | 45 | def inherit(files, fn, lineno, d, deferred=False): |
47 | __inherit_cache = d.getVar('__inherit_cache', False) or [] | 46 | __inherit_cache = d.getVar('__inherit_cache', False) or [] |
47 | #if "${" in files and not deferred: | ||
48 | # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno)) | ||
48 | files = d.expand(files).split() | 49 | files = d.expand(files).split() |
49 | for file in files: | 50 | for file in files: |
50 | if not os.path.isabs(file) and not file.endswith(".bbclass"): | 51 | classtype = d.getVar("__bbclasstype", False) |
51 | file = os.path.join('classes', '%s.bbclass' % file) | 52 | origfile = file |
52 | 53 | for t in ["classes-" + classtype, "classes"]: | |
53 | if not os.path.isabs(file): | 54 | file = origfile |
54 | bbpath = d.getVar("BBPATH") | 55 | if not os.path.isabs(file) and not file.endswith(".bbclass"): |
55 | abs_fn, attempts = bb.utils.which(bbpath, file, history=True) | 56 | file = os.path.join(t, '%s.bbclass' % file) |
56 | for af in attempts: | 57 | |
57 | if af != abs_fn: | 58 | if not os.path.isabs(file): |
58 | bb.parse.mark_dependency(d, af) | 59 | bbpath = d.getVar("BBPATH") |
59 | if abs_fn: | 60 | abs_fn, attempts = bb.utils.which(bbpath, file, history=True) |
60 | file = abs_fn | 61 | for af in attempts: |
62 | if af != abs_fn: | ||
63 | bb.parse.mark_dependency(d, af) | ||
64 | if abs_fn: | ||
65 | file = abs_fn | ||
66 | |||
67 | if os.path.exists(file): | ||
68 | break | ||
69 | |||
70 | if not os.path.exists(file): | ||
71 | raise ParseError("Could not inherit file %s" % (file), fn, lineno) | ||
61 | 72 | ||
62 | if not file in __inherit_cache: | 73 | if not file in __inherit_cache: |
63 | logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) | 74 | logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) |
64 | __inherit_cache.append( file ) | 75 | __inherit_cache.append( file ) |
65 | d.setVar('__inherit_cache', __inherit_cache) | 76 | d.setVar('__inherit_cache', __inherit_cache) |
66 | include(fn, file, lineno, d, "inherit") | 77 | try: |
78 | bb.parse.handle(file, d, True) | ||
79 | except (IOError, OSError) as exc: | ||
80 | raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno) | ||
67 | __inherit_cache = d.getVar('__inherit_cache', False) or [] | 81 | __inherit_cache = d.getVar('__inherit_cache', False) or [] |
68 | 82 | ||
69 | def get_statements(filename, absolute_filename, base_name): | 83 | def get_statements(filename, absolute_filename, base_name): |
70 | global cached_statements | 84 | global cached_statements, __residue__, __body__ |
71 | 85 | ||
72 | try: | 86 | try: |
73 | return cached_statements[absolute_filename] | 87 | return cached_statements[absolute_filename] |
@@ -87,12 +101,17 @@ def get_statements(filename, absolute_filename, base_name): | |||
87 | # add a blank line to close out any python definition | 101 | # add a blank line to close out any python definition |
88 | feeder(lineno, "", filename, base_name, statements, eof=True) | 102 | feeder(lineno, "", filename, base_name, statements, eof=True) |
89 | 103 | ||
104 | if __residue__: | ||
105 | raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno) | ||
106 | if __body__: | ||
107 | raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno) | ||
108 | |||
90 | if filename.endswith(".bbclass") or filename.endswith(".inc"): | 109 | if filename.endswith(".bbclass") or filename.endswith(".inc"): |
91 | cached_statements[absolute_filename] = statements | 110 | cached_statements[absolute_filename] = statements |
92 | return statements | 111 | return statements |
93 | 112 | ||
94 | def handle(fn, d, include): | 113 | def handle(fn, d, include, baseconfig=False): |
95 | global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__ | 114 | global __infunc__, __body__, __residue__, __classname__ |
96 | __body__ = [] | 115 | __body__ = [] |
97 | __infunc__ = [] | 116 | __infunc__ = [] |
98 | __classname__ = "" | 117 | __classname__ = "" |
@@ -144,7 +163,7 @@ def handle(fn, d, include): | |||
144 | return d | 163 | return d |
145 | 164 | ||
146 | def feeder(lineno, s, fn, root, statements, eof=False): | 165 | def feeder(lineno, s, fn, root, statements, eof=False): |
147 | global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__ | 166 | global __inpython__, __infunc__, __body__, __residue__, __classname__ |
148 | 167 | ||
149 | # Check tabs in python functions: | 168 | # Check tabs in python functions: |
150 | # - def py_funcname(): covered by __inpython__ | 169 | # - def py_funcname(): covered by __inpython__ |
@@ -181,10 +200,10 @@ def feeder(lineno, s, fn, root, statements, eof=False): | |||
181 | 200 | ||
182 | if s and s[0] == '#': | 201 | if s and s[0] == '#': |
183 | if len(__residue__) != 0 and __residue__[0][0] != "#": | 202 | if len(__residue__) != 0 and __residue__[0][0] != "#": |
184 | bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s)) | 203 | bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s)) |
185 | 204 | ||
186 | if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"): | 205 | if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"): |
187 | bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) | 206 | bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__))) |
188 | 207 | ||
189 | if s and s[-1] == '\\': | 208 | if s and s[-1] == '\\': |
190 | __residue__.append(s[:-1]) | 209 | __residue__.append(s[:-1]) |
@@ -255,7 +274,12 @@ def feeder(lineno, s, fn, root, statements, eof=False): | |||
255 | ast.handleInherit(statements, fn, lineno, m) | 274 | ast.handleInherit(statements, fn, lineno, m) |
256 | return | 275 | return |
257 | 276 | ||
258 | return ConfHandler.feeder(lineno, s, fn, statements) | 277 | m = __inherit_def_regexp__.match(s) |
278 | if m: | ||
279 | ast.handleInheritDeferred(statements, fn, lineno, m) | ||
280 | return | ||
281 | |||
282 | return ConfHandler.feeder(lineno, s, fn, statements, conffile=False) | ||
259 | 283 | ||
260 | # Add us to the handlers list | 284 | # Add us to the handlers list |
261 | from .. import handlers | 285 | from .. import handlers |
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index f171c5c932..7826dee7d3 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
@@ -20,8 +20,8 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle | |||
20 | __config_regexp__ = re.compile( r""" | 20 | __config_regexp__ = re.compile( r""" |
21 | ^ | 21 | ^ |
22 | (?P<exp>export\s+)? | 22 | (?P<exp>export\s+)? |
23 | (?P<var>[a-zA-Z0-9\-_+.${}/~]+?) | 23 | (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?) |
24 | (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])? | 24 | (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])? |
25 | 25 | ||
26 | \s* ( | 26 | \s* ( |
27 | (?P<colon>:=) | | 27 | (?P<colon>:=) | |
@@ -45,13 +45,11 @@ __include_regexp__ = re.compile( r"include\s+(.+)" ) | |||
45 | __require_regexp__ = re.compile( r"require\s+(.+)" ) | 45 | __require_regexp__ = re.compile( r"require\s+(.+)" ) |
46 | __export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) | 46 | __export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) |
47 | __unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) | 47 | __unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) |
48 | __unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" ) | 48 | __unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" ) |
49 | __addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" ) | ||
49 | 50 | ||
50 | def init(data): | 51 | def init(data): |
51 | topdir = data.getVar('TOPDIR', False) | 52 | return |
52 | if not topdir: | ||
53 | data.setVar('TOPDIR', os.getcwd()) | ||
54 | |||
55 | 53 | ||
56 | def supports(fn, d): | 54 | def supports(fn, d): |
57 | return fn[-5:] == ".conf" | 55 | return fn[-5:] == ".conf" |
@@ -105,12 +103,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out): | |||
105 | # We have an issue where a UI might want to enforce particular settings such as | 103 | # We have an issue where a UI might want to enforce particular settings such as |
106 | # an empty DISTRO variable. If configuration files do something like assigning | 104 | # an empty DISTRO variable. If configuration files do something like assigning |
107 | # a weak default, it turns out to be very difficult to filter out these changes, | 105 | # a weak default, it turns out to be very difficult to filter out these changes, |
108 | # particularly when the weak default might appear half way though parsing a chain | 106 | # particularly when the weak default might appear half way though parsing a chain |
109 | # of configuration files. We therefore let the UIs hook into configuration file | 107 | # of configuration files. We therefore let the UIs hook into configuration file |
110 | # parsing. This turns out to be a hard problem to solve any other way. | 108 | # parsing. This turns out to be a hard problem to solve any other way. |
111 | confFilters = [] | 109 | confFilters = [] |
112 | 110 | ||
113 | def handle(fn, data, include): | 111 | def handle(fn, data, include, baseconfig=False): |
114 | init(data) | 112 | init(data) |
115 | 113 | ||
116 | if include == 0: | 114 | if include == 0: |
@@ -128,21 +126,26 @@ def handle(fn, data, include): | |||
128 | s = f.readline() | 126 | s = f.readline() |
129 | if not s: | 127 | if not s: |
130 | break | 128 | break |
129 | origlineno = lineno | ||
130 | origline = s | ||
131 | w = s.strip() | 131 | w = s.strip() |
132 | # skip empty lines | 132 | # skip empty lines |
133 | if not w: | 133 | if not w: |
134 | continue | 134 | continue |
135 | s = s.rstrip() | 135 | s = s.rstrip() |
136 | while s[-1] == '\\': | 136 | while s[-1] == '\\': |
137 | s2 = f.readline().rstrip() | 137 | line = f.readline() |
138 | origline += line | ||
139 | s2 = line.rstrip() | ||
138 | lineno = lineno + 1 | 140 | lineno = lineno + 1 |
139 | if (not s2 or s2 and s2[0] != "#") and s[0] == "#" : | 141 | if (not s2 or s2 and s2[0] != "#") and s[0] == "#" : |
140 | bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) | 142 | bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline)) |
143 | |||
141 | s = s[:-1] + s2 | 144 | s = s[:-1] + s2 |
142 | # skip comments | 145 | # skip comments |
143 | if s[0] == '#': | 146 | if s[0] == '#': |
144 | continue | 147 | continue |
145 | feeder(lineno, s, abs_fn, statements) | 148 | feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig) |
146 | 149 | ||
147 | # DONE WITH PARSING... time to evaluate | 150 | # DONE WITH PARSING... time to evaluate |
148 | data.setVar('FILE', abs_fn) | 151 | data.setVar('FILE', abs_fn) |
@@ -150,14 +153,14 @@ def handle(fn, data, include): | |||
150 | if oldfile: | 153 | if oldfile: |
151 | data.setVar('FILE', oldfile) | 154 | data.setVar('FILE', oldfile) |
152 | 155 | ||
153 | f.close() | ||
154 | |||
155 | for f in confFilters: | 156 | for f in confFilters: |
156 | f(fn, data) | 157 | f(fn, data) |
157 | 158 | ||
158 | return data | 159 | return data |
159 | 160 | ||
160 | def feeder(lineno, s, fn, statements): | 161 | # baseconfig is set for the bblayers/layer.conf cookerdata config parsing |
162 | # The function is also used by BBHandler, conffile would be False | ||
163 | def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True): | ||
161 | m = __config_regexp__.match(s) | 164 | m = __config_regexp__.match(s) |
162 | if m: | 165 | if m: |
163 | groupd = m.groupdict() | 166 | groupd = m.groupdict() |
@@ -189,6 +192,11 @@ def feeder(lineno, s, fn, statements): | |||
189 | ast.handleUnsetFlag(statements, fn, lineno, m) | 192 | ast.handleUnsetFlag(statements, fn, lineno, m) |
190 | return | 193 | return |
191 | 194 | ||
195 | m = __addpylib_regexp__.match(s) | ||
196 | if baseconfig and conffile and m: | ||
197 | ast.handlePyLib(statements, fn, lineno, m) | ||
198 | return | ||
199 | |||
192 | raise ParseError("unparsed line: '%s'" % s, fn, lineno); | 200 | raise ParseError("unparsed line: '%s'" % s, fn, lineno); |
193 | 201 | ||
194 | # Add us to the handlers list | 202 | # Add us to the handlers list |
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index c6a209fb3f..bcca791edf 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py | |||
@@ -12,14 +12,14 @@ currently, providing a key/value store accessed by 'domain'. | |||
12 | # | 12 | # |
13 | 13 | ||
14 | import collections | 14 | import collections |
15 | import collections.abc | ||
15 | import contextlib | 16 | import contextlib |
16 | import functools | 17 | import functools |
17 | import logging | 18 | import logging |
18 | import os.path | 19 | import os.path |
19 | import sqlite3 | 20 | import sqlite3 |
20 | import sys | 21 | import sys |
21 | import warnings | 22 | from collections.abc import Mapping |
22 | from collections import Mapping | ||
23 | 23 | ||
24 | sqlversion = sqlite3.sqlite_version_info | 24 | sqlversion = sqlite3.sqlite_version_info |
25 | if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | 25 | if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): |
@@ -29,7 +29,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | |||
29 | logger = logging.getLogger("BitBake.PersistData") | 29 | logger = logging.getLogger("BitBake.PersistData") |
30 | 30 | ||
31 | @functools.total_ordering | 31 | @functools.total_ordering |
32 | class SQLTable(collections.MutableMapping): | 32 | class SQLTable(collections.abc.MutableMapping): |
33 | class _Decorators(object): | 33 | class _Decorators(object): |
34 | @staticmethod | 34 | @staticmethod |
35 | def retry(*, reconnect=True): | 35 | def retry(*, reconnect=True): |
@@ -63,7 +63,7 @@ class SQLTable(collections.MutableMapping): | |||
63 | """ | 63 | """ |
64 | Decorator that starts a database transaction and creates a database | 64 | Decorator that starts a database transaction and creates a database |
65 | cursor for performing queries. If no exception is thrown, the | 65 | cursor for performing queries. If no exception is thrown, the |
66 | database results are commited. If an exception occurs, the database | 66 | database results are committed. If an exception occurs, the database |
67 | is rolled back. In all cases, the cursor is closed after the | 67 | is rolled back. In all cases, the cursor is closed after the |
68 | function ends. | 68 | function ends. |
69 | 69 | ||
@@ -208,7 +208,7 @@ class SQLTable(collections.MutableMapping): | |||
208 | 208 | ||
209 | def __lt__(self, other): | 209 | def __lt__(self, other): |
210 | if not isinstance(other, Mapping): | 210 | if not isinstance(other, Mapping): |
211 | raise NotImplemented | 211 | raise NotImplementedError() |
212 | 212 | ||
213 | return len(self) < len(other) | 213 | return len(self) < len(other) |
214 | 214 | ||
@@ -238,55 +238,6 @@ class SQLTable(collections.MutableMapping): | |||
238 | def has_key(self, key): | 238 | def has_key(self, key): |
239 | return key in self | 239 | return key in self |
240 | 240 | ||
241 | |||
242 | class PersistData(object): | ||
243 | """Deprecated representation of the bitbake persistent data store""" | ||
244 | def __init__(self, d): | ||
245 | warnings.warn("Use of PersistData is deprecated. Please use " | ||
246 | "persist(domain, d) instead.", | ||
247 | category=DeprecationWarning, | ||
248 | stacklevel=2) | ||
249 | |||
250 | self.data = persist(d) | ||
251 | logger.debug("Using '%s' as the persistent data cache", | ||
252 | self.data.filename) | ||
253 | |||
254 | def addDomain(self, domain): | ||
255 | """ | ||
256 | Add a domain (pending deprecation) | ||
257 | """ | ||
258 | return self.data[domain] | ||
259 | |||
260 | def delDomain(self, domain): | ||
261 | """ | ||
262 | Removes a domain and all the data it contains | ||
263 | """ | ||
264 | del self.data[domain] | ||
265 | |||
266 | def getKeyValues(self, domain): | ||
267 | """ | ||
268 | Return a list of key + value pairs for a domain | ||
269 | """ | ||
270 | return list(self.data[domain].items()) | ||
271 | |||
272 | def getValue(self, domain, key): | ||
273 | """ | ||
274 | Return the value of a key for a domain | ||
275 | """ | ||
276 | return self.data[domain][key] | ||
277 | |||
278 | def setValue(self, domain, key, value): | ||
279 | """ | ||
280 | Sets the value of a key for a domain | ||
281 | """ | ||
282 | self.data[domain][key] = value | ||
283 | |||
284 | def delValue(self, domain, key): | ||
285 | """ | ||
286 | Deletes a key/value pair | ||
287 | """ | ||
288 | del self.data[domain][key] | ||
289 | |||
290 | def persist(domain, d): | 241 | def persist(domain, d): |
291 | """Convenience factory for SQLTable objects based upon metadata""" | 242 | """Convenience factory for SQLTable objects based upon metadata""" |
292 | import bb.utils | 243 | import bb.utils |
@@ -298,4 +249,23 @@ def persist(domain, d): | |||
298 | 249 | ||
299 | bb.utils.mkdirhier(cachedir) | 250 | bb.utils.mkdirhier(cachedir) |
300 | cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") | 251 | cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") |
301 | return SQLTable(cachefile, domain) | 252 | |
253 | try: | ||
254 | return SQLTable(cachefile, domain) | ||
255 | except sqlite3.OperationalError: | ||
256 | # Sqlite fails to open database when its path is too long. | ||
257 | # After testing, 504 is the biggest path length that can be opened by | ||
258 | # sqlite. | ||
259 | # Note: This code is called before sanity.bbclass and its path length | ||
260 | # check | ||
261 | max_len = 504 | ||
262 | if len(cachefile) > max_len: | ||
263 | logger.critical("The path of the cache file is too long " | ||
264 | "({0} chars > {1}) to be opened by sqlite! " | ||
265 | "Your cache file is \"{2}\"".format( | ||
266 | len(cachefile), | ||
267 | max_len, | ||
268 | cachefile)) | ||
269 | sys.exit(1) | ||
270 | else: | ||
271 | raise | ||
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py index 7c3995cce5..4c7b6d39df 100644 --- a/bitbake/lib/bb/process.py +++ b/bitbake/lib/bb/process.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -60,7 +62,7 @@ class Popen(subprocess.Popen): | |||
60 | "close_fds": True, | 62 | "close_fds": True, |
61 | "preexec_fn": subprocess_setup, | 63 | "preexec_fn": subprocess_setup, |
62 | "stdout": subprocess.PIPE, | 64 | "stdout": subprocess.PIPE, |
63 | "stderr": subprocess.STDOUT, | 65 | "stderr": subprocess.PIPE, |
64 | "stdin": subprocess.PIPE, | 66 | "stdin": subprocess.PIPE, |
65 | "shell": False, | 67 | "shell": False, |
66 | } | 68 | } |
@@ -142,7 +144,7 @@ def _logged_communicate(pipe, log, input, extrafiles): | |||
142 | while pipe.poll() is None: | 144 | while pipe.poll() is None: |
143 | read_all_pipes(log, rin, outdata, errdata) | 145 | read_all_pipes(log, rin, outdata, errdata) |
144 | 146 | ||
145 | # Pocess closed, drain all pipes... | 147 | # Process closed, drain all pipes... |
146 | read_all_pipes(log, rin, outdata, errdata) | 148 | read_all_pipes(log, rin, outdata, errdata) |
147 | finally: | 149 | finally: |
148 | log.flush() | 150 | log.flush() |
@@ -181,5 +183,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options): | |||
181 | stderr = stderr.decode("utf-8") | 183 | stderr = stderr.decode("utf-8") |
182 | 184 | ||
183 | if pipe.returncode != 0: | 185 | if pipe.returncode != 0: |
186 | if log: | ||
187 | # Don't duplicate the output in the exception if logging it | ||
188 | raise ExecutionError(cmd, pipe.returncode, None, None) | ||
184 | raise ExecutionError(cmd, pipe.returncode, stdout, stderr) | 189 | raise ExecutionError(cmd, pipe.returncode, stdout, stderr) |
185 | return stdout, stderr | 190 | return stdout, stderr |
diff --git a/bitbake/lib/bb/progress.py b/bitbake/lib/bb/progress.py index d051ba0198..9518be77fb 100644 --- a/bitbake/lib/bb/progress.py +++ b/bitbake/lib/bb/progress.py | |||
@@ -94,12 +94,15 @@ class LineFilterProgressHandler(ProgressHandler): | |||
94 | while True: | 94 | while True: |
95 | breakpos = self._linebuffer.find('\n') + 1 | 95 | breakpos = self._linebuffer.find('\n') + 1 |
96 | if breakpos == 0: | 96 | if breakpos == 0: |
97 | break | 97 | # for the case when the line with progress ends with only '\r' |
98 | breakpos = self._linebuffer.find('\r') + 1 | ||
99 | if breakpos == 0: | ||
100 | break | ||
98 | line = self._linebuffer[:breakpos] | 101 | line = self._linebuffer[:breakpos] |
99 | self._linebuffer = self._linebuffer[breakpos:] | 102 | self._linebuffer = self._linebuffer[breakpos:] |
100 | # Drop any line feeds and anything that precedes them | 103 | # Drop any line feeds and anything that precedes them |
101 | lbreakpos = line.rfind('\r') + 1 | 104 | lbreakpos = line.rfind('\r') + 1 |
102 | if lbreakpos: | 105 | if lbreakpos and lbreakpos != breakpos: |
103 | line = line[lbreakpos:] | 106 | line = line[lbreakpos:] |
104 | if self.writeline(filter_color(line)): | 107 | if self.writeline(filter_color(line)): |
105 | super().write(line) | 108 | super().write(line) |
@@ -145,7 +148,7 @@ class MultiStageProgressReporter: | |||
145 | for tasks made up of python code spread across multiple | 148 | for tasks made up of python code spread across multiple |
146 | classes / functions - the progress reporter object can | 149 | classes / functions - the progress reporter object can |
147 | be passed around or stored at the object level and calls | 150 | be passed around or stored at the object level and calls |
148 | to next_stage() and update() made whereever needed. | 151 | to next_stage() and update() made wherever needed. |
149 | """ | 152 | """ |
150 | def __init__(self, d, stage_weights, debug=False): | 153 | def __init__(self, d, stage_weights, debug=False): |
151 | """ | 154 | """ |
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py index b5a6cd0090..e11a4637d1 100644 --- a/bitbake/lib/bb/providers.py +++ b/bitbake/lib/bb/providers.py | |||
@@ -38,16 +38,17 @@ def findProviders(cfgData, dataCache, pkg_pn = None): | |||
38 | localdata = data.createCopy(cfgData) | 38 | localdata = data.createCopy(cfgData) |
39 | bb.data.expandKeys(localdata) | 39 | bb.data.expandKeys(localdata) |
40 | 40 | ||
41 | required = {} | ||
41 | preferred_versions = {} | 42 | preferred_versions = {} |
42 | latest_versions = {} | 43 | latest_versions = {} |
43 | 44 | ||
44 | for pn in pkg_pn: | 45 | for pn in pkg_pn: |
45 | (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn) | 46 | (last_ver, last_file, pref_ver, pref_file, req) = findBestProvider(pn, localdata, dataCache, pkg_pn) |
46 | preferred_versions[pn] = (pref_ver, pref_file) | 47 | preferred_versions[pn] = (pref_ver, pref_file) |
47 | latest_versions[pn] = (last_ver, last_file) | 48 | latest_versions[pn] = (last_ver, last_file) |
49 | required[pn] = req | ||
48 | 50 | ||
49 | return (latest_versions, preferred_versions) | 51 | return (latest_versions, preferred_versions, required) |
50 | |||
51 | 52 | ||
52 | def allProviders(dataCache): | 53 | def allProviders(dataCache): |
53 | """ | 54 | """ |
@@ -59,7 +60,6 @@ def allProviders(dataCache): | |||
59 | all_providers[pn].append((ver, fn)) | 60 | all_providers[pn].append((ver, fn)) |
60 | return all_providers | 61 | return all_providers |
61 | 62 | ||
62 | |||
63 | def sortPriorities(pn, dataCache, pkg_pn = None): | 63 | def sortPriorities(pn, dataCache, pkg_pn = None): |
64 | """ | 64 | """ |
65 | Reorder pkg_pn by file priority and default preference | 65 | Reorder pkg_pn by file priority and default preference |
@@ -87,6 +87,21 @@ def sortPriorities(pn, dataCache, pkg_pn = None): | |||
87 | 87 | ||
88 | return tmp_pn | 88 | return tmp_pn |
89 | 89 | ||
90 | def versionVariableMatch(cfgData, keyword, pn): | ||
91 | """ | ||
92 | Return the value of the <keyword>_VERSION variable if set. | ||
93 | """ | ||
94 | |||
95 | # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot | ||
96 | # hence we do this manually rather than use OVERRIDES | ||
97 | ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn)) | ||
98 | if not ver: | ||
99 | ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn)) | ||
100 | if not ver: | ||
101 | ver = cfgData.getVar("%s_VERSION" % keyword) | ||
102 | |||
103 | return ver | ||
104 | |||
90 | def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): | 105 | def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): |
91 | """ | 106 | """ |
92 | Check if the version pe,pv,pr is the preferred one. | 107 | Check if the version pe,pv,pr is the preferred one. |
@@ -102,19 +117,28 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): | |||
102 | 117 | ||
103 | def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | 118 | def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): |
104 | """ | 119 | """ |
105 | Find the first provider in pkg_pn with a PREFERRED_VERSION set. | 120 | Find the first provider in pkg_pn with REQUIRED_VERSION or PREFERRED_VERSION set. |
106 | """ | 121 | """ |
107 | 122 | ||
108 | preferred_file = None | 123 | preferred_file = None |
109 | preferred_ver = None | 124 | preferred_ver = None |
125 | required = False | ||
110 | 126 | ||
111 | # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot | 127 | required_v = versionVariableMatch(cfgData, "REQUIRED", pn) |
112 | # hence we do this manually rather than use OVERRIDES | 128 | preferred_v = versionVariableMatch(cfgData, "PREFERRED", pn) |
113 | preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) | 129 | |
114 | if not preferred_v: | 130 | itemstr = "" |
115 | preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) | 131 | if item: |
116 | if not preferred_v: | 132 | itemstr = " (for item %s)" % item |
117 | preferred_v = cfgData.getVar("PREFERRED_VERSION") | 133 | |
134 | if required_v is not None: | ||
135 | if preferred_v is not None: | ||
136 | logger.warning("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v) | ||
137 | else: | ||
138 | logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr) | ||
139 | # REQUIRED_VERSION always takes precedence over PREFERRED_VERSION | ||
140 | preferred_v = required_v | ||
141 | required = True | ||
118 | 142 | ||
119 | if preferred_v: | 143 | if preferred_v: |
120 | m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v) | 144 | m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v) |
@@ -147,11 +171,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
147 | pv_str = preferred_v | 171 | pv_str = preferred_v |
148 | if not (preferred_e is None): | 172 | if not (preferred_e is None): |
149 | pv_str = '%s:%s' % (preferred_e, pv_str) | 173 | pv_str = '%s:%s' % (preferred_e, pv_str) |
150 | itemstr = "" | ||
151 | if item: | ||
152 | itemstr = " (for item %s)" % item | ||
153 | if preferred_file is None: | 174 | if preferred_file is None: |
154 | logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr) | 175 | if not required: |
176 | logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr) | ||
155 | available_vers = [] | 177 | available_vers = [] |
156 | for file_set in pkg_pn: | 178 | for file_set in pkg_pn: |
157 | for f in file_set: | 179 | for f in file_set: |
@@ -163,12 +185,16 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
163 | available_vers.append(ver_str) | 185 | available_vers.append(ver_str) |
164 | if available_vers: | 186 | if available_vers: |
165 | available_vers.sort() | 187 | available_vers.sort() |
166 | logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) | 188 | logger.warning("versions of %s available: %s", pn, ' '.join(available_vers)) |
189 | if required: | ||
190 | logger.error("required version %s of %s not available%s", pv_str, pn, itemstr) | ||
167 | else: | 191 | else: |
168 | logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) | 192 | if required: |
169 | 193 | logger.debug("selecting %s as REQUIRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) | |
170 | return (preferred_ver, preferred_file) | 194 | else: |
195 | logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) | ||
171 | 196 | ||
197 | return (preferred_ver, preferred_file, required) | ||
172 | 198 | ||
173 | def findLatestProvider(pn, cfgData, dataCache, file_set): | 199 | def findLatestProvider(pn, cfgData, dataCache, file_set): |
174 | """ | 200 | """ |
@@ -189,7 +215,6 @@ def findLatestProvider(pn, cfgData, dataCache, file_set): | |||
189 | 215 | ||
190 | return (latest, latest_f) | 216 | return (latest, latest_f) |
191 | 217 | ||
192 | |||
193 | def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | 218 | def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): |
194 | """ | 219 | """ |
195 | If there is a PREFERRED_VERSION, find the highest-priority bbfile | 220 | If there is a PREFERRED_VERSION, find the highest-priority bbfile |
@@ -198,17 +223,16 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
198 | """ | 223 | """ |
199 | 224 | ||
200 | sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) | 225 | sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) |
201 | # Find the highest priority provider with a PREFERRED_VERSION set | 226 | # Find the highest priority provider with a REQUIRED_VERSION or PREFERRED_VERSION set |
202 | (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) | 227 | (preferred_ver, preferred_file, required) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) |
203 | # Find the latest version of the highest priority provider | 228 | # Find the latest version of the highest priority provider |
204 | (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) | 229 | (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) |
205 | 230 | ||
206 | if preferred_file is None: | 231 | if not required and preferred_file is None: |
207 | preferred_file = latest_f | 232 | preferred_file = latest_f |
208 | preferred_ver = latest | 233 | preferred_ver = latest |
209 | 234 | ||
210 | return (latest, latest_f, preferred_ver, preferred_file) | 235 | return (latest, latest_f, preferred_ver, preferred_file, required) |
211 | |||
212 | 236 | ||
213 | def _filterProviders(providers, item, cfgData, dataCache): | 237 | def _filterProviders(providers, item, cfgData, dataCache): |
214 | """ | 238 | """ |
@@ -234,10 +258,13 @@ def _filterProviders(providers, item, cfgData, dataCache): | |||
234 | 258 | ||
235 | logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) | 259 | logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) |
236 | 260 | ||
237 | # First add PREFERRED_VERSIONS | 261 | # First add REQUIRED_VERSIONS or PREFERRED_VERSIONS |
238 | for pn in sorted(pkg_pn): | 262 | for pn in sorted(pkg_pn): |
239 | sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) | 263 | sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) |
240 | preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) | 264 | preferred_ver, preferred_file, required = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) |
265 | if required and preferred_file is None: | ||
266 | return eligible | ||
267 | preferred_versions[pn] = (preferred_ver, preferred_file) | ||
241 | if preferred_versions[pn][1]: | 268 | if preferred_versions[pn][1]: |
242 | eligible.append(preferred_versions[pn][1]) | 269 | eligible.append(preferred_versions[pn][1]) |
243 | 270 | ||
@@ -249,7 +276,6 @@ def _filterProviders(providers, item, cfgData, dataCache): | |||
249 | eligible.append(preferred_versions[pn][1]) | 276 | eligible.append(preferred_versions[pn][1]) |
250 | 277 | ||
251 | if not eligible: | 278 | if not eligible: |
252 | logger.error("no eligible providers for %s", item) | ||
253 | return eligible | 279 | return eligible |
254 | 280 | ||
255 | # If pn == item, give it a slight default preference | 281 | # If pn == item, give it a slight default preference |
@@ -266,7 +292,6 @@ def _filterProviders(providers, item, cfgData, dataCache): | |||
266 | 292 | ||
267 | return eligible | 293 | return eligible |
268 | 294 | ||
269 | |||
270 | def filterProviders(providers, item, cfgData, dataCache): | 295 | def filterProviders(providers, item, cfgData, dataCache): |
271 | """ | 296 | """ |
272 | Take a list of providers and filter/reorder according to the | 297 | Take a list of providers and filter/reorder according to the |
@@ -371,8 +396,8 @@ def getRuntimeProviders(dataCache, rdepend): | |||
371 | return rproviders | 396 | return rproviders |
372 | 397 | ||
373 | # Only search dynamic packages if we can't find anything in other variables | 398 | # Only search dynamic packages if we can't find anything in other variables |
374 | for pattern in dataCache.packages_dynamic: | 399 | for pat_key in dataCache.packages_dynamic: |
375 | pattern = pattern.replace(r'+', r"\+") | 400 | pattern = pat_key.replace(r'+', r"\+") |
376 | if pattern in regexp_cache: | 401 | if pattern in regexp_cache: |
377 | regexp = regexp_cache[pattern] | 402 | regexp = regexp_cache[pattern] |
378 | else: | 403 | else: |
@@ -383,12 +408,11 @@ def getRuntimeProviders(dataCache, rdepend): | |||
383 | raise | 408 | raise |
384 | regexp_cache[pattern] = regexp | 409 | regexp_cache[pattern] = regexp |
385 | if regexp.match(rdepend): | 410 | if regexp.match(rdepend): |
386 | rproviders += dataCache.packages_dynamic[pattern] | 411 | rproviders += dataCache.packages_dynamic[pat_key] |
387 | logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) | 412 | logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) |
388 | 413 | ||
389 | return rproviders | 414 | return rproviders |
390 | 415 | ||
391 | |||
392 | def buildWorldTargetList(dataCache, task=None): | 416 | def buildWorldTargetList(dataCache, task=None): |
393 | """ | 417 | """ |
394 | Build package list for "bitbake world" | 418 | Build package list for "bitbake world" |
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 54ef245a63..bc7e18175d 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
@@ -24,6 +24,7 @@ import pickle | |||
24 | from multiprocessing import Process | 24 | from multiprocessing import Process |
25 | import shlex | 25 | import shlex |
26 | import pprint | 26 | import pprint |
27 | import time | ||
27 | 28 | ||
28 | bblogger = logging.getLogger("BitBake") | 29 | bblogger = logging.getLogger("BitBake") |
29 | logger = logging.getLogger("BitBake.RunQueue") | 30 | logger = logging.getLogger("BitBake.RunQueue") |
@@ -85,15 +86,19 @@ class RunQueueStats: | |||
85 | """ | 86 | """ |
86 | Holds statistics on the tasks handled by the associated runQueue | 87 | Holds statistics on the tasks handled by the associated runQueue |
87 | """ | 88 | """ |
88 | def __init__(self, total): | 89 | def __init__(self, total, setscene_total): |
89 | self.completed = 0 | 90 | self.completed = 0 |
90 | self.skipped = 0 | 91 | self.skipped = 0 |
91 | self.failed = 0 | 92 | self.failed = 0 |
92 | self.active = 0 | 93 | self.active = 0 |
94 | self.setscene_active = 0 | ||
95 | self.setscene_covered = 0 | ||
96 | self.setscene_notcovered = 0 | ||
97 | self.setscene_total = setscene_total | ||
93 | self.total = total | 98 | self.total = total |
94 | 99 | ||
95 | def copy(self): | 100 | def copy(self): |
96 | obj = self.__class__(self.total) | 101 | obj = self.__class__(self.total, self.setscene_total) |
97 | obj.__dict__.update(self.__dict__) | 102 | obj.__dict__.update(self.__dict__) |
98 | return obj | 103 | return obj |
99 | 104 | ||
@@ -112,6 +117,13 @@ class RunQueueStats: | |||
112 | def taskActive(self): | 117 | def taskActive(self): |
113 | self.active = self.active + 1 | 118 | self.active = self.active + 1 |
114 | 119 | ||
120 | def updateCovered(self, covered, notcovered): | ||
121 | self.setscene_covered = covered | ||
122 | self.setscene_notcovered = notcovered | ||
123 | |||
124 | def updateActiveSetscene(self, active): | ||
125 | self.setscene_active = active | ||
126 | |||
115 | # These values indicate the next step due to be run in the | 127 | # These values indicate the next step due to be run in the |
116 | # runQueue state machine | 128 | # runQueue state machine |
117 | runQueuePrepare = 2 | 129 | runQueuePrepare = 2 |
@@ -143,11 +155,82 @@ class RunQueueScheduler(object): | |||
143 | self.stamps = {} | 155 | self.stamps = {} |
144 | for tid in self.rqdata.runtaskentries: | 156 | for tid in self.rqdata.runtaskentries: |
145 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 157 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
146 | self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) | 158 | self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False) |
147 | if tid in self.rq.runq_buildable: | 159 | if tid in self.rq.runq_buildable: |
148 | self.buildable.append(tid) | 160 | self.buildable.add(tid) |
149 | 161 | ||
150 | self.rev_prio_map = None | 162 | self.rev_prio_map = None |
163 | self.is_pressure_usable() | ||
164 | |||
165 | def is_pressure_usable(self): | ||
166 | """ | ||
167 | If monitoring pressure, return True if pressure files can be open and read. For example | ||
168 | openSUSE /proc/pressure/* files have readable file permissions but when read the error EOPNOTSUPP (Operation not supported) | ||
169 | is returned. | ||
170 | """ | ||
171 | if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure: | ||
172 | try: | ||
173 | with open("/proc/pressure/cpu") as cpu_pressure_fds, \ | ||
174 | open("/proc/pressure/io") as io_pressure_fds, \ | ||
175 | open("/proc/pressure/memory") as memory_pressure_fds: | ||
176 | |||
177 | self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1] | ||
178 | self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1] | ||
179 | self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1] | ||
180 | self.prev_pressure_time = time.time() | ||
181 | self.check_pressure = True | ||
182 | except: | ||
183 | bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure") | ||
184 | self.check_pressure = False | ||
185 | else: | ||
186 | self.check_pressure = False | ||
187 | |||
188 | def exceeds_max_pressure(self): | ||
189 | """ | ||
190 | Monitor the difference in total pressure at least once per second, if | ||
191 | BB_PRESSURE_MAX_{CPU|IO|MEMORY} are set, return True if above threshold. | ||
192 | """ | ||
193 | if self.check_pressure: | ||
194 | with open("/proc/pressure/cpu") as cpu_pressure_fds, \ | ||
195 | open("/proc/pressure/io") as io_pressure_fds, \ | ||
196 | open("/proc/pressure/memory") as memory_pressure_fds: | ||
197 | # extract "total" from /proc/pressure/{cpu|io} | ||
198 | curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1] | ||
199 | curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1] | ||
200 | curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1] | ||
201 | now = time.time() | ||
202 | tdiff = now - self.prev_pressure_time | ||
203 | psi_accumulation_interval = 1.0 | ||
204 | cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff | ||
205 | io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff | ||
206 | memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff | ||
207 | exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure | ||
208 | exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure | ||
209 | exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure | ||
210 | |||
211 | if tdiff > psi_accumulation_interval: | ||
212 | self.prev_cpu_pressure = curr_cpu_pressure | ||
213 | self.prev_io_pressure = curr_io_pressure | ||
214 | self.prev_memory_pressure = curr_memory_pressure | ||
215 | self.prev_pressure_time = now | ||
216 | |||
217 | pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure) | ||
218 | pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure) | ||
219 | if hasattr(self, "pressure_state") and pressure_state != self.pressure_state: | ||
220 | bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))) | ||
221 | self.pressure_state = pressure_state | ||
222 | return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure) | ||
223 | elif self.rq.max_loadfactor: | ||
224 | limit = False | ||
225 | loadfactor = float(os.getloadavg()[0]) / os.cpu_count() | ||
226 | # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor)) | ||
227 | if loadfactor > self.rq.max_loadfactor: | ||
228 | limit = True | ||
229 | if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit: | ||
230 | bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)) | ||
231 | self.loadfactor_limit = limit | ||
232 | return limit | ||
233 | return False | ||
151 | 234 | ||
152 | def next_buildable_task(self): | 235 | def next_buildable_task(self): |
153 | """ | 236 | """ |
@@ -161,6 +244,12 @@ class RunQueueScheduler(object): | |||
161 | if not buildable: | 244 | if not buildable: |
162 | return None | 245 | return None |
163 | 246 | ||
247 | # Bitbake requires that at least one task be active. Only check for pressure if | ||
248 | # this is the case, otherwise the pressure limitation could result in no tasks | ||
249 | # being active and no new tasks started thereby, at times, breaking the scheduler. | ||
250 | if self.rq.stats.active and self.exceeds_max_pressure(): | ||
251 | return None | ||
252 | |||
164 | # Filter out tasks that have a max number of threads that have been exceeded | 253 | # Filter out tasks that have a max number of threads that have been exceeded |
165 | skip_buildable = {} | 254 | skip_buildable = {} |
166 | for running in self.rq.runq_running.difference(self.rq.runq_complete): | 255 | for running in self.rq.runq_running.difference(self.rq.runq_complete): |
@@ -191,11 +280,11 @@ class RunQueueScheduler(object): | |||
191 | best = None | 280 | best = None |
192 | bestprio = None | 281 | bestprio = None |
193 | for tid in buildable: | 282 | for tid in buildable: |
194 | taskname = taskname_from_tid(tid) | ||
195 | if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]): | ||
196 | continue | ||
197 | prio = self.rev_prio_map[tid] | 283 | prio = self.rev_prio_map[tid] |
198 | if bestprio is None or bestprio > prio: | 284 | if bestprio is None or bestprio > prio: |
285 | taskname = taskname_from_tid(tid) | ||
286 | if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]): | ||
287 | continue | ||
199 | stamp = self.stamps[tid] | 288 | stamp = self.stamps[tid] |
200 | if stamp in self.rq.build_stamps.values(): | 289 | if stamp in self.rq.build_stamps.values(): |
201 | continue | 290 | continue |
@@ -374,10 +463,9 @@ class RunQueueData: | |||
374 | self.rq = rq | 463 | self.rq = rq |
375 | self.warn_multi_bb = False | 464 | self.warn_multi_bb = False |
376 | 465 | ||
377 | self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or "" | 466 | self.multi_provider_allowed = (cfgData.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split() |
378 | self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split() | 467 | self.setscene_ignore_tasks = get_setscene_enforce_ignore_tasks(cfgData, targets) |
379 | self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets) | 468 | self.setscene_ignore_tasks_checked = False |
380 | self.setscenewhitelist_checked = False | ||
381 | self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1") | 469 | self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1") |
382 | self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() | 470 | self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() |
383 | 471 | ||
@@ -475,7 +563,7 @@ class RunQueueData: | |||
475 | msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends))) | 563 | msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends))) |
476 | msgs.append("\n") | 564 | msgs.append("\n") |
477 | if len(valid_chains) > 10: | 565 | if len(valid_chains) > 10: |
478 | msgs.append("Aborted dependency loops search after 10 matches.\n") | 566 | msgs.append("Halted dependency loops search after 10 matches.\n") |
479 | raise TooManyLoops | 567 | raise TooManyLoops |
480 | continue | 568 | continue |
481 | scan = False | 569 | scan = False |
@@ -536,7 +624,7 @@ class RunQueueData: | |||
536 | next_points.append(revdep) | 624 | next_points.append(revdep) |
537 | task_done[revdep] = True | 625 | task_done[revdep] = True |
538 | endpoints = next_points | 626 | endpoints = next_points |
539 | if len(next_points) == 0: | 627 | if not next_points: |
540 | break | 628 | break |
541 | 629 | ||
542 | # Circular dependency sanity check | 630 | # Circular dependency sanity check |
@@ -578,15 +666,18 @@ class RunQueueData: | |||
578 | 666 | ||
579 | found = False | 667 | found = False |
580 | for mc in self.taskData: | 668 | for mc in self.taskData: |
581 | if len(taskData[mc].taskentries) > 0: | 669 | if taskData[mc].taskentries: |
582 | found = True | 670 | found = True |
583 | break | 671 | break |
584 | if not found: | 672 | if not found: |
585 | # Nothing to do | 673 | # Nothing to do |
586 | return 0 | 674 | return 0 |
587 | 675 | ||
676 | bb.parse.siggen.setup_datacache(self.dataCaches) | ||
677 | |||
588 | self.init_progress_reporter.start() | 678 | self.init_progress_reporter.start() |
589 | self.init_progress_reporter.next_stage() | 679 | self.init_progress_reporter.next_stage() |
680 | bb.event.check_for_interrupts(self.cooker.data) | ||
590 | 681 | ||
591 | # Step A - Work out a list of tasks to run | 682 | # Step A - Work out a list of tasks to run |
592 | # | 683 | # |
@@ -632,6 +723,8 @@ class RunQueueData: | |||
632 | frommc = mcdependency[1] | 723 | frommc = mcdependency[1] |
633 | mcdep = mcdependency[2] | 724 | mcdep = mcdependency[2] |
634 | deptask = mcdependency[4] | 725 | deptask = mcdependency[4] |
726 | if mcdep not in taskData: | ||
727 | bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep)) | ||
635 | if mc == frommc: | 728 | if mc == frommc: |
636 | fn = taskData[mcdep].build_targets[pn][0] | 729 | fn = taskData[mcdep].build_targets[pn][0] |
637 | newdep = '%s:%s' % (fn,deptask) | 730 | newdep = '%s:%s' % (fn,deptask) |
@@ -733,6 +826,7 @@ class RunQueueData: | |||
733 | #self.dump_data() | 826 | #self.dump_data() |
734 | 827 | ||
735 | self.init_progress_reporter.next_stage() | 828 | self.init_progress_reporter.next_stage() |
829 | bb.event.check_for_interrupts(self.cooker.data) | ||
736 | 830 | ||
737 | # Resolve recursive 'recrdeptask' dependencies (Part B) | 831 | # Resolve recursive 'recrdeptask' dependencies (Part B) |
738 | # | 832 | # |
@@ -762,7 +856,7 @@ class RunQueueData: | |||
762 | # Find the dependency chain endpoints | 856 | # Find the dependency chain endpoints |
763 | endpoints = set() | 857 | endpoints = set() |
764 | for tid in self.runtaskentries: | 858 | for tid in self.runtaskentries: |
765 | if len(deps[tid]) == 0: | 859 | if not deps[tid]: |
766 | endpoints.add(tid) | 860 | endpoints.add(tid) |
767 | # Iterate the chains collating dependencies | 861 | # Iterate the chains collating dependencies |
768 | while endpoints: | 862 | while endpoints: |
@@ -773,11 +867,11 @@ class RunQueueData: | |||
773 | cumulativedeps[dep].update(cumulativedeps[tid]) | 867 | cumulativedeps[dep].update(cumulativedeps[tid]) |
774 | if tid in deps[dep]: | 868 | if tid in deps[dep]: |
775 | deps[dep].remove(tid) | 869 | deps[dep].remove(tid) |
776 | if len(deps[dep]) == 0: | 870 | if not deps[dep]: |
777 | next.add(dep) | 871 | next.add(dep) |
778 | endpoints = next | 872 | endpoints = next |
779 | #for tid in deps: | 873 | #for tid in deps: |
780 | # if len(deps[tid]) != 0: | 874 | # if deps[tid]: |
781 | # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid])) | 875 | # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid])) |
782 | 876 | ||
783 | # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to | 877 | # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to |
@@ -829,6 +923,7 @@ class RunQueueData: | |||
829 | self.runtaskentries[tid].depends.difference_update(recursivetasksselfref) | 923 | self.runtaskentries[tid].depends.difference_update(recursivetasksselfref) |
830 | 924 | ||
831 | self.init_progress_reporter.next_stage() | 925 | self.init_progress_reporter.next_stage() |
926 | bb.event.check_for_interrupts(self.cooker.data) | ||
832 | 927 | ||
833 | #self.dump_data() | 928 | #self.dump_data() |
834 | 929 | ||
@@ -867,7 +962,7 @@ class RunQueueData: | |||
867 | bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname) | 962 | bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname) |
868 | else: | 963 | else: |
869 | logger.verbose("Invalidate task %s, %s", taskname, fn) | 964 | logger.verbose("Invalidate task %s, %s", taskname, fn) |
870 | bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn) | 965 | bb.parse.siggen.invalidate_task(taskname, taskfn) |
871 | 966 | ||
872 | self.target_tids = [] | 967 | self.target_tids = [] |
873 | for (mc, target, task, fn) in self.targets: | 968 | for (mc, target, task, fn) in self.targets: |
@@ -910,47 +1005,54 @@ class RunQueueData: | |||
910 | mark_active(tid, 1) | 1005 | mark_active(tid, 1) |
911 | 1006 | ||
912 | self.init_progress_reporter.next_stage() | 1007 | self.init_progress_reporter.next_stage() |
1008 | bb.event.check_for_interrupts(self.cooker.data) | ||
913 | 1009 | ||
914 | # Step C - Prune all inactive tasks | 1010 | # Step C - Prune all inactive tasks |
915 | # | 1011 | # |
916 | # Once all active tasks are marked, prune the ones we don't need. | 1012 | # Once all active tasks are marked, prune the ones we don't need. |
917 | 1013 | ||
918 | delcount = {} | ||
919 | for tid in list(self.runtaskentries.keys()): | ||
920 | if tid not in runq_build: | ||
921 | delcount[tid] = self.runtaskentries[tid] | ||
922 | del self.runtaskentries[tid] | ||
923 | |||
924 | # Handle --runall | 1014 | # Handle --runall |
925 | if self.cooker.configuration.runall: | 1015 | if self.cooker.configuration.runall: |
926 | # re-run the mark_active and then drop unused tasks from new list | 1016 | # re-run the mark_active and then drop unused tasks from new list |
927 | runq_build = {} | ||
928 | 1017 | ||
929 | for task in self.cooker.configuration.runall: | 1018 | runall_tids = set() |
930 | if not task.startswith("do_"): | 1019 | added = True |
931 | task = "do_{0}".format(task) | 1020 | while added: |
932 | runall_tids = set() | 1021 | reduced_tasklist = set(self.runtaskentries.keys()) |
933 | for tid in list(self.runtaskentries): | 1022 | for tid in list(self.runtaskentries.keys()): |
934 | wanttid = "{0}:{1}".format(fn_from_tid(tid), task) | 1023 | if tid not in runq_build: |
935 | if wanttid in delcount: | 1024 | reduced_tasklist.remove(tid) |
936 | self.runtaskentries[wanttid] = delcount[wanttid] | 1025 | runq_build = {} |
937 | if wanttid in self.runtaskentries: | ||
938 | runall_tids.add(wanttid) | ||
939 | |||
940 | for tid in list(runall_tids): | ||
941 | mark_active(tid,1) | ||
942 | if self.cooker.configuration.force: | ||
943 | invalidate_task(tid, False) | ||
944 | 1026 | ||
945 | for tid in list(self.runtaskentries.keys()): | 1027 | orig = runall_tids |
946 | if tid not in runq_build: | 1028 | runall_tids = set() |
947 | delcount[tid] = self.runtaskentries[tid] | 1029 | for task in self.cooker.configuration.runall: |
948 | del self.runtaskentries[tid] | 1030 | if not task.startswith("do_"): |
1031 | task = "do_{0}".format(task) | ||
1032 | for tid in reduced_tasklist: | ||
1033 | wanttid = "{0}:{1}".format(fn_from_tid(tid), task) | ||
1034 | if wanttid in self.runtaskentries: | ||
1035 | runall_tids.add(wanttid) | ||
1036 | |||
1037 | for tid in list(runall_tids): | ||
1038 | mark_active(tid, 1) | ||
1039 | self.target_tids.append(tid) | ||
1040 | if self.cooker.configuration.force: | ||
1041 | invalidate_task(tid, False) | ||
1042 | added = runall_tids - orig | ||
1043 | |||
1044 | delcount = set() | ||
1045 | for tid in list(self.runtaskentries.keys()): | ||
1046 | if tid not in runq_build: | ||
1047 | delcount.add(tid) | ||
1048 | del self.runtaskentries[tid] | ||
949 | 1049 | ||
950 | if len(self.runtaskentries) == 0: | 1050 | if self.cooker.configuration.runall: |
1051 | if not self.runtaskentries: | ||
951 | bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets))) | 1052 | bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets))) |
952 | 1053 | ||
953 | self.init_progress_reporter.next_stage() | 1054 | self.init_progress_reporter.next_stage() |
1055 | bb.event.check_for_interrupts(self.cooker.data) | ||
954 | 1056 | ||
955 | # Handle runonly | 1057 | # Handle runonly |
956 | if self.cooker.configuration.runonly: | 1058 | if self.cooker.configuration.runonly: |
@@ -960,19 +1062,19 @@ class RunQueueData: | |||
960 | for task in self.cooker.configuration.runonly: | 1062 | for task in self.cooker.configuration.runonly: |
961 | if not task.startswith("do_"): | 1063 | if not task.startswith("do_"): |
962 | task = "do_{0}".format(task) | 1064 | task = "do_{0}".format(task) |
963 | runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task } | 1065 | runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task] |
964 | 1066 | ||
965 | for tid in list(runonly_tids): | 1067 | for tid in runonly_tids: |
966 | mark_active(tid,1) | 1068 | mark_active(tid, 1) |
967 | if self.cooker.configuration.force: | 1069 | if self.cooker.configuration.force: |
968 | invalidate_task(tid, False) | 1070 | invalidate_task(tid, False) |
969 | 1071 | ||
970 | for tid in list(self.runtaskentries.keys()): | 1072 | for tid in list(self.runtaskentries.keys()): |
971 | if tid not in runq_build: | 1073 | if tid not in runq_build: |
972 | delcount[tid] = self.runtaskentries[tid] | 1074 | delcount.add(tid) |
973 | del self.runtaskentries[tid] | 1075 | del self.runtaskentries[tid] |
974 | 1076 | ||
975 | if len(self.runtaskentries) == 0: | 1077 | if not self.runtaskentries: |
976 | bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets))) | 1078 | bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets))) |
977 | 1079 | ||
978 | # | 1080 | # |
@@ -980,8 +1082,8 @@ class RunQueueData: | |||
980 | # | 1082 | # |
981 | 1083 | ||
982 | # Check to make sure we still have tasks to run | 1084 | # Check to make sure we still have tasks to run |
983 | if len(self.runtaskentries) == 0: | 1085 | if not self.runtaskentries: |
984 | if not taskData[''].abort: | 1086 | if not taskData[''].halt: |
985 | bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.") | 1087 | bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.") |
986 | else: | 1088 | else: |
987 | bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.") | 1089 | bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.") |
@@ -991,6 +1093,7 @@ class RunQueueData: | |||
991 | logger.verbose("Assign Weightings") | 1093 | logger.verbose("Assign Weightings") |
992 | 1094 | ||
993 | self.init_progress_reporter.next_stage() | 1095 | self.init_progress_reporter.next_stage() |
1096 | bb.event.check_for_interrupts(self.cooker.data) | ||
994 | 1097 | ||
995 | # Generate a list of reverse dependencies to ease future calculations | 1098 | # Generate a list of reverse dependencies to ease future calculations |
996 | for tid in self.runtaskentries: | 1099 | for tid in self.runtaskentries: |
@@ -998,13 +1101,14 @@ class RunQueueData: | |||
998 | self.runtaskentries[dep].revdeps.add(tid) | 1101 | self.runtaskentries[dep].revdeps.add(tid) |
999 | 1102 | ||
1000 | self.init_progress_reporter.next_stage() | 1103 | self.init_progress_reporter.next_stage() |
1104 | bb.event.check_for_interrupts(self.cooker.data) | ||
1001 | 1105 | ||
1002 | # Identify tasks at the end of dependency chains | 1106 | # Identify tasks at the end of dependency chains |
1003 | # Error on circular dependency loops (length two) | 1107 | # Error on circular dependency loops (length two) |
1004 | endpoints = [] | 1108 | endpoints = [] |
1005 | for tid in self.runtaskentries: | 1109 | for tid in self.runtaskentries: |
1006 | revdeps = self.runtaskentries[tid].revdeps | 1110 | revdeps = self.runtaskentries[tid].revdeps |
1007 | if len(revdeps) == 0: | 1111 | if not revdeps: |
1008 | endpoints.append(tid) | 1112 | endpoints.append(tid) |
1009 | for dep in revdeps: | 1113 | for dep in revdeps: |
1010 | if dep in self.runtaskentries[tid].depends: | 1114 | if dep in self.runtaskentries[tid].depends: |
@@ -1014,12 +1118,14 @@ class RunQueueData: | |||
1014 | logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints)) | 1118 | logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints)) |
1015 | 1119 | ||
1016 | self.init_progress_reporter.next_stage() | 1120 | self.init_progress_reporter.next_stage() |
1121 | bb.event.check_for_interrupts(self.cooker.data) | ||
1017 | 1122 | ||
1018 | # Calculate task weights | 1123 | # Calculate task weights |
1019 | # Check of higher length circular dependencies | 1124 | # Check of higher length circular dependencies |
1020 | self.runq_weight = self.calculate_task_weights(endpoints) | 1125 | self.runq_weight = self.calculate_task_weights(endpoints) |
1021 | 1126 | ||
1022 | self.init_progress_reporter.next_stage() | 1127 | self.init_progress_reporter.next_stage() |
1128 | bb.event.check_for_interrupts(self.cooker.data) | ||
1023 | 1129 | ||
1024 | # Sanity Check - Check for multiple tasks building the same provider | 1130 | # Sanity Check - Check for multiple tasks building the same provider |
1025 | for mc in self.dataCaches: | 1131 | for mc in self.dataCaches: |
@@ -1040,7 +1146,7 @@ class RunQueueData: | |||
1040 | for prov in prov_list: | 1146 | for prov in prov_list: |
1041 | if len(prov_list[prov]) < 2: | 1147 | if len(prov_list[prov]) < 2: |
1042 | continue | 1148 | continue |
1043 | if prov in self.multi_provider_whitelist: | 1149 | if prov in self.multi_provider_allowed: |
1044 | continue | 1150 | continue |
1045 | seen_pn = [] | 1151 | seen_pn = [] |
1046 | # If two versions of the same PN are being built its fatal, we don't support it. | 1152 | # If two versions of the same PN are being built its fatal, we don't support it. |
@@ -1050,12 +1156,12 @@ class RunQueueData: | |||
1050 | seen_pn.append(pn) | 1156 | seen_pn.append(pn) |
1051 | else: | 1157 | else: |
1052 | bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn)) | 1158 | bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn)) |
1053 | msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov])) | 1159 | msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))] |
1054 | # | 1160 | # |
1055 | # Construct a list of things which uniquely depend on each provider | 1161 | # Construct a list of things which uniquely depend on each provider |
1056 | # since this may help the user figure out which dependency is triggering this warning | 1162 | # since this may help the user figure out which dependency is triggering this warning |
1057 | # | 1163 | # |
1058 | msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from." | 1164 | msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.") |
1059 | deplist = {} | 1165 | deplist = {} |
1060 | commondeps = None | 1166 | commondeps = None |
1061 | for provfn in prov_list[prov]: | 1167 | for provfn in prov_list[prov]: |
@@ -1075,12 +1181,12 @@ class RunQueueData: | |||
1075 | commondeps &= deps | 1181 | commondeps &= deps |
1076 | deplist[provfn] = deps | 1182 | deplist[provfn] = deps |
1077 | for provfn in deplist: | 1183 | for provfn in deplist: |
1078 | msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)) | 1184 | msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))) |
1079 | # | 1185 | # |
1080 | # Construct a list of provides and runtime providers for each recipe | 1186 | # Construct a list of provides and runtime providers for each recipe |
1081 | # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC) | 1187 | # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC) |
1082 | # | 1188 | # |
1083 | msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful." | 1189 | msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.") |
1084 | provide_results = {} | 1190 | provide_results = {} |
1085 | rprovide_results = {} | 1191 | rprovide_results = {} |
1086 | commonprovs = None | 1192 | commonprovs = None |
@@ -1107,30 +1213,20 @@ class RunQueueData: | |||
1107 | else: | 1213 | else: |
1108 | commonrprovs &= rprovides | 1214 | commonrprovs &= rprovides |
1109 | rprovide_results[provfn] = rprovides | 1215 | rprovide_results[provfn] = rprovides |
1110 | #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs)) | 1216 | #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs))) |
1111 | #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)) | 1217 | #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))) |
1112 | for provfn in prov_list[prov]: | 1218 | for provfn in prov_list[prov]: |
1113 | msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)) | 1219 | msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))) |
1114 | msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)) | 1220 | msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))) |
1115 | 1221 | ||
1116 | if self.warn_multi_bb: | 1222 | if self.warn_multi_bb: |
1117 | logger.verbnote(msg) | 1223 | logger.verbnote("".join(msgs)) |
1118 | else: | 1224 | else: |
1119 | logger.error(msg) | 1225 | logger.error("".join(msgs)) |
1120 | 1226 | ||
1121 | self.init_progress_reporter.next_stage() | 1227 | self.init_progress_reporter.next_stage() |
1122 | |||
1123 | # Create a whitelist usable by the stamp checks | ||
1124 | self.stampfnwhitelist = {} | ||
1125 | for mc in self.taskData: | ||
1126 | self.stampfnwhitelist[mc] = [] | ||
1127 | for entry in self.stampwhitelist.split(): | ||
1128 | if entry not in self.taskData[mc].build_targets: | ||
1129 | continue | ||
1130 | fn = self.taskData.build_targets[entry][0] | ||
1131 | self.stampfnwhitelist[mc].append(fn) | ||
1132 | |||
1133 | self.init_progress_reporter.next_stage() | 1228 | self.init_progress_reporter.next_stage() |
1229 | bb.event.check_for_interrupts(self.cooker.data) | ||
1134 | 1230 | ||
1135 | # Iterate over the task list looking for tasks with a 'setscene' function | 1231 | # Iterate over the task list looking for tasks with a 'setscene' function |
1136 | self.runq_setscene_tids = set() | 1232 | self.runq_setscene_tids = set() |
@@ -1143,6 +1239,7 @@ class RunQueueData: | |||
1143 | self.runq_setscene_tids.add(tid) | 1239 | self.runq_setscene_tids.add(tid) |
1144 | 1240 | ||
1145 | self.init_progress_reporter.next_stage() | 1241 | self.init_progress_reporter.next_stage() |
1242 | bb.event.check_for_interrupts(self.cooker.data) | ||
1146 | 1243 | ||
1147 | # Invalidate task if force mode active | 1244 | # Invalidate task if force mode active |
1148 | if self.cooker.configuration.force: | 1245 | if self.cooker.configuration.force: |
@@ -1159,6 +1256,7 @@ class RunQueueData: | |||
1159 | invalidate_task(fn + ":" + st, True) | 1256 | invalidate_task(fn + ":" + st, True) |
1160 | 1257 | ||
1161 | self.init_progress_reporter.next_stage() | 1258 | self.init_progress_reporter.next_stage() |
1259 | bb.event.check_for_interrupts(self.cooker.data) | ||
1162 | 1260 | ||
1163 | # Create and print to the logs a virtual/xxxx -> PN (fn) table | 1261 | # Create and print to the logs a virtual/xxxx -> PN (fn) table |
1164 | for mc in taskData: | 1262 | for mc in taskData: |
@@ -1171,18 +1269,20 @@ class RunQueueData: | |||
1171 | bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc]) | 1269 | bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc]) |
1172 | 1270 | ||
1173 | self.init_progress_reporter.next_stage() | 1271 | self.init_progress_reporter.next_stage() |
1272 | bb.event.check_for_interrupts(self.cooker.data) | ||
1174 | 1273 | ||
1175 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) | 1274 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) |
1176 | 1275 | ||
1177 | # Iterate over the task list and call into the siggen code | 1276 | # Iterate over the task list and call into the siggen code |
1178 | dealtwith = set() | 1277 | dealtwith = set() |
1179 | todeal = set(self.runtaskentries) | 1278 | todeal = set(self.runtaskentries) |
1180 | while len(todeal) > 0: | 1279 | while todeal: |
1181 | for tid in todeal.copy(): | 1280 | for tid in todeal.copy(): |
1182 | if len(self.runtaskentries[tid].depends - dealtwith) == 0: | 1281 | if not (self.runtaskentries[tid].depends - dealtwith): |
1183 | dealtwith.add(tid) | 1282 | dealtwith.add(tid) |
1184 | todeal.remove(tid) | 1283 | todeal.remove(tid) |
1185 | self.prepare_task_hash(tid) | 1284 | self.prepare_task_hash(tid) |
1285 | bb.event.check_for_interrupts(self.cooker.data) | ||
1186 | 1286 | ||
1187 | bb.parse.siggen.writeout_file_checksum_cache() | 1287 | bb.parse.siggen.writeout_file_checksum_cache() |
1188 | 1288 | ||
@@ -1190,9 +1290,8 @@ class RunQueueData: | |||
1190 | return len(self.runtaskentries) | 1290 | return len(self.runtaskentries) |
1191 | 1291 | ||
1192 | def prepare_task_hash(self, tid): | 1292 | def prepare_task_hash(self, tid): |
1193 | dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid)) | 1293 | bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1194 | bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc) | 1294 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1195 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc) | ||
1196 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) | 1295 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) |
1197 | 1296 | ||
1198 | def dump_data(self): | 1297 | def dump_data(self): |
@@ -1218,7 +1317,6 @@ class RunQueue: | |||
1218 | self.cfgData = cfgData | 1317 | self.cfgData = cfgData |
1219 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) | 1318 | self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) |
1220 | 1319 | ||
1221 | self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile" | ||
1222 | self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None | 1320 | self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None |
1223 | self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None | 1321 | self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None |
1224 | 1322 | ||
@@ -1237,30 +1335,40 @@ class RunQueue: | |||
1237 | self.worker = {} | 1335 | self.worker = {} |
1238 | self.fakeworker = {} | 1336 | self.fakeworker = {} |
1239 | 1337 | ||
1338 | @staticmethod | ||
1339 | def send_pickled_data(worker, data, name): | ||
1340 | msg = bytearray() | ||
1341 | msg.extend(b"<" + name.encode() + b">") | ||
1342 | pickled_data = pickle.dumps(data) | ||
1343 | msg.extend(len(pickled_data).to_bytes(4, 'big')) | ||
1344 | msg.extend(pickled_data) | ||
1345 | msg.extend(b"</" + name.encode() + b">") | ||
1346 | worker.stdin.write(msg) | ||
1347 | |||
1240 | def _start_worker(self, mc, fakeroot = False, rqexec = None): | 1348 | def _start_worker(self, mc, fakeroot = False, rqexec = None): |
1241 | logger.debug("Starting bitbake-worker") | 1349 | logger.debug("Starting bitbake-worker") |
1242 | magic = "decafbad" | 1350 | magic = "decafbad" |
1243 | if self.cooker.configuration.profile: | 1351 | if self.cooker.configuration.profile: |
1244 | magic = "decafbadbad" | 1352 | magic = "decafbadbad" |
1353 | fakerootlogs = None | ||
1354 | |||
1355 | workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker") | ||
1245 | if fakeroot: | 1356 | if fakeroot: |
1246 | magic = magic + "beef" | 1357 | magic = magic + "beef" |
1247 | mcdata = self.cooker.databuilder.mcdata[mc] | 1358 | mcdata = self.cooker.databuilder.mcdata[mc] |
1248 | fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD")) | 1359 | fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD")) |
1249 | fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split() | 1360 | fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split() |
1250 | env = os.environ.copy() | 1361 | env = os.environ.copy() |
1251 | for key, value in (var.split('=') for var in fakerootenv): | 1362 | for key, value in (var.split('=',1) for var in fakerootenv): |
1252 | env[key] = value | 1363 | env[key] = value |
1253 | worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env) | 1364 | worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env) |
1365 | fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs | ||
1254 | else: | 1366 | else: |
1255 | worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE) | 1367 | worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE) |
1256 | bb.utils.nonblockingfd(worker.stdout) | 1368 | bb.utils.nonblockingfd(worker.stdout) |
1257 | workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec) | 1369 | workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs) |
1258 | 1370 | ||
1259 | workerdata = { | 1371 | workerdata = { |
1260 | "taskdeps" : self.rqdata.dataCaches[mc].task_deps, | ||
1261 | "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv, | ||
1262 | "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs, | ||
1263 | "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv, | ||
1264 | "sigdata" : bb.parse.siggen.get_taskdata(), | 1372 | "sigdata" : bb.parse.siggen.get_taskdata(), |
1265 | "logdefaultlevel" : bb.msg.loggerDefaultLogLevel, | 1373 | "logdefaultlevel" : bb.msg.loggerDefaultLogLevel, |
1266 | "build_verbose_shell" : self.cooker.configuration.build_verbose_shell, | 1374 | "build_verbose_shell" : self.cooker.configuration.build_verbose_shell, |
@@ -1274,9 +1382,9 @@ class RunQueue: | |||
1274 | "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"), | 1382 | "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"), |
1275 | } | 1383 | } |
1276 | 1384 | ||
1277 | worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") | 1385 | RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig") |
1278 | worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>") | 1386 | RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata") |
1279 | worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>") | 1387 | RunQueue.send_pickled_data(worker, workerdata, "workerdata") |
1280 | worker.stdin.flush() | 1388 | worker.stdin.flush() |
1281 | 1389 | ||
1282 | return RunQueueWorker(worker, workerpipe) | 1390 | return RunQueueWorker(worker, workerpipe) |
@@ -1286,7 +1394,7 @@ class RunQueue: | |||
1286 | return | 1394 | return |
1287 | logger.debug("Teardown for bitbake-worker") | 1395 | logger.debug("Teardown for bitbake-worker") |
1288 | try: | 1396 | try: |
1289 | worker.process.stdin.write(b"<quit></quit>") | 1397 | RunQueue.send_pickled_data(worker.process, b"", "quit") |
1290 | worker.process.stdin.flush() | 1398 | worker.process.stdin.flush() |
1291 | worker.process.stdin.close() | 1399 | worker.process.stdin.close() |
1292 | except IOError: | 1400 | except IOError: |
@@ -1298,12 +1406,12 @@ class RunQueue: | |||
1298 | continue | 1406 | continue |
1299 | worker.pipe.close() | 1407 | worker.pipe.close() |
1300 | 1408 | ||
1301 | def start_worker(self): | 1409 | def start_worker(self, rqexec): |
1302 | if self.worker: | 1410 | if self.worker: |
1303 | self.teardown_workers() | 1411 | self.teardown_workers() |
1304 | self.teardown = False | 1412 | self.teardown = False |
1305 | for mc in self.rqdata.dataCaches: | 1413 | for mc in self.rqdata.dataCaches: |
1306 | self.worker[mc] = self._start_worker(mc) | 1414 | self.worker[mc] = self._start_worker(mc, False, rqexec) |
1307 | 1415 | ||
1308 | def start_fakeworker(self, rqexec, mc): | 1416 | def start_fakeworker(self, rqexec, mc): |
1309 | if not mc in self.fakeworker: | 1417 | if not mc in self.fakeworker: |
@@ -1345,15 +1453,7 @@ class RunQueue: | |||
1345 | if taskname is None: | 1453 | if taskname is None: |
1346 | taskname = tn | 1454 | taskname = tn |
1347 | 1455 | ||
1348 | if self.stamppolicy == "perfile": | 1456 | stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn) |
1349 | fulldeptree = False | ||
1350 | else: | ||
1351 | fulldeptree = True | ||
1352 | stampwhitelist = [] | ||
1353 | if self.stamppolicy == "whitelist": | ||
1354 | stampwhitelist = self.rqdata.stampfnwhitelist[mc] | ||
1355 | |||
1356 | stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn) | ||
1357 | 1457 | ||
1358 | # If the stamp is missing, it's not current | 1458 | # If the stamp is missing, it's not current |
1359 | if not os.access(stampfile, os.F_OK): | 1459 | if not os.access(stampfile, os.F_OK): |
@@ -1365,7 +1465,7 @@ class RunQueue: | |||
1365 | logger.debug2("%s.%s is nostamp\n", fn, taskname) | 1465 | logger.debug2("%s.%s is nostamp\n", fn, taskname) |
1366 | return False | 1466 | return False |
1367 | 1467 | ||
1368 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | 1468 | if taskname.endswith("_setscene"): |
1369 | return True | 1469 | return True |
1370 | 1470 | ||
1371 | if cache is None: | 1471 | if cache is None: |
@@ -1376,15 +1476,15 @@ class RunQueue: | |||
1376 | for dep in self.rqdata.runtaskentries[tid].depends: | 1476 | for dep in self.rqdata.runtaskentries[tid].depends: |
1377 | if iscurrent: | 1477 | if iscurrent: |
1378 | (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep) | 1478 | (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep) |
1379 | stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2) | 1479 | stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2) |
1380 | stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2) | 1480 | stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2) |
1381 | t2 = get_timestamp(stampfile2) | 1481 | t2 = get_timestamp(stampfile2) |
1382 | t3 = get_timestamp(stampfile3) | 1482 | t3 = get_timestamp(stampfile3) |
1383 | if t3 and not t2: | 1483 | if t3 and not t2: |
1384 | continue | 1484 | continue |
1385 | if t3 and t3 > t2: | 1485 | if t3 and t3 > t2: |
1386 | continue | 1486 | continue |
1387 | if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): | 1487 | if fn == fn2: |
1388 | if not t2: | 1488 | if not t2: |
1389 | logger.debug2('Stampfile %s does not exist', stampfile2) | 1489 | logger.debug2('Stampfile %s does not exist', stampfile2) |
1390 | iscurrent = False | 1490 | iscurrent = False |
@@ -1434,10 +1534,11 @@ class RunQueue: | |||
1434 | """ | 1534 | """ |
1435 | Run the tasks in a queue prepared by rqdata.prepare() | 1535 | Run the tasks in a queue prepared by rqdata.prepare() |
1436 | Upon failure, optionally try to recover the build using any alternate providers | 1536 | Upon failure, optionally try to recover the build using any alternate providers |
1437 | (if the abort on failure configuration option isn't set) | 1537 | (if the halt on failure configuration option isn't set) |
1438 | """ | 1538 | """ |
1439 | 1539 | ||
1440 | retval = True | 1540 | retval = True |
1541 | bb.event.check_for_interrupts(self.cooker.data) | ||
1441 | 1542 | ||
1442 | if self.state is runQueuePrepare: | 1543 | if self.state is runQueuePrepare: |
1443 | # NOTE: if you add, remove or significantly refactor the stages of this | 1544 | # NOTE: if you add, remove or significantly refactor the stages of this |
@@ -1466,10 +1567,13 @@ class RunQueue: | |||
1466 | 1567 | ||
1467 | if not self.dm_event_handler_registered: | 1568 | if not self.dm_event_handler_registered: |
1468 | res = bb.event.register(self.dm_event_handler_name, | 1569 | res = bb.event.register(self.dm_event_handler_name, |
1469 | lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False, | 1570 | lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False, |
1470 | ('bb.event.HeartbeatEvent',), data=self.cfgData) | 1571 | ('bb.event.HeartbeatEvent',), data=self.cfgData) |
1471 | self.dm_event_handler_registered = True | 1572 | self.dm_event_handler_registered = True |
1472 | 1573 | ||
1574 | self.rqdata.init_progress_reporter.next_stage() | ||
1575 | self.rqexe = RunQueueExecute(self) | ||
1576 | |||
1473 | dump = self.cooker.configuration.dump_signatures | 1577 | dump = self.cooker.configuration.dump_signatures |
1474 | if dump: | 1578 | if dump: |
1475 | self.rqdata.init_progress_reporter.finish() | 1579 | self.rqdata.init_progress_reporter.finish() |
@@ -1481,16 +1585,14 @@ class RunQueue: | |||
1481 | self.state = runQueueComplete | 1585 | self.state = runQueueComplete |
1482 | 1586 | ||
1483 | if self.state is runQueueSceneInit: | 1587 | if self.state is runQueueSceneInit: |
1484 | self.rqdata.init_progress_reporter.next_stage() | 1588 | self.start_worker(self.rqexe) |
1485 | self.start_worker() | 1589 | self.rqdata.init_progress_reporter.finish() |
1486 | self.rqdata.init_progress_reporter.next_stage() | ||
1487 | self.rqexe = RunQueueExecute(self) | ||
1488 | 1590 | ||
1489 | # If we don't have any setscene functions, skip execution | 1591 | # If we don't have any setscene functions, skip execution |
1490 | if len(self.rqdata.runq_setscene_tids) == 0: | 1592 | if not self.rqdata.runq_setscene_tids: |
1491 | logger.info('No setscene tasks') | 1593 | logger.info('No setscene tasks') |
1492 | for tid in self.rqdata.runtaskentries: | 1594 | for tid in self.rqdata.runtaskentries: |
1493 | if len(self.rqdata.runtaskentries[tid].depends) == 0: | 1595 | if not self.rqdata.runtaskentries[tid].depends: |
1494 | self.rqexe.setbuildable(tid) | 1596 | self.rqexe.setbuildable(tid) |
1495 | self.rqexe.tasks_notcovered.add(tid) | 1597 | self.rqexe.tasks_notcovered.add(tid) |
1496 | self.rqexe.sqdone = True | 1598 | self.rqexe.sqdone = True |
@@ -1563,29 +1665,28 @@ class RunQueue: | |||
1563 | else: | 1665 | else: |
1564 | self.rqexe.finish() | 1666 | self.rqexe.finish() |
1565 | 1667 | ||
1566 | def rq_dump_sigfn(self, fn, options): | 1668 | def _rq_dump_sigtid(self, tids): |
1567 | bb_cache = bb.cache.NoCache(self.cooker.databuilder) | 1669 | for tid in tids: |
1568 | mc = bb.runqueue.mc_from_tid(fn) | 1670 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
1569 | the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn)) | 1671 | dataCaches = self.rqdata.dataCaches |
1570 | siggen = bb.parse.siggen | 1672 | bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True) |
1571 | dataCaches = self.rqdata.dataCaches | ||
1572 | siggen.dump_sigfn(fn, dataCaches, options) | ||
1573 | 1673 | ||
1574 | def dump_signatures(self, options): | 1674 | def dump_signatures(self, options): |
1575 | fns = set() | 1675 | if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset: |
1576 | bb.note("Reparsing files to collect dependency data") | 1676 | bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled") |
1577 | 1677 | ||
1578 | for tid in self.rqdata.runtaskentries: | 1678 | bb.note("Writing task signature files") |
1579 | fn = fn_from_tid(tid) | ||
1580 | fns.add(fn) | ||
1581 | 1679 | ||
1582 | max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) | 1680 | max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) |
1681 | def chunkify(l, n): | ||
1682 | return [l[i::n] for i in range(n)] | ||
1683 | tids = chunkify(list(self.rqdata.runtaskentries), max_process) | ||
1583 | # We cannot use the real multiprocessing.Pool easily due to some local data | 1684 | # We cannot use the real multiprocessing.Pool easily due to some local data |
1584 | # that can't be pickled. This is a cheap multi-process solution. | 1685 | # that can't be pickled. This is a cheap multi-process solution. |
1585 | launched = [] | 1686 | launched = [] |
1586 | while fns: | 1687 | while tids: |
1587 | if len(launched) < max_process: | 1688 | if len(launched) < max_process: |
1588 | p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options)) | 1689 | p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), )) |
1589 | p.start() | 1690 | p.start() |
1590 | launched.append(p) | 1691 | launched.append(p) |
1591 | for q in launched: | 1692 | for q in launched: |
@@ -1600,6 +1701,17 @@ class RunQueue: | |||
1600 | return | 1701 | return |
1601 | 1702 | ||
1602 | def print_diffscenetasks(self): | 1703 | def print_diffscenetasks(self): |
1704 | def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid): | ||
1705 | invalidtasks = [] | ||
1706 | for t in taskdepends[task].depends: | ||
1707 | if t not in valid and t not in visited_invalid: | ||
1708 | invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid)) | ||
1709 | visited_invalid.add(t) | ||
1710 | |||
1711 | direct_invalid = [t for t in taskdepends[task].depends if t not in valid] | ||
1712 | if not direct_invalid and task not in noexec: | ||
1713 | invalidtasks = [task] | ||
1714 | return invalidtasks | ||
1603 | 1715 | ||
1604 | noexec = [] | 1716 | noexec = [] |
1605 | tocheck = set() | 1717 | tocheck = set() |
@@ -1633,46 +1745,49 @@ class RunQueue: | |||
1633 | valid_new.add(dep) | 1745 | valid_new.add(dep) |
1634 | 1746 | ||
1635 | invalidtasks = set() | 1747 | invalidtasks = set() |
1636 | for tid in self.rqdata.runtaskentries: | ||
1637 | if tid not in valid_new and tid not in noexec: | ||
1638 | invalidtasks.add(tid) | ||
1639 | 1748 | ||
1640 | found = set() | 1749 | toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets]) |
1641 | processed = set() | 1750 | for tid in toptasks: |
1642 | for tid in invalidtasks: | ||
1643 | toprocess = set([tid]) | 1751 | toprocess = set([tid]) |
1644 | while toprocess: | 1752 | while toprocess: |
1645 | next = set() | 1753 | next = set() |
1754 | visited_invalid = set() | ||
1646 | for t in toprocess: | 1755 | for t in toprocess: |
1647 | for dep in self.rqdata.runtaskentries[t].depends: | 1756 | if t not in valid_new and t not in noexec: |
1648 | if dep in invalidtasks: | 1757 | invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid)) |
1649 | found.add(tid) | 1758 | continue |
1650 | if dep not in processed: | 1759 | if t in self.rqdata.runq_setscene_tids: |
1651 | processed.add(dep) | 1760 | for dep in self.rqexe.sqdata.sq_deps[t]: |
1652 | next.add(dep) | 1761 | next.add(dep) |
1762 | continue | ||
1763 | |||
1764 | for dep in self.rqdata.runtaskentries[t].depends: | ||
1765 | next.add(dep) | ||
1766 | |||
1653 | toprocess = next | 1767 | toprocess = next |
1654 | if tid in found: | ||
1655 | toprocess = set() | ||
1656 | 1768 | ||
1657 | tasklist = [] | 1769 | tasklist = [] |
1658 | for tid in invalidtasks.difference(found): | 1770 | for tid in invalidtasks: |
1659 | tasklist.append(tid) | 1771 | tasklist.append(tid) |
1660 | 1772 | ||
1661 | if tasklist: | 1773 | if tasklist: |
1662 | bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist)) | 1774 | bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist)) |
1663 | 1775 | ||
1664 | return invalidtasks.difference(found) | 1776 | return invalidtasks |
1665 | 1777 | ||
1666 | def write_diffscenetasks(self, invalidtasks): | 1778 | def write_diffscenetasks(self, invalidtasks): |
1779 | bb.siggen.check_siggen_version(bb.siggen) | ||
1667 | 1780 | ||
1668 | # Define recursion callback | 1781 | # Define recursion callback |
1669 | def recursecb(key, hash1, hash2): | 1782 | def recursecb(key, hash1, hash2): |
1670 | hashes = [hash1, hash2] | 1783 | hashes = [hash1, hash2] |
1784 | bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes)) | ||
1671 | hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData) | 1785 | hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData) |
1786 | bb.debug(1, "Found hashfiles:\n{}".format(hashfiles)) | ||
1672 | 1787 | ||
1673 | recout = [] | 1788 | recout = [] |
1674 | if len(hashfiles) == 2: | 1789 | if len(hashfiles) == 2: |
1675 | out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb) | 1790 | out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb) |
1676 | recout.extend(list(' ' + l for l in out2)) | 1791 | recout.extend(list(' ' + l for l in out2)) |
1677 | else: | 1792 | else: |
1678 | recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) | 1793 | recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) |
@@ -1683,20 +1798,25 @@ class RunQueue: | |||
1683 | for tid in invalidtasks: | 1798 | for tid in invalidtasks: |
1684 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 1799 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
1685 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 1800 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] |
1686 | h = self.rqdata.runtaskentries[tid].hash | 1801 | h = self.rqdata.runtaskentries[tid].unihash |
1687 | matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData) | 1802 | bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname)) |
1803 | matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc]) | ||
1804 | bb.debug(1, "Found hashfiles:\n{}".format(matches)) | ||
1688 | match = None | 1805 | match = None |
1689 | for m in matches: | 1806 | for m in matches.values(): |
1690 | if h in m: | 1807 | if h in m['path']: |
1691 | match = m | 1808 | match = m['path'] |
1692 | if match is None: | 1809 | if match is None: |
1693 | bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h) | 1810 | bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid)) |
1694 | matches = {k : v for k, v in iter(matches.items()) if h not in k} | 1811 | matches = {k : v for k, v in iter(matches.items()) if h not in k} |
1812 | matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']} | ||
1813 | if matches_local: | ||
1814 | matches = matches_local | ||
1695 | if matches: | 1815 | if matches: |
1696 | latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1] | 1816 | latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path'] |
1697 | prevh = __find_sha256__.search(latestmatch).group(0) | 1817 | prevh = __find_sha256__.search(latestmatch).group(0) |
1698 | output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb) | 1818 | output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb) |
1699 | bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output)) | 1819 | bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output)) |
1700 | 1820 | ||
1701 | 1821 | ||
1702 | class RunQueueExecute: | 1822 | class RunQueueExecute: |
@@ -1709,6 +1829,10 @@ class RunQueueExecute: | |||
1709 | 1829 | ||
1710 | self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) | 1830 | self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) |
1711 | self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" | 1831 | self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" |
1832 | self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU") | ||
1833 | self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO") | ||
1834 | self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY") | ||
1835 | self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX") | ||
1712 | 1836 | ||
1713 | self.sq_buildable = set() | 1837 | self.sq_buildable = set() |
1714 | self.sq_running = set() | 1838 | self.sq_running = set() |
@@ -1726,6 +1850,8 @@ class RunQueueExecute: | |||
1726 | self.build_stamps2 = [] | 1850 | self.build_stamps2 = [] |
1727 | self.failed_tids = [] | 1851 | self.failed_tids = [] |
1728 | self.sq_deferred = {} | 1852 | self.sq_deferred = {} |
1853 | self.sq_needed_harddeps = set() | ||
1854 | self.sq_harddep_deferred = set() | ||
1729 | 1855 | ||
1730 | self.stampcache = {} | 1856 | self.stampcache = {} |
1731 | 1857 | ||
@@ -1733,17 +1859,39 @@ class RunQueueExecute: | |||
1733 | self.holdoff_need_update = True | 1859 | self.holdoff_need_update = True |
1734 | self.sqdone = False | 1860 | self.sqdone = False |
1735 | 1861 | ||
1736 | self.stats = RunQueueStats(len(self.rqdata.runtaskentries)) | 1862 | self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids)) |
1737 | self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids)) | ||
1738 | |||
1739 | for mc in rq.worker: | ||
1740 | rq.worker[mc].pipe.setrunqueueexec(self) | ||
1741 | for mc in rq.fakeworker: | ||
1742 | rq.fakeworker[mc].pipe.setrunqueueexec(self) | ||
1743 | 1863 | ||
1744 | if self.number_tasks <= 0: | 1864 | if self.number_tasks <= 0: |
1745 | bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks) | 1865 | bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks) |
1746 | 1866 | ||
1867 | lower_limit = 1.0 | ||
1868 | upper_limit = 1000000.0 | ||
1869 | if self.max_cpu_pressure: | ||
1870 | self.max_cpu_pressure = float(self.max_cpu_pressure) | ||
1871 | if self.max_cpu_pressure < lower_limit: | ||
1872 | bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit)) | ||
1873 | if self.max_cpu_pressure > upper_limit: | ||
1874 | bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure)) | ||
1875 | |||
1876 | if self.max_io_pressure: | ||
1877 | self.max_io_pressure = float(self.max_io_pressure) | ||
1878 | if self.max_io_pressure < lower_limit: | ||
1879 | bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit)) | ||
1880 | if self.max_io_pressure > upper_limit: | ||
1881 | bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure)) | ||
1882 | |||
1883 | if self.max_memory_pressure: | ||
1884 | self.max_memory_pressure = float(self.max_memory_pressure) | ||
1885 | if self.max_memory_pressure < lower_limit: | ||
1886 | bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit)) | ||
1887 | if self.max_memory_pressure > upper_limit: | ||
1888 | bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure)) | ||
1889 | |||
1890 | if self.max_loadfactor: | ||
1891 | self.max_loadfactor = float(self.max_loadfactor) | ||
1892 | if self.max_loadfactor <= 0: | ||
1893 | bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor)) | ||
1894 | |||
1747 | # List of setscene tasks which we've covered | 1895 | # List of setscene tasks which we've covered |
1748 | self.scenequeue_covered = set() | 1896 | self.scenequeue_covered = set() |
1749 | # List of tasks which are covered (including setscene ones) | 1897 | # List of tasks which are covered (including setscene ones) |
@@ -1753,11 +1901,6 @@ class RunQueueExecute: | |||
1753 | self.tasks_notcovered = set() | 1901 | self.tasks_notcovered = set() |
1754 | self.scenequeue_notneeded = set() | 1902 | self.scenequeue_notneeded = set() |
1755 | 1903 | ||
1756 | # We can't skip specified target tasks which aren't setscene tasks | ||
1757 | self.cantskip = set(self.rqdata.target_tids) | ||
1758 | self.cantskip.difference_update(self.rqdata.runq_setscene_tids) | ||
1759 | self.cantskip.intersection_update(self.rqdata.runtaskentries) | ||
1760 | |||
1761 | schedulers = self.get_schedulers() | 1904 | schedulers = self.get_schedulers() |
1762 | for scheduler in schedulers: | 1905 | for scheduler in schedulers: |
1763 | if self.scheduler == scheduler.name: | 1906 | if self.scheduler == scheduler.name: |
@@ -1768,11 +1911,29 @@ class RunQueueExecute: | |||
1768 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % | 1911 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % |
1769 | (self.scheduler, ", ".join(obj.name for obj in schedulers))) | 1912 | (self.scheduler, ", ".join(obj.name for obj in schedulers))) |
1770 | 1913 | ||
1771 | #if len(self.rqdata.runq_setscene_tids) > 0: | 1914 | #if self.rqdata.runq_setscene_tids: |
1772 | self.sqdata = SQData() | 1915 | self.sqdata = SQData() |
1773 | build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self) | 1916 | build_scenequeue_data(self.sqdata, self.rqdata, self) |
1917 | |||
1918 | update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True) | ||
1919 | |||
1920 | # Compute a list of 'stale' sstate tasks where the current hash does not match the one | ||
1921 | # in any stamp files. Pass the list out to metadata as an event. | ||
1922 | found = {} | ||
1923 | for tid in self.rqdata.runq_setscene_tids: | ||
1924 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | ||
1925 | stamps = bb.build.find_stale_stamps(taskname, taskfn) | ||
1926 | if stamps: | ||
1927 | if mc not in found: | ||
1928 | found[mc] = {} | ||
1929 | found[mc][tid] = stamps | ||
1930 | for mc in found: | ||
1931 | event = bb.event.StaleSetSceneTasks(found[mc]) | ||
1932 | bb.event.fire(event, self.cooker.databuilder.mcdata[mc]) | ||
1933 | |||
1934 | self.build_taskdepdata_cache() | ||
1774 | 1935 | ||
1775 | def runqueue_process_waitpid(self, task, status): | 1936 | def runqueue_process_waitpid(self, task, status, fakerootlog=None): |
1776 | 1937 | ||
1777 | # self.build_stamps[pid] may not exist when use shared work directory. | 1938 | # self.build_stamps[pid] may not exist when use shared work directory. |
1778 | if task in self.build_stamps: | 1939 | if task in self.build_stamps: |
@@ -1785,9 +1946,10 @@ class RunQueueExecute: | |||
1785 | else: | 1946 | else: |
1786 | self.sq_task_complete(task) | 1947 | self.sq_task_complete(task) |
1787 | self.sq_live.remove(task) | 1948 | self.sq_live.remove(task) |
1949 | self.stats.updateActiveSetscene(len(self.sq_live)) | ||
1788 | else: | 1950 | else: |
1789 | if status != 0: | 1951 | if status != 0: |
1790 | self.task_fail(task, status) | 1952 | self.task_fail(task, status, fakerootlog=fakerootlog) |
1791 | else: | 1953 | else: |
1792 | self.task_complete(task) | 1954 | self.task_complete(task) |
1793 | return True | 1955 | return True |
@@ -1795,20 +1957,20 @@ class RunQueueExecute: | |||
1795 | def finish_now(self): | 1957 | def finish_now(self): |
1796 | for mc in self.rq.worker: | 1958 | for mc in self.rq.worker: |
1797 | try: | 1959 | try: |
1798 | self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>") | 1960 | RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow") |
1799 | self.rq.worker[mc].process.stdin.flush() | 1961 | self.rq.worker[mc].process.stdin.flush() |
1800 | except IOError: | 1962 | except IOError: |
1801 | # worker must have died? | 1963 | # worker must have died? |
1802 | pass | 1964 | pass |
1803 | for mc in self.rq.fakeworker: | 1965 | for mc in self.rq.fakeworker: |
1804 | try: | 1966 | try: |
1805 | self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>") | 1967 | RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow") |
1806 | self.rq.fakeworker[mc].process.stdin.flush() | 1968 | self.rq.fakeworker[mc].process.stdin.flush() |
1807 | except IOError: | 1969 | except IOError: |
1808 | # worker must have died? | 1970 | # worker must have died? |
1809 | pass | 1971 | pass |
1810 | 1972 | ||
1811 | if len(self.failed_tids) != 0: | 1973 | if self.failed_tids: |
1812 | self.rq.state = runQueueFailed | 1974 | self.rq.state = runQueueFailed |
1813 | return | 1975 | return |
1814 | 1976 | ||
@@ -1818,13 +1980,13 @@ class RunQueueExecute: | |||
1818 | def finish(self): | 1980 | def finish(self): |
1819 | self.rq.state = runQueueCleanUp | 1981 | self.rq.state = runQueueCleanUp |
1820 | 1982 | ||
1821 | active = self.stats.active + self.sq_stats.active | 1983 | active = self.stats.active + len(self.sq_live) |
1822 | if active > 0: | 1984 | if active > 0: |
1823 | bb.event.fire(runQueueExitWait(active), self.cfgData) | 1985 | bb.event.fire(runQueueExitWait(active), self.cfgData) |
1824 | self.rq.read_workers() | 1986 | self.rq.read_workers() |
1825 | return self.rq.active_fds() | 1987 | return self.rq.active_fds() |
1826 | 1988 | ||
1827 | if len(self.failed_tids) != 0: | 1989 | if self.failed_tids: |
1828 | self.rq.state = runQueueFailed | 1990 | self.rq.state = runQueueFailed |
1829 | return True | 1991 | return True |
1830 | 1992 | ||
@@ -1851,7 +2013,7 @@ class RunQueueExecute: | |||
1851 | return valid | 2013 | return valid |
1852 | 2014 | ||
1853 | def can_start_task(self): | 2015 | def can_start_task(self): |
1854 | active = self.stats.active + self.sq_stats.active | 2016 | active = self.stats.active + len(self.sq_live) |
1855 | can_start = active < self.number_tasks | 2017 | can_start = active < self.number_tasks |
1856 | return can_start | 2018 | return can_start |
1857 | 2019 | ||
@@ -1871,8 +2033,7 @@ class RunQueueExecute: | |||
1871 | try: | 2033 | try: |
1872 | module = __import__(modname, fromlist=(name,)) | 2034 | module = __import__(modname, fromlist=(name,)) |
1873 | except ImportError as exc: | 2035 | except ImportError as exc: |
1874 | logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) | 2036 | bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) |
1875 | raise SystemExit(1) | ||
1876 | else: | 2037 | else: |
1877 | schedulers.add(getattr(module, name)) | 2038 | schedulers.add(getattr(module, name)) |
1878 | return schedulers | 2039 | return schedulers |
@@ -1902,21 +2063,52 @@ class RunQueueExecute: | |||
1902 | self.setbuildable(revdep) | 2063 | self.setbuildable(revdep) |
1903 | logger.debug("Marking task %s as buildable", revdep) | 2064 | logger.debug("Marking task %s as buildable", revdep) |
1904 | 2065 | ||
2066 | found = None | ||
2067 | for t in sorted(self.sq_deferred.copy()): | ||
2068 | if self.sq_deferred[t] == task: | ||
2069 | # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task. | ||
2070 | # We shouldn't allow all to run at once as it is prone to races. | ||
2071 | if not found: | ||
2072 | bb.debug(1, "Deferred task %s now buildable" % t) | ||
2073 | del self.sq_deferred[t] | ||
2074 | update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) | ||
2075 | found = t | ||
2076 | else: | ||
2077 | bb.debug(1, "Deferring %s after %s" % (t, found)) | ||
2078 | self.sq_deferred[t] = found | ||
2079 | |||
1905 | def task_complete(self, task): | 2080 | def task_complete(self, task): |
1906 | self.stats.taskCompleted() | 2081 | self.stats.taskCompleted() |
1907 | bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) | 2082 | bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) |
1908 | self.task_completeoutright(task) | 2083 | self.task_completeoutright(task) |
1909 | self.runq_tasksrun.add(task) | 2084 | self.runq_tasksrun.add(task) |
1910 | 2085 | ||
1911 | def task_fail(self, task, exitcode): | 2086 | def task_fail(self, task, exitcode, fakerootlog=None): |
1912 | """ | 2087 | """ |
1913 | Called when a task has failed | 2088 | Called when a task has failed |
1914 | Updates the state engine with the failure | 2089 | Updates the state engine with the failure |
1915 | """ | 2090 | """ |
1916 | self.stats.taskFailed() | 2091 | self.stats.taskFailed() |
1917 | self.failed_tids.append(task) | 2092 | self.failed_tids.append(task) |
1918 | bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData) | 2093 | |
1919 | if self.rqdata.taskData[''].abort: | 2094 | fakeroot_log = [] |
2095 | if fakerootlog and os.path.exists(fakerootlog): | ||
2096 | with open(fakerootlog) as fakeroot_log_file: | ||
2097 | fakeroot_failed = False | ||
2098 | for line in reversed(fakeroot_log_file.readlines()): | ||
2099 | for fakeroot_error in ['mismatch', 'error', 'fatal']: | ||
2100 | if fakeroot_error in line.lower(): | ||
2101 | fakeroot_failed = True | ||
2102 | if 'doing new pid setup and server start' in line: | ||
2103 | break | ||
2104 | fakeroot_log.append(line) | ||
2105 | |||
2106 | if not fakeroot_failed: | ||
2107 | fakeroot_log = [] | ||
2108 | |||
2109 | bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData) | ||
2110 | |||
2111 | if self.rqdata.taskData[''].halt: | ||
1920 | self.rq.state = runQueueCleanUp | 2112 | self.rq.state = runQueueCleanUp |
1921 | 2113 | ||
1922 | def task_skip(self, task, reason): | 2114 | def task_skip(self, task, reason): |
@@ -1931,7 +2123,7 @@ class RunQueueExecute: | |||
1931 | err = False | 2123 | err = False |
1932 | if not self.sqdone: | 2124 | if not self.sqdone: |
1933 | logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) | 2125 | logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) |
1934 | completeevent = sceneQueueComplete(self.sq_stats, self.rq) | 2126 | completeevent = sceneQueueComplete(self.stats, self.rq) |
1935 | bb.event.fire(completeevent, self.cfgData) | 2127 | bb.event.fire(completeevent, self.cfgData) |
1936 | if self.sq_deferred: | 2128 | if self.sq_deferred: |
1937 | logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred)) | 2129 | logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred)) |
@@ -1943,6 +2135,10 @@ class RunQueueExecute: | |||
1943 | logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) | 2135 | logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) |
1944 | err = True | 2136 | err = True |
1945 | 2137 | ||
2138 | for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered): | ||
2139 | # No task should end up in both covered and uncovered, that is a bug. | ||
2140 | logger.error("Setscene task %s in both covered and notcovered." % tid) | ||
2141 | |||
1946 | for tid in self.rqdata.runq_setscene_tids: | 2142 | for tid in self.rqdata.runq_setscene_tids: |
1947 | if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: | 2143 | if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: |
1948 | err = True | 2144 | err = True |
@@ -1961,7 +2157,7 @@ class RunQueueExecute: | |||
1961 | if x not in self.tasks_scenequeue_done: | 2157 | if x not in self.tasks_scenequeue_done: |
1962 | logger.error("Task %s was never processed by the setscene code" % x) | 2158 | logger.error("Task %s was never processed by the setscene code" % x) |
1963 | err = True | 2159 | err = True |
1964 | if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable: | 2160 | if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable: |
1965 | logger.error("Task %s was never marked as buildable by the setscene code" % x) | 2161 | logger.error("Task %s was never marked as buildable by the setscene code" % x) |
1966 | err = True | 2162 | err = True |
1967 | return err | 2163 | return err |
@@ -1984,8 +2180,11 @@ class RunQueueExecute: | |||
1984 | if not self.sqdone and self.can_start_task(): | 2180 | if not self.sqdone and self.can_start_task(): |
1985 | # Find the next setscene to run | 2181 | # Find the next setscene to run |
1986 | for nexttask in self.sorted_setscene_tids: | 2182 | for nexttask in self.sorted_setscene_tids: |
1987 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values(): | 2183 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred: |
1988 | if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): | 2184 | if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \ |
2185 | nexttask not in self.sq_needed_harddeps and \ | ||
2186 | self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \ | ||
2187 | self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): | ||
1989 | if nexttask not in self.rqdata.target_tids: | 2188 | if nexttask not in self.rqdata.target_tids: |
1990 | logger.debug2("Skipping setscene for task %s" % nexttask) | 2189 | logger.debug2("Skipping setscene for task %s" % nexttask) |
1991 | self.sq_task_skip(nexttask) | 2190 | self.sq_task_skip(nexttask) |
@@ -1993,6 +2192,19 @@ class RunQueueExecute: | |||
1993 | if nexttask in self.sq_deferred: | 2192 | if nexttask in self.sq_deferred: |
1994 | del self.sq_deferred[nexttask] | 2193 | del self.sq_deferred[nexttask] |
1995 | return True | 2194 | return True |
2195 | if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | ||
2196 | logger.debug2("Deferring %s due to hard dependencies" % nexttask) | ||
2197 | updated = False | ||
2198 | for dep in self.sqdata.sq_harddeps_rev[nexttask]: | ||
2199 | if dep not in self.sq_needed_harddeps: | ||
2200 | logger.debug2("Enabling task %s as it is a hard dependency" % dep) | ||
2201 | self.sq_buildable.add(dep) | ||
2202 | self.sq_needed_harddeps.add(dep) | ||
2203 | updated = True | ||
2204 | self.sq_harddep_deferred.add(nexttask) | ||
2205 | if updated: | ||
2206 | return True | ||
2207 | continue | ||
1996 | # If covered tasks are running, need to wait for them to complete | 2208 | # If covered tasks are running, need to wait for them to complete |
1997 | for t in self.sqdata.sq_covered_tasks[nexttask]: | 2209 | for t in self.sqdata.sq_covered_tasks[nexttask]: |
1998 | if t in self.runq_running and t not in self.runq_complete: | 2210 | if t in self.runq_running and t not in self.runq_complete: |
@@ -2007,8 +2219,6 @@ class RunQueueExecute: | |||
2007 | logger.debug("%s didn't become valid, skipping setscene" % nexttask) | 2219 | logger.debug("%s didn't become valid, skipping setscene" % nexttask) |
2008 | self.sq_task_failoutright(nexttask) | 2220 | self.sq_task_failoutright(nexttask) |
2009 | return True | 2221 | return True |
2010 | else: | ||
2011 | self.sqdata.outrightfail.remove(nexttask) | ||
2012 | if nexttask in self.sqdata.outrightfail: | 2222 | if nexttask in self.sqdata.outrightfail: |
2013 | logger.debug2('No package found, so skipping setscene task %s', nexttask) | 2223 | logger.debug2('No package found, so skipping setscene task %s', nexttask) |
2014 | self.sq_task_failoutright(nexttask) | 2224 | self.sq_task_failoutright(nexttask) |
@@ -2040,28 +2250,42 @@ class RunQueueExecute: | |||
2040 | self.sq_task_failoutright(task) | 2250 | self.sq_task_failoutright(task) |
2041 | return True | 2251 | return True |
2042 | 2252 | ||
2043 | startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq) | 2253 | startevent = sceneQueueTaskStarted(task, self.stats, self.rq) |
2044 | bb.event.fire(startevent, self.cfgData) | 2254 | bb.event.fire(startevent, self.cfgData) |
2045 | 2255 | ||
2046 | taskdepdata = self.sq_build_taskdepdata(task) | ||
2047 | |||
2048 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] | 2256 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] |
2049 | taskhash = self.rqdata.get_task_hash(task) | 2257 | realfn = bb.cache.virtualfn2realfn(taskfn)[0] |
2050 | unihash = self.rqdata.get_task_unihash(task) | 2258 | runtask = { |
2259 | 'fn' : taskfn, | ||
2260 | 'task' : task, | ||
2261 | 'taskname' : taskname, | ||
2262 | 'taskhash' : self.rqdata.get_task_hash(task), | ||
2263 | 'unihash' : self.rqdata.get_task_unihash(task), | ||
2264 | 'quieterrors' : True, | ||
2265 | 'appends' : self.cooker.collections[mc].get_file_appends(taskfn), | ||
2266 | 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2], | ||
2267 | 'taskdepdata' : self.sq_build_taskdepdata(task), | ||
2268 | 'dry_run' : False, | ||
2269 | 'taskdep': taskdep, | ||
2270 | 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn], | ||
2271 | 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn], | ||
2272 | 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn] | ||
2273 | } | ||
2274 | |||
2051 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: | 2275 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: |
2052 | if not mc in self.rq.fakeworker: | 2276 | if not mc in self.rq.fakeworker: |
2053 | self.rq.start_fakeworker(self, mc) | 2277 | self.rq.start_fakeworker(self, mc) |
2054 | self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") | 2278 | RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask") |
2055 | self.rq.fakeworker[mc].process.stdin.flush() | 2279 | self.rq.fakeworker[mc].process.stdin.flush() |
2056 | else: | 2280 | else: |
2057 | self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") | 2281 | RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask") |
2058 | self.rq.worker[mc].process.stdin.flush() | 2282 | self.rq.worker[mc].process.stdin.flush() |
2059 | 2283 | ||
2060 | self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) | 2284 | self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False) |
2061 | self.build_stamps2.append(self.build_stamps[task]) | 2285 | self.build_stamps2.append(self.build_stamps[task]) |
2062 | self.sq_running.add(task) | 2286 | self.sq_running.add(task) |
2063 | self.sq_live.add(task) | 2287 | self.sq_live.add(task) |
2064 | self.sq_stats.taskActive() | 2288 | self.stats.updateActiveSetscene(len(self.sq_live)) |
2065 | if self.can_start_task(): | 2289 | if self.can_start_task(): |
2066 | return True | 2290 | return True |
2067 | 2291 | ||
@@ -2092,9 +2316,9 @@ class RunQueueExecute: | |||
2092 | if task is not None: | 2316 | if task is not None: |
2093 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) | 2317 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) |
2094 | 2318 | ||
2095 | if self.rqdata.setscenewhitelist is not None: | 2319 | if self.rqdata.setscene_ignore_tasks is not None: |
2096 | if self.check_setscenewhitelist(task): | 2320 | if self.check_setscene_ignore_tasks(task): |
2097 | self.task_fail(task, "setscene whitelist") | 2321 | self.task_fail(task, "setscene ignore_tasks") |
2098 | return True | 2322 | return True |
2099 | 2323 | ||
2100 | if task in self.tasks_covered: | 2324 | if task in self.tasks_covered: |
@@ -2117,18 +2341,32 @@ class RunQueueExecute: | |||
2117 | self.runq_running.add(task) | 2341 | self.runq_running.add(task) |
2118 | self.stats.taskActive() | 2342 | self.stats.taskActive() |
2119 | if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): | 2343 | if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): |
2120 | bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn) | 2344 | bb.build.make_stamp_mcfn(taskname, taskfn) |
2121 | self.task_complete(task) | 2345 | self.task_complete(task) |
2122 | return True | 2346 | return True |
2123 | else: | 2347 | else: |
2124 | startevent = runQueueTaskStarted(task, self.stats, self.rq) | 2348 | startevent = runQueueTaskStarted(task, self.stats, self.rq) |
2125 | bb.event.fire(startevent, self.cfgData) | 2349 | bb.event.fire(startevent, self.cfgData) |
2126 | 2350 | ||
2127 | taskdepdata = self.build_taskdepdata(task) | ||
2128 | |||
2129 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] | 2351 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] |
2130 | taskhash = self.rqdata.get_task_hash(task) | 2352 | realfn = bb.cache.virtualfn2realfn(taskfn)[0] |
2131 | unihash = self.rqdata.get_task_unihash(task) | 2353 | runtask = { |
2354 | 'fn' : taskfn, | ||
2355 | 'task' : task, | ||
2356 | 'taskname' : taskname, | ||
2357 | 'taskhash' : self.rqdata.get_task_hash(task), | ||
2358 | 'unihash' : self.rqdata.get_task_unihash(task), | ||
2359 | 'quieterrors' : False, | ||
2360 | 'appends' : self.cooker.collections[mc].get_file_appends(taskfn), | ||
2361 | 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2], | ||
2362 | 'taskdepdata' : self.build_taskdepdata(task), | ||
2363 | 'dry_run' : self.rqdata.setscene_enforce, | ||
2364 | 'taskdep': taskdep, | ||
2365 | 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn], | ||
2366 | 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn], | ||
2367 | 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn] | ||
2368 | } | ||
2369 | |||
2132 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): | 2370 | if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): |
2133 | if not mc in self.rq.fakeworker: | 2371 | if not mc in self.rq.fakeworker: |
2134 | try: | 2372 | try: |
@@ -2138,31 +2376,31 @@ class RunQueueExecute: | |||
2138 | self.rq.state = runQueueFailed | 2376 | self.rq.state = runQueueFailed |
2139 | self.stats.taskFailed() | 2377 | self.stats.taskFailed() |
2140 | return True | 2378 | return True |
2141 | self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") | 2379 | RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask") |
2142 | self.rq.fakeworker[mc].process.stdin.flush() | 2380 | self.rq.fakeworker[mc].process.stdin.flush() |
2143 | else: | 2381 | else: |
2144 | self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") | 2382 | RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask") |
2145 | self.rq.worker[mc].process.stdin.flush() | 2383 | self.rq.worker[mc].process.stdin.flush() |
2146 | 2384 | ||
2147 | self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) | 2385 | self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False) |
2148 | self.build_stamps2.append(self.build_stamps[task]) | 2386 | self.build_stamps2.append(self.build_stamps[task]) |
2149 | self.runq_running.add(task) | 2387 | self.runq_running.add(task) |
2150 | self.stats.taskActive() | 2388 | self.stats.taskActive() |
2151 | if self.can_start_task(): | 2389 | if self.can_start_task(): |
2152 | return True | 2390 | return True |
2153 | 2391 | ||
2154 | if self.stats.active > 0 or self.sq_stats.active > 0: | 2392 | if self.stats.active > 0 or self.sq_live: |
2155 | self.rq.read_workers() | 2393 | self.rq.read_workers() |
2156 | return self.rq.active_fds() | 2394 | return self.rq.active_fds() |
2157 | 2395 | ||
2158 | # No more tasks can be run. If we have deferred setscene tasks we should run them. | 2396 | # No more tasks can be run. If we have deferred setscene tasks we should run them. |
2159 | if self.sq_deferred: | 2397 | if self.sq_deferred: |
2160 | tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0]) | 2398 | deferred_tid = list(self.sq_deferred.keys())[0] |
2161 | logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid) | 2399 | blocking_tid = self.sq_deferred.pop(deferred_tid) |
2162 | self.sq_task_failoutright(tid) | 2400 | logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid)) |
2163 | return True | 2401 | return True |
2164 | 2402 | ||
2165 | if len(self.failed_tids) != 0: | 2403 | if self.failed_tids: |
2166 | self.rq.state = runQueueFailed | 2404 | self.rq.state = runQueueFailed |
2167 | return True | 2405 | return True |
2168 | 2406 | ||
@@ -2195,6 +2433,22 @@ class RunQueueExecute: | |||
2195 | ret.add(dep) | 2433 | ret.add(dep) |
2196 | return ret | 2434 | return ret |
2197 | 2435 | ||
2436 | # Build the individual cache entries in advance once to save time | ||
2437 | def build_taskdepdata_cache(self): | ||
2438 | taskdepdata_cache = {} | ||
2439 | for task in self.rqdata.runtaskentries: | ||
2440 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) | ||
2441 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | ||
2442 | deps = self.rqdata.runtaskentries[task].depends | ||
2443 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | ||
2444 | taskhash = self.rqdata.runtaskentries[task].hash | ||
2445 | unihash = self.rqdata.runtaskentries[task].unihash | ||
2446 | deps = self.filtermcdeps(task, mc, deps) | ||
2447 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] | ||
2448 | taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] | ||
2449 | |||
2450 | self.taskdepdata_cache = taskdepdata_cache | ||
2451 | |||
2198 | # We filter out multiconfig dependencies from taskdepdata we pass to the tasks | 2452 | # We filter out multiconfig dependencies from taskdepdata we pass to the tasks |
2199 | # as most code can't handle them | 2453 | # as most code can't handle them |
2200 | def build_taskdepdata(self, task): | 2454 | def build_taskdepdata(self, task): |
@@ -2206,15 +2460,9 @@ class RunQueueExecute: | |||
2206 | while next: | 2460 | while next: |
2207 | additional = [] | 2461 | additional = [] |
2208 | for revdep in next: | 2462 | for revdep in next: |
2209 | (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) | 2463 | self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash |
2210 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 2464 | taskdepdata[revdep] = self.taskdepdata_cache[revdep] |
2211 | deps = self.rqdata.runtaskentries[revdep].depends | 2465 | for revdep2 in self.taskdepdata_cache[revdep][3]: |
2212 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | ||
2213 | taskhash = self.rqdata.runtaskentries[revdep].hash | ||
2214 | unihash = self.rqdata.runtaskentries[revdep].unihash | ||
2215 | deps = self.filtermcdeps(task, mc, deps) | ||
2216 | taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash] | ||
2217 | for revdep2 in deps: | ||
2218 | if revdep2 not in taskdepdata: | 2466 | if revdep2 not in taskdepdata: |
2219 | additional.append(revdep2) | 2467 | additional.append(revdep2) |
2220 | next = additional | 2468 | next = additional |
@@ -2228,7 +2476,7 @@ class RunQueueExecute: | |||
2228 | return | 2476 | return |
2229 | 2477 | ||
2230 | notcovered = set(self.scenequeue_notcovered) | 2478 | notcovered = set(self.scenequeue_notcovered) |
2231 | notcovered |= self.cantskip | 2479 | notcovered |= self.sqdata.cantskip |
2232 | for tid in self.scenequeue_notcovered: | 2480 | for tid in self.scenequeue_notcovered: |
2233 | notcovered |= self.sqdata.sq_covered_tasks[tid] | 2481 | notcovered |= self.sqdata.sq_covered_tasks[tid] |
2234 | notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids) | 2482 | notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids) |
@@ -2241,7 +2489,7 @@ class RunQueueExecute: | |||
2241 | covered.intersection_update(self.tasks_scenequeue_done) | 2489 | covered.intersection_update(self.tasks_scenequeue_done) |
2242 | 2490 | ||
2243 | for tid in notcovered | covered: | 2491 | for tid in notcovered | covered: |
2244 | if len(self.rqdata.runtaskentries[tid].depends) == 0: | 2492 | if not self.rqdata.runtaskentries[tid].depends: |
2245 | self.setbuildable(tid) | 2493 | self.setbuildable(tid) |
2246 | elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete): | 2494 | elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete): |
2247 | self.setbuildable(tid) | 2495 | self.setbuildable(tid) |
@@ -2273,10 +2521,19 @@ class RunQueueExecute: | |||
2273 | self.updated_taskhash_queue.remove((tid, unihash)) | 2521 | self.updated_taskhash_queue.remove((tid, unihash)) |
2274 | 2522 | ||
2275 | if unihash != self.rqdata.runtaskentries[tid].unihash: | 2523 | if unihash != self.rqdata.runtaskentries[tid].unihash: |
2276 | hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash)) | 2524 | # Make sure we rehash any other tasks with the same task hash that we're deferred against. |
2277 | self.rqdata.runtaskentries[tid].unihash = unihash | 2525 | torehash = [tid] |
2278 | bb.parse.siggen.set_unihash(tid, unihash) | 2526 | for deftid in self.sq_deferred: |
2279 | toprocess.add(tid) | 2527 | if self.sq_deferred[deftid] == tid: |
2528 | torehash.append(deftid) | ||
2529 | for hashtid in torehash: | ||
2530 | hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash)) | ||
2531 | self.rqdata.runtaskentries[hashtid].unihash = unihash | ||
2532 | bb.parse.siggen.set_unihash(hashtid, unihash) | ||
2533 | toprocess.add(hashtid) | ||
2534 | if torehash: | ||
2535 | # Need to save after set_unihash above | ||
2536 | bb.parse.siggen.save_unitaskhashes() | ||
2280 | 2537 | ||
2281 | # Work out all tasks which depend upon these | 2538 | # Work out all tasks which depend upon these |
2282 | total = set() | 2539 | total = set() |
@@ -2294,7 +2551,7 @@ class RunQueueExecute: | |||
2294 | # Now iterate those tasks in dependency order to regenerate their taskhash/unihash | 2551 | # Now iterate those tasks in dependency order to regenerate their taskhash/unihash |
2295 | next = set() | 2552 | next = set() |
2296 | for p in total: | 2553 | for p in total: |
2297 | if len(self.rqdata.runtaskentries[p].depends) == 0: | 2554 | if not self.rqdata.runtaskentries[p].depends: |
2298 | next.add(p) | 2555 | next.add(p) |
2299 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): | 2556 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): |
2300 | next.add(p) | 2557 | next.add(p) |
@@ -2304,11 +2561,10 @@ class RunQueueExecute: | |||
2304 | current = next.copy() | 2561 | current = next.copy() |
2305 | next = set() | 2562 | next = set() |
2306 | for tid in current: | 2563 | for tid in current: |
2307 | if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): | 2564 | if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): |
2308 | continue | 2565 | continue |
2309 | orighash = self.rqdata.runtaskentries[tid].hash | 2566 | orighash = self.rqdata.runtaskentries[tid].hash |
2310 | dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid)) | 2567 | newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) |
2311 | newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc) | ||
2312 | origuni = self.rqdata.runtaskentries[tid].unihash | 2568 | origuni = self.rqdata.runtaskentries[tid].unihash |
2313 | newuni = bb.parse.siggen.get_unihash(tid) | 2569 | newuni = bb.parse.siggen.get_unihash(tid) |
2314 | # FIXME, need to check it can come from sstate at all for determinism? | 2570 | # FIXME, need to check it can come from sstate at all for determinism? |
@@ -2334,9 +2590,9 @@ class RunQueueExecute: | |||
2334 | 2590 | ||
2335 | if changed: | 2591 | if changed: |
2336 | for mc in self.rq.worker: | 2592 | for mc in self.rq.worker: |
2337 | self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") | 2593 | RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes") |
2338 | for mc in self.rq.fakeworker: | 2594 | for mc in self.rq.fakeworker: |
2339 | self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") | 2595 | RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes") |
2340 | 2596 | ||
2341 | hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) | 2597 | hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) |
2342 | 2598 | ||
@@ -2370,7 +2626,7 @@ class RunQueueExecute: | |||
2370 | self.tasks_scenequeue_done.remove(tid) | 2626 | self.tasks_scenequeue_done.remove(tid) |
2371 | for dep in self.sqdata.sq_covered_tasks[tid]: | 2627 | for dep in self.sqdata.sq_covered_tasks[tid]: |
2372 | if dep in self.runq_complete and dep not in self.runq_tasksrun: | 2628 | if dep in self.runq_complete and dep not in self.runq_tasksrun: |
2373 | bb.error("Task %s marked as completed but now needing to rerun? Aborting build." % dep) | 2629 | bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep) |
2374 | self.failed_tids.append(tid) | 2630 | self.failed_tids.append(tid) |
2375 | self.rq.state = runQueueCleanUp | 2631 | self.rq.state = runQueueCleanUp |
2376 | return | 2632 | return |
@@ -2383,17 +2639,6 @@ class RunQueueExecute: | |||
2383 | self.sq_buildable.remove(tid) | 2639 | self.sq_buildable.remove(tid) |
2384 | if tid in self.sq_running: | 2640 | if tid in self.sq_running: |
2385 | self.sq_running.remove(tid) | 2641 | self.sq_running.remove(tid) |
2386 | harddepfail = False | ||
2387 | for t in self.sqdata.sq_harddeps: | ||
2388 | if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered: | ||
2389 | harddepfail = True | ||
2390 | break | ||
2391 | if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | ||
2392 | if tid not in self.sq_buildable: | ||
2393 | self.sq_buildable.add(tid) | ||
2394 | if len(self.sqdata.sq_revdeps[tid]) == 0: | ||
2395 | self.sq_buildable.add(tid) | ||
2396 | |||
2397 | if tid in self.sqdata.outrightfail: | 2642 | if tid in self.sqdata.outrightfail: |
2398 | self.sqdata.outrightfail.remove(tid) | 2643 | self.sqdata.outrightfail.remove(tid) |
2399 | if tid in self.scenequeue_notcovered: | 2644 | if tid in self.scenequeue_notcovered: |
@@ -2404,7 +2649,7 @@ class RunQueueExecute: | |||
2404 | self.scenequeue_notneeded.remove(tid) | 2649 | self.scenequeue_notneeded.remove(tid) |
2405 | 2650 | ||
2406 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 2651 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
2407 | self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True) | 2652 | self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False) |
2408 | 2653 | ||
2409 | if tid in self.stampcache: | 2654 | if tid in self.stampcache: |
2410 | del self.stampcache[tid] | 2655 | del self.stampcache[tid] |
@@ -2412,28 +2657,62 @@ class RunQueueExecute: | |||
2412 | if tid in self.build_stamps: | 2657 | if tid in self.build_stamps: |
2413 | del self.build_stamps[tid] | 2658 | del self.build_stamps[tid] |
2414 | 2659 | ||
2415 | update_tasks.append((tid, harddepfail, tid in self.sqdata.valid)) | 2660 | update_tasks.append(tid) |
2661 | |||
2662 | update_tasks2 = [] | ||
2663 | for tid in update_tasks: | ||
2664 | harddepfail = False | ||
2665 | for t in self.sqdata.sq_harddeps_rev[tid]: | ||
2666 | if t in self.scenequeue_notcovered: | ||
2667 | harddepfail = True | ||
2668 | break | ||
2669 | if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | ||
2670 | if tid not in self.sq_buildable: | ||
2671 | self.sq_buildable.add(tid) | ||
2672 | if not self.sqdata.sq_revdeps[tid]: | ||
2673 | self.sq_buildable.add(tid) | ||
2416 | 2674 | ||
2417 | if update_tasks: | 2675 | update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid)) |
2676 | |||
2677 | if update_tasks2: | ||
2418 | self.sqdone = False | 2678 | self.sqdone = False |
2419 | update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) | 2679 | for mc in sorted(self.sqdata.multiconfigs): |
2680 | for tid in sorted([t[0] for t in update_tasks2]): | ||
2681 | if mc_from_tid(tid) != mc: | ||
2682 | continue | ||
2683 | h = pending_hash_index(tid, self.rqdata) | ||
2684 | if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]: | ||
2685 | self.sq_deferred[tid] = self.sqdata.hashes[h] | ||
2686 | bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h])) | ||
2687 | update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) | ||
2420 | 2688 | ||
2421 | for (tid, harddepfail, origvalid) in update_tasks: | 2689 | for (tid, harddepfail, origvalid) in update_tasks2: |
2422 | if tid in self.sqdata.valid and not origvalid: | 2690 | if tid in self.sqdata.valid and not origvalid: |
2423 | hashequiv_logger.verbose("Setscene task %s became valid" % tid) | 2691 | hashequiv_logger.verbose("Setscene task %s became valid" % tid) |
2424 | if harddepfail: | 2692 | if harddepfail: |
2693 | logger.debug2("%s has an unavailable hard dependency so skipping" % (tid)) | ||
2425 | self.sq_task_failoutright(tid) | 2694 | self.sq_task_failoutright(tid) |
2426 | 2695 | ||
2427 | if changed: | 2696 | if changed: |
2697 | self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered)) | ||
2698 | self.sq_needed_harddeps = set() | ||
2699 | self.sq_harddep_deferred = set() | ||
2428 | self.holdoff_need_update = True | 2700 | self.holdoff_need_update = True |
2429 | 2701 | ||
2430 | def scenequeue_updatecounters(self, task, fail=False): | 2702 | def scenequeue_updatecounters(self, task, fail=False): |
2431 | 2703 | ||
2432 | for dep in sorted(self.sqdata.sq_deps[task]): | 2704 | if fail and task in self.sqdata.sq_harddeps: |
2433 | if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: | 2705 | for dep in sorted(self.sqdata.sq_harddeps[task]): |
2706 | if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered: | ||
2707 | # dependency could be already processed, e.g. noexec setscene task | ||
2708 | continue | ||
2709 | noexec, stamppresent = check_setscene_stamps(dep, self.rqdata, self.rq, self.stampcache) | ||
2710 | if noexec or stamppresent: | ||
2711 | continue | ||
2434 | logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) | 2712 | logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) |
2435 | self.sq_task_failoutright(dep) | 2713 | self.sq_task_failoutright(dep) |
2436 | continue | 2714 | continue |
2715 | for dep in sorted(self.sqdata.sq_deps[task]): | ||
2437 | if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | 2716 | if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): |
2438 | if dep not in self.sq_buildable: | 2717 | if dep not in self.sq_buildable: |
2439 | self.sq_buildable.add(dep) | 2718 | self.sq_buildable.add(dep) |
@@ -2452,6 +2731,14 @@ class RunQueueExecute: | |||
2452 | new.add(dep) | 2731 | new.add(dep) |
2453 | next = new | 2732 | next = new |
2454 | 2733 | ||
2734 | # If this task was one which other setscene tasks have a hard dependency upon, we need | ||
2735 | # to walk through the hard dependencies and allow execution of those which have completed dependencies. | ||
2736 | if task in self.sqdata.sq_harddeps: | ||
2737 | for dep in self.sq_harddep_deferred.copy(): | ||
2738 | if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | ||
2739 | self.sq_harddep_deferred.remove(dep) | ||
2740 | |||
2741 | self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered)) | ||
2455 | self.holdoff_need_update = True | 2742 | self.holdoff_need_update = True |
2456 | 2743 | ||
2457 | def sq_task_completeoutright(self, task): | 2744 | def sq_task_completeoutright(self, task): |
@@ -2466,22 +2753,20 @@ class RunQueueExecute: | |||
2466 | self.scenequeue_updatecounters(task) | 2753 | self.scenequeue_updatecounters(task) |
2467 | 2754 | ||
2468 | def sq_check_taskfail(self, task): | 2755 | def sq_check_taskfail(self, task): |
2469 | if self.rqdata.setscenewhitelist is not None: | 2756 | if self.rqdata.setscene_ignore_tasks is not None: |
2470 | realtask = task.split('_setscene')[0] | 2757 | realtask = task.split('_setscene')[0] |
2471 | (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask) | 2758 | (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask) |
2472 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 2759 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] |
2473 | if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): | 2760 | if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks): |
2474 | logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) | 2761 | logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) |
2475 | self.rq.state = runQueueCleanUp | 2762 | self.rq.state = runQueueCleanUp |
2476 | 2763 | ||
2477 | def sq_task_complete(self, task): | 2764 | def sq_task_complete(self, task): |
2478 | self.sq_stats.taskCompleted() | 2765 | bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) |
2479 | bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData) | ||
2480 | self.sq_task_completeoutright(task) | 2766 | self.sq_task_completeoutright(task) |
2481 | 2767 | ||
2482 | def sq_task_fail(self, task, result): | 2768 | def sq_task_fail(self, task, result): |
2483 | self.sq_stats.taskFailed() | 2769 | bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData) |
2484 | bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData) | ||
2485 | self.scenequeue_notcovered.add(task) | 2770 | self.scenequeue_notcovered.add(task) |
2486 | self.scenequeue_updatecounters(task, True) | 2771 | self.scenequeue_updatecounters(task, True) |
2487 | self.sq_check_taskfail(task) | 2772 | self.sq_check_taskfail(task) |
@@ -2489,8 +2774,6 @@ class RunQueueExecute: | |||
2489 | def sq_task_failoutright(self, task): | 2774 | def sq_task_failoutright(self, task): |
2490 | self.sq_running.add(task) | 2775 | self.sq_running.add(task) |
2491 | self.sq_buildable.add(task) | 2776 | self.sq_buildable.add(task) |
2492 | self.sq_stats.taskSkipped() | ||
2493 | self.sq_stats.taskCompleted() | ||
2494 | self.scenequeue_notcovered.add(task) | 2777 | self.scenequeue_notcovered.add(task) |
2495 | self.scenequeue_updatecounters(task, True) | 2778 | self.scenequeue_updatecounters(task, True) |
2496 | 2779 | ||
@@ -2498,8 +2781,6 @@ class RunQueueExecute: | |||
2498 | self.sq_running.add(task) | 2781 | self.sq_running.add(task) |
2499 | self.sq_buildable.add(task) | 2782 | self.sq_buildable.add(task) |
2500 | self.sq_task_completeoutright(task) | 2783 | self.sq_task_completeoutright(task) |
2501 | self.sq_stats.taskSkipped() | ||
2502 | self.sq_stats.taskCompleted() | ||
2503 | 2784 | ||
2504 | def sq_build_taskdepdata(self, task): | 2785 | def sq_build_taskdepdata(self, task): |
2505 | def getsetscenedeps(tid): | 2786 | def getsetscenedeps(tid): |
@@ -2530,7 +2811,8 @@ class RunQueueExecute: | |||
2530 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | 2811 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] |
2531 | taskhash = self.rqdata.runtaskentries[revdep].hash | 2812 | taskhash = self.rqdata.runtaskentries[revdep].hash |
2532 | unihash = self.rqdata.runtaskentries[revdep].unihash | 2813 | unihash = self.rqdata.runtaskentries[revdep].unihash |
2533 | taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash] | 2814 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] |
2815 | taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] | ||
2534 | for revdep2 in deps: | 2816 | for revdep2 in deps: |
2535 | if revdep2 not in taskdepdata: | 2817 | if revdep2 not in taskdepdata: |
2536 | additional.append(revdep2) | 2818 | additional.append(revdep2) |
@@ -2539,8 +2821,8 @@ class RunQueueExecute: | |||
2539 | #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) | 2821 | #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) |
2540 | return taskdepdata | 2822 | return taskdepdata |
2541 | 2823 | ||
2542 | def check_setscenewhitelist(self, tid): | 2824 | def check_setscene_ignore_tasks(self, tid): |
2543 | # Check task that is going to run against the whitelist | 2825 | # Check task that is going to run against the ignore tasks list |
2544 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 2826 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
2545 | # Ignore covered tasks | 2827 | # Ignore covered tasks |
2546 | if tid in self.tasks_covered: | 2828 | if tid in self.tasks_covered: |
@@ -2554,14 +2836,15 @@ class RunQueueExecute: | |||
2554 | return False | 2836 | return False |
2555 | 2837 | ||
2556 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 2838 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] |
2557 | if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): | 2839 | if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks): |
2558 | if tid in self.rqdata.runq_setscene_tids: | 2840 | if tid in self.rqdata.runq_setscene_tids: |
2559 | msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname) | 2841 | msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)] |
2560 | else: | 2842 | else: |
2561 | msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname) | 2843 | msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)] |
2562 | for t in self.scenequeue_notcovered: | 2844 | for t in self.scenequeue_notcovered: |
2563 | msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash) | 2845 | msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash)) |
2564 | logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered)) | 2846 | msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered)) |
2847 | logger.error("".join(msg)) | ||
2565 | return True | 2848 | return True |
2566 | return False | 2849 | return False |
2567 | 2850 | ||
@@ -2573,6 +2856,7 @@ class SQData(object): | |||
2573 | self.sq_revdeps = {} | 2856 | self.sq_revdeps = {} |
2574 | # Injected inter-setscene task dependencies | 2857 | # Injected inter-setscene task dependencies |
2575 | self.sq_harddeps = {} | 2858 | self.sq_harddeps = {} |
2859 | self.sq_harddeps_rev = {} | ||
2576 | # Cache of stamp files so duplicates can't run in parallel | 2860 | # Cache of stamp files so duplicates can't run in parallel |
2577 | self.stamps = {} | 2861 | self.stamps = {} |
2578 | # Setscene tasks directly depended upon by the build | 2862 | # Setscene tasks directly depended upon by the build |
@@ -2582,12 +2866,17 @@ class SQData(object): | |||
2582 | # A list of normal tasks a setscene task covers | 2866 | # A list of normal tasks a setscene task covers |
2583 | self.sq_covered_tasks = {} | 2867 | self.sq_covered_tasks = {} |
2584 | 2868 | ||
2585 | def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | 2869 | def build_scenequeue_data(sqdata, rqdata, sqrq): |
2586 | 2870 | ||
2587 | sq_revdeps = {} | 2871 | sq_revdeps = {} |
2588 | sq_revdeps_squash = {} | 2872 | sq_revdeps_squash = {} |
2589 | sq_collated_deps = {} | 2873 | sq_collated_deps = {} |
2590 | 2874 | ||
2875 | # We can't skip specified target tasks which aren't setscene tasks | ||
2876 | sqdata.cantskip = set(rqdata.target_tids) | ||
2877 | sqdata.cantskip.difference_update(rqdata.runq_setscene_tids) | ||
2878 | sqdata.cantskip.intersection_update(rqdata.runtaskentries) | ||
2879 | |||
2591 | # We need to construct a dependency graph for the setscene functions. Intermediate | 2880 | # We need to construct a dependency graph for the setscene functions. Intermediate |
2592 | # dependencies between the setscene tasks only complicate the code. This code | 2881 | # dependencies between the setscene tasks only complicate the code. This code |
2593 | # therefore aims to collapse the huge runqueue dependency tree into a smaller one | 2882 | # therefore aims to collapse the huge runqueue dependency tree into a smaller one |
@@ -2600,7 +2889,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2600 | for tid in rqdata.runtaskentries: | 2889 | for tid in rqdata.runtaskentries: |
2601 | sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps) | 2890 | sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps) |
2602 | sq_revdeps_squash[tid] = set() | 2891 | sq_revdeps_squash[tid] = set() |
2603 | if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids: | 2892 | if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids: |
2604 | #bb.warn("Added endpoint %s" % (tid)) | 2893 | #bb.warn("Added endpoint %s" % (tid)) |
2605 | endpoints[tid] = set() | 2894 | endpoints[tid] = set() |
2606 | 2895 | ||
@@ -2634,16 +2923,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2634 | sq_revdeps_squash[point] = set() | 2923 | sq_revdeps_squash[point] = set() |
2635 | if point in rqdata.runq_setscene_tids: | 2924 | if point in rqdata.runq_setscene_tids: |
2636 | sq_revdeps_squash[point] = tasks | 2925 | sq_revdeps_squash[point] = tasks |
2637 | tasks = set() | ||
2638 | continue | 2926 | continue |
2639 | for dep in rqdata.runtaskentries[point].depends: | 2927 | for dep in rqdata.runtaskentries[point].depends: |
2640 | if point in sq_revdeps[dep]: | 2928 | if point in sq_revdeps[dep]: |
2641 | sq_revdeps[dep].remove(point) | 2929 | sq_revdeps[dep].remove(point) |
2642 | if tasks: | 2930 | if tasks: |
2643 | sq_revdeps_squash[dep] |= tasks | 2931 | sq_revdeps_squash[dep] |= tasks |
2644 | if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids: | 2932 | if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids: |
2645 | newendpoints[dep] = task | 2933 | newendpoints[dep] = task |
2646 | if len(newendpoints) != 0: | 2934 | if newendpoints: |
2647 | process_endpoints(newendpoints) | 2935 | process_endpoints(newendpoints) |
2648 | 2936 | ||
2649 | process_endpoints(endpoints) | 2937 | process_endpoints(endpoints) |
@@ -2655,16 +2943,16 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2655 | # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon | 2943 | # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon |
2656 | new = True | 2944 | new = True |
2657 | for tid in rqdata.runtaskentries: | 2945 | for tid in rqdata.runtaskentries: |
2658 | if len(rqdata.runtaskentries[tid].revdeps) == 0: | 2946 | if not rqdata.runtaskentries[tid].revdeps: |
2659 | sqdata.unskippable.add(tid) | 2947 | sqdata.unskippable.add(tid) |
2660 | sqdata.unskippable |= sqrq.cantskip | 2948 | sqdata.unskippable |= sqdata.cantskip |
2661 | while new: | 2949 | while new: |
2662 | new = False | 2950 | new = False |
2663 | orig = sqdata.unskippable.copy() | 2951 | orig = sqdata.unskippable.copy() |
2664 | for tid in sorted(orig, reverse=True): | 2952 | for tid in sorted(orig, reverse=True): |
2665 | if tid in rqdata.runq_setscene_tids: | 2953 | if tid in rqdata.runq_setscene_tids: |
2666 | continue | 2954 | continue |
2667 | if len(rqdata.runtaskentries[tid].depends) == 0: | 2955 | if not rqdata.runtaskentries[tid].depends: |
2668 | # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable | 2956 | # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable |
2669 | sqrq.setbuildable(tid) | 2957 | sqrq.setbuildable(tid) |
2670 | sqdata.unskippable |= rqdata.runtaskentries[tid].depends | 2958 | sqdata.unskippable |= rqdata.runtaskentries[tid].depends |
@@ -2679,8 +2967,8 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2679 | for taskcounter, tid in enumerate(rqdata.runtaskentries): | 2967 | for taskcounter, tid in enumerate(rqdata.runtaskentries): |
2680 | if tid in rqdata.runq_setscene_tids: | 2968 | if tid in rqdata.runq_setscene_tids: |
2681 | pass | 2969 | pass |
2682 | elif len(sq_revdeps_squash[tid]) != 0: | 2970 | elif sq_revdeps_squash[tid]: |
2683 | bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.") | 2971 | bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.") |
2684 | else: | 2972 | else: |
2685 | del sq_revdeps_squash[tid] | 2973 | del sq_revdeps_squash[tid] |
2686 | rqdata.init_progress_reporter.update(taskcounter) | 2974 | rqdata.init_progress_reporter.update(taskcounter) |
@@ -2694,7 +2982,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2694 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 2982 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
2695 | realtid = tid + "_setscene" | 2983 | realtid = tid + "_setscene" |
2696 | idepends = rqdata.taskData[mc].taskentries[realtid].idepends | 2984 | idepends = rqdata.taskData[mc].taskentries[realtid].idepends |
2697 | sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True) | 2985 | sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False) |
2986 | |||
2987 | sqdata.sq_harddeps_rev[tid] = set() | ||
2698 | for (depname, idependtask) in idepends: | 2988 | for (depname, idependtask) in idepends: |
2699 | 2989 | ||
2700 | if depname not in rqdata.taskData[mc].build_targets: | 2990 | if depname not in rqdata.taskData[mc].build_targets: |
@@ -2707,20 +2997,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2707 | if deptid not in rqdata.runtaskentries: | 2997 | if deptid not in rqdata.runtaskentries: |
2708 | bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask)) | 2998 | bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask)) |
2709 | 2999 | ||
3000 | logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid)) | ||
3001 | |||
2710 | if not deptid in sqdata.sq_harddeps: | 3002 | if not deptid in sqdata.sq_harddeps: |
2711 | sqdata.sq_harddeps[deptid] = set() | 3003 | sqdata.sq_harddeps[deptid] = set() |
2712 | sqdata.sq_harddeps[deptid].add(tid) | 3004 | sqdata.sq_harddeps[deptid].add(tid) |
2713 | 3005 | sqdata.sq_harddeps_rev[tid].add(deptid) | |
2714 | sq_revdeps_squash[tid].add(deptid) | ||
2715 | # Have to zero this to avoid circular dependencies | ||
2716 | sq_revdeps_squash[deptid] = set() | ||
2717 | 3006 | ||
2718 | rqdata.init_progress_reporter.next_stage() | 3007 | rqdata.init_progress_reporter.next_stage() |
2719 | 3008 | ||
2720 | for task in sqdata.sq_harddeps: | ||
2721 | for dep in sqdata.sq_harddeps[task]: | ||
2722 | sq_revdeps_squash[dep].add(task) | ||
2723 | |||
2724 | rqdata.init_progress_reporter.next_stage() | 3009 | rqdata.init_progress_reporter.next_stage() |
2725 | 3010 | ||
2726 | #for tid in sq_revdeps_squash: | 3011 | #for tid in sq_revdeps_squash: |
@@ -2744,16 +3029,47 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): | |||
2744 | sqdata.multiconfigs = set() | 3029 | sqdata.multiconfigs = set() |
2745 | for tid in sqdata.sq_revdeps: | 3030 | for tid in sqdata.sq_revdeps: |
2746 | sqdata.multiconfigs.add(mc_from_tid(tid)) | 3031 | sqdata.multiconfigs.add(mc_from_tid(tid)) |
2747 | if len(sqdata.sq_revdeps[tid]) == 0: | 3032 | if not sqdata.sq_revdeps[tid]: |
2748 | sqrq.sq_buildable.add(tid) | 3033 | sqrq.sq_buildable.add(tid) |
2749 | 3034 | ||
2750 | rqdata.init_progress_reporter.finish() | 3035 | rqdata.init_progress_reporter.next_stage() |
2751 | 3036 | ||
2752 | sqdata.noexec = set() | 3037 | sqdata.noexec = set() |
2753 | sqdata.stamppresent = set() | 3038 | sqdata.stamppresent = set() |
2754 | sqdata.valid = set() | 3039 | sqdata.valid = set() |
2755 | 3040 | ||
2756 | update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True) | 3041 | sqdata.hashes = {} |
3042 | sqrq.sq_deferred = {} | ||
3043 | for mc in sorted(sqdata.multiconfigs): | ||
3044 | for tid in sorted(sqdata.sq_revdeps): | ||
3045 | if mc_from_tid(tid) != mc: | ||
3046 | continue | ||
3047 | h = pending_hash_index(tid, rqdata) | ||
3048 | if h not in sqdata.hashes: | ||
3049 | sqdata.hashes[h] = tid | ||
3050 | else: | ||
3051 | sqrq.sq_deferred[tid] = sqdata.hashes[h] | ||
3052 | bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h])) | ||
3053 | |||
3054 | def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False): | ||
3055 | |||
3056 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | ||
3057 | |||
3058 | taskdep = rqdata.dataCaches[mc].task_deps[taskfn] | ||
3059 | |||
3060 | if 'noexec' in taskdep and taskname in taskdep['noexec']: | ||
3061 | bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn) | ||
3062 | return True, False | ||
3063 | |||
3064 | if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache): | ||
3065 | logger.debug2('Setscene stamp current for task %s', tid) | ||
3066 | return False, True | ||
3067 | |||
3068 | if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): | ||
3069 | logger.debug2('Normal stamp current for task %s', tid) | ||
3070 | return False, True | ||
3071 | |||
3072 | return False, False | ||
2757 | 3073 | ||
2758 | def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): | 3074 | def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): |
2759 | 3075 | ||
@@ -2764,55 +3080,42 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s | |||
2764 | sqdata.stamppresent.remove(tid) | 3080 | sqdata.stamppresent.remove(tid) |
2765 | if tid in sqdata.valid: | 3081 | if tid in sqdata.valid: |
2766 | sqdata.valid.remove(tid) | 3082 | sqdata.valid.remove(tid) |
3083 | if tid in sqdata.outrightfail: | ||
3084 | sqdata.outrightfail.remove(tid) | ||
2767 | 3085 | ||
2768 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 3086 | noexec, stamppresent = check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=True) |
2769 | |||
2770 | taskdep = rqdata.dataCaches[mc].task_deps[taskfn] | ||
2771 | 3087 | ||
2772 | if 'noexec' in taskdep and taskname in taskdep['noexec']: | 3088 | if noexec: |
2773 | sqdata.noexec.add(tid) | 3089 | sqdata.noexec.add(tid) |
2774 | sqrq.sq_task_skip(tid) | 3090 | sqrq.sq_task_skip(tid) |
2775 | bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn) | 3091 | logger.debug2("%s is noexec so skipping setscene" % (tid)) |
2776 | continue | ||
2777 | |||
2778 | if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache): | ||
2779 | logger.debug2('Setscene stamp current for task %s', tid) | ||
2780 | sqdata.stamppresent.add(tid) | ||
2781 | sqrq.sq_task_skip(tid) | ||
2782 | continue | 3092 | continue |
2783 | 3093 | ||
2784 | if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): | 3094 | if stamppresent: |
2785 | logger.debug2('Normal stamp current for task %s', tid) | ||
2786 | sqdata.stamppresent.add(tid) | 3095 | sqdata.stamppresent.add(tid) |
2787 | sqrq.sq_task_skip(tid) | 3096 | sqrq.sq_task_skip(tid) |
3097 | logger.debug2("%s has a valid stamp, skipping" % (tid)) | ||
2788 | continue | 3098 | continue |
2789 | 3099 | ||
2790 | tocheck.add(tid) | 3100 | tocheck.add(tid) |
2791 | 3101 | ||
2792 | sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) | 3102 | sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) |
2793 | 3103 | ||
2794 | sqdata.hashes = {} | 3104 | for tid in tids: |
2795 | for mc in sorted(sqdata.multiconfigs): | 3105 | if tid in sqdata.stamppresent: |
2796 | for tid in sorted(sqdata.sq_revdeps): | 3106 | continue |
2797 | if mc_from_tid(tid) != mc: | 3107 | if tid in sqdata.valid: |
2798 | continue | 3108 | continue |
2799 | if tid in sqdata.stamppresent: | 3109 | if tid in sqdata.noexec: |
2800 | continue | 3110 | continue |
2801 | if tid in sqdata.valid: | 3111 | if tid in sqrq.scenequeue_covered: |
2802 | continue | 3112 | continue |
2803 | if tid in sqdata.noexec: | 3113 | if tid in sqrq.scenequeue_notcovered: |
2804 | continue | 3114 | continue |
2805 | if tid in sqrq.scenequeue_notcovered: | 3115 | if tid in sqrq.sq_deferred: |
2806 | continue | 3116 | continue |
2807 | sqdata.outrightfail.add(tid) | 3117 | sqdata.outrightfail.add(tid) |
2808 | 3118 | logger.debug2("%s already handled (fallthrough), skipping" % (tid)) | |
2809 | h = pending_hash_index(tid, rqdata) | ||
2810 | if h not in sqdata.hashes: | ||
2811 | sqdata.hashes[h] = tid | ||
2812 | else: | ||
2813 | sqrq.sq_deferred[tid] = sqdata.hashes[h] | ||
2814 | bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h])) | ||
2815 | |||
2816 | 3119 | ||
2817 | class TaskFailure(Exception): | 3120 | class TaskFailure(Exception): |
2818 | """ | 3121 | """ |
@@ -2876,12 +3179,16 @@ class runQueueTaskFailed(runQueueEvent): | |||
2876 | """ | 3179 | """ |
2877 | Event notifying a task failed | 3180 | Event notifying a task failed |
2878 | """ | 3181 | """ |
2879 | def __init__(self, task, stats, exitcode, rq): | 3182 | def __init__(self, task, stats, exitcode, rq, fakeroot_log=None): |
2880 | runQueueEvent.__init__(self, task, stats, rq) | 3183 | runQueueEvent.__init__(self, task, stats, rq) |
2881 | self.exitcode = exitcode | 3184 | self.exitcode = exitcode |
3185 | self.fakeroot_log = fakeroot_log | ||
2882 | 3186 | ||
2883 | def __str__(self): | 3187 | def __str__(self): |
2884 | return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode) | 3188 | if self.fakeroot_log: |
3189 | return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log) | ||
3190 | else: | ||
3191 | return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode) | ||
2885 | 3192 | ||
2886 | class sceneQueueTaskFailed(sceneQueueEvent): | 3193 | class sceneQueueTaskFailed(sceneQueueEvent): |
2887 | """ | 3194 | """ |
@@ -2933,18 +3240,16 @@ class runQueuePipe(): | |||
2933 | """ | 3240 | """ |
2934 | Abstraction for a pipe between a worker thread and the server | 3241 | Abstraction for a pipe between a worker thread and the server |
2935 | """ | 3242 | """ |
2936 | def __init__(self, pipein, pipeout, d, rq, rqexec): | 3243 | def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None): |
2937 | self.input = pipein | 3244 | self.input = pipein |
2938 | if pipeout: | 3245 | if pipeout: |
2939 | pipeout.close() | 3246 | pipeout.close() |
2940 | bb.utils.nonblockingfd(self.input) | 3247 | bb.utils.nonblockingfd(self.input) |
2941 | self.queue = b"" | 3248 | self.queue = bytearray() |
2942 | self.d = d | 3249 | self.d = d |
2943 | self.rq = rq | 3250 | self.rq = rq |
2944 | self.rqexec = rqexec | 3251 | self.rqexec = rqexec |
2945 | 3252 | self.fakerootlogs = fakerootlogs | |
2946 | def setrunqueueexec(self, rqexec): | ||
2947 | self.rqexec = rqexec | ||
2948 | 3253 | ||
2949 | def read(self): | 3254 | def read(self): |
2950 | for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]: | 3255 | for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]: |
@@ -2956,13 +3261,13 @@ class runQueuePipe(): | |||
2956 | 3261 | ||
2957 | start = len(self.queue) | 3262 | start = len(self.queue) |
2958 | try: | 3263 | try: |
2959 | self.queue = self.queue + (self.input.read(102400) or b"") | 3264 | self.queue.extend(self.input.read(102400) or b"") |
2960 | except (OSError, IOError) as e: | 3265 | except (OSError, IOError) as e: |
2961 | if e.errno != errno.EAGAIN: | 3266 | if e.errno != errno.EAGAIN: |
2962 | raise | 3267 | raise |
2963 | end = len(self.queue) | 3268 | end = len(self.queue) |
2964 | found = True | 3269 | found = True |
2965 | while found and len(self.queue): | 3270 | while found and self.queue: |
2966 | found = False | 3271 | found = False |
2967 | index = self.queue.find(b"</event>") | 3272 | index = self.queue.find(b"</event>") |
2968 | while index != -1 and self.queue.startswith(b"<event>"): | 3273 | while index != -1 and self.queue.startswith(b"<event>"): |
@@ -2987,7 +3292,11 @@ class runQueuePipe(): | |||
2987 | task, status = pickle.loads(self.queue[10:index]) | 3292 | task, status = pickle.loads(self.queue[10:index]) |
2988 | except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e: | 3293 | except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e: |
2989 | bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index])) | 3294 | bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index])) |
2990 | self.rqexec.runqueue_process_waitpid(task, status) | 3295 | (_, _, _, taskfn) = split_tid_mcfn(task) |
3296 | fakerootlog = None | ||
3297 | if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs: | ||
3298 | fakerootlog = self.fakerootlogs[taskfn] | ||
3299 | self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog) | ||
2991 | found = True | 3300 | found = True |
2992 | self.queue = self.queue[index+11:] | 3301 | self.queue = self.queue[index+11:] |
2993 | index = self.queue.find(b"</exitcode>") | 3302 | index = self.queue.find(b"</exitcode>") |
@@ -2996,16 +3305,16 @@ class runQueuePipe(): | |||
2996 | def close(self): | 3305 | def close(self): |
2997 | while self.read(): | 3306 | while self.read(): |
2998 | continue | 3307 | continue |
2999 | if len(self.queue) > 0: | 3308 | if self.queue: |
3000 | print("Warning, worker left partial message: %s" % self.queue) | 3309 | print("Warning, worker left partial message: %s" % self.queue) |
3001 | self.input.close() | 3310 | self.input.close() |
3002 | 3311 | ||
3003 | def get_setscene_enforce_whitelist(d, targets): | 3312 | def get_setscene_enforce_ignore_tasks(d, targets): |
3004 | if d.getVar('BB_SETSCENE_ENFORCE') != '1': | 3313 | if d.getVar('BB_SETSCENE_ENFORCE') != '1': |
3005 | return None | 3314 | return None |
3006 | whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() | 3315 | ignore_tasks = (d.getVar("BB_SETSCENE_ENFORCE_IGNORE_TASKS") or "").split() |
3007 | outlist = [] | 3316 | outlist = [] |
3008 | for item in whitelist[:]: | 3317 | for item in ignore_tasks[:]: |
3009 | if item.startswith('%:'): | 3318 | if item.startswith('%:'): |
3010 | for (mc, target, task, fn) in targets: | 3319 | for (mc, target, task, fn) in targets: |
3011 | outlist.append(target + ':' + item.split(':')[1]) | 3320 | outlist.append(target + ':' + item.split(':')[1]) |
@@ -3013,12 +3322,12 @@ def get_setscene_enforce_whitelist(d, targets): | |||
3013 | outlist.append(item) | 3322 | outlist.append(item) |
3014 | return outlist | 3323 | return outlist |
3015 | 3324 | ||
3016 | def check_setscene_enforce_whitelist(pn, taskname, whitelist): | 3325 | def check_setscene_enforce_ignore_tasks(pn, taskname, ignore_tasks): |
3017 | import fnmatch | 3326 | import fnmatch |
3018 | if whitelist is not None: | 3327 | if ignore_tasks is not None: |
3019 | item = '%s:%s' % (pn, taskname) | 3328 | item = '%s:%s' % (pn, taskname) |
3020 | for whitelist_item in whitelist: | 3329 | for ignore_tasks in ignore_tasks: |
3021 | if fnmatch.fnmatch(item, whitelist_item): | 3330 | if fnmatch.fnmatch(item, ignore_tasks): |
3022 | return True | 3331 | return True |
3023 | return False | 3332 | return False |
3024 | return True | 3333 | return True |
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py index b27b4aefe0..76b189291d 100644 --- a/bitbake/lib/bb/server/process.py +++ b/bitbake/lib/bb/server/process.py | |||
@@ -26,6 +26,9 @@ import errno | |||
26 | import re | 26 | import re |
27 | import datetime | 27 | import datetime |
28 | import pickle | 28 | import pickle |
29 | import traceback | ||
30 | import gc | ||
31 | import stat | ||
29 | import bb.server.xmlrpcserver | 32 | import bb.server.xmlrpcserver |
30 | from bb import daemonize | 33 | from bb import daemonize |
31 | from multiprocessing import queues | 34 | from multiprocessing import queues |
@@ -35,9 +38,46 @@ logger = logging.getLogger('BitBake') | |||
35 | class ProcessTimeout(SystemExit): | 38 | class ProcessTimeout(SystemExit): |
36 | pass | 39 | pass |
37 | 40 | ||
41 | def currenttime(): | ||
42 | return datetime.datetime.now().strftime('%H:%M:%S.%f') | ||
43 | |||
38 | def serverlog(msg): | 44 | def serverlog(msg): |
39 | print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg) | 45 | print(str(os.getpid()) + " " + currenttime() + " " + msg) |
40 | sys.stdout.flush() | 46 | #Seems a flush here triggers filesytem sync like behaviour and long hangs in the server |
47 | #sys.stdout.flush() | ||
48 | |||
49 | # | ||
50 | # When we have lockfile issues, try and find infomation about which process is | ||
51 | # using the lockfile | ||
52 | # | ||
53 | def get_lockfile_process_msg(lockfile): | ||
54 | # Some systems may not have lsof available | ||
55 | procs = None | ||
56 | try: | ||
57 | procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT) | ||
58 | except subprocess.CalledProcessError: | ||
59 | # File was deleted? | ||
60 | pass | ||
61 | except OSError as e: | ||
62 | if e.errno != errno.ENOENT: | ||
63 | raise | ||
64 | if procs is None: | ||
65 | # Fall back to fuser if lsof is unavailable | ||
66 | try: | ||
67 | procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT) | ||
68 | except subprocess.CalledProcessError: | ||
69 | # File was deleted? | ||
70 | pass | ||
71 | except OSError as e: | ||
72 | if e.errno != errno.ENOENT: | ||
73 | raise | ||
74 | if procs: | ||
75 | return procs.decode("utf-8") | ||
76 | return None | ||
77 | |||
78 | class idleFinish(): | ||
79 | def __init__(self, msg): | ||
80 | self.msg = msg | ||
41 | 81 | ||
42 | class ProcessServer(): | 82 | class ProcessServer(): |
43 | profile_filename = "profile.log" | 83 | profile_filename = "profile.log" |
@@ -56,12 +96,19 @@ class ProcessServer(): | |||
56 | self.maxuiwait = 30 | 96 | self.maxuiwait = 30 |
57 | self.xmlrpc = False | 97 | self.xmlrpc = False |
58 | 98 | ||
99 | self.idle = None | ||
100 | # Need a lock for _idlefuns changes | ||
59 | self._idlefuns = {} | 101 | self._idlefuns = {} |
102 | self._idlefuncsLock = threading.Lock() | ||
103 | self.idle_cond = threading.Condition(self._idlefuncsLock) | ||
60 | 104 | ||
61 | self.bitbake_lock = lock | 105 | self.bitbake_lock = lock |
62 | self.bitbake_lock_name = lockname | 106 | self.bitbake_lock_name = lockname |
63 | self.sock = sock | 107 | self.sock = sock |
64 | self.sockname = sockname | 108 | self.sockname = sockname |
109 | # It is possible the directory may be renamed. Cache the inode of the socket file | ||
110 | # so we can tell if things changed. | ||
111 | self.sockinode = os.stat(self.sockname)[stat.ST_INO] | ||
65 | 112 | ||
66 | self.server_timeout = server_timeout | 113 | self.server_timeout = server_timeout |
67 | self.timeout = self.server_timeout | 114 | self.timeout = self.server_timeout |
@@ -70,7 +117,9 @@ class ProcessServer(): | |||
70 | def register_idle_function(self, function, data): | 117 | def register_idle_function(self, function, data): |
71 | """Register a function to be called while the server is idle""" | 118 | """Register a function to be called while the server is idle""" |
72 | assert hasattr(function, '__call__') | 119 | assert hasattr(function, '__call__') |
73 | self._idlefuns[function] = data | 120 | with bb.utils.lock_timeout(self._idlefuncsLock): |
121 | self._idlefuns[function] = data | ||
122 | serverlog("Registering idle function %s" % str(function)) | ||
74 | 123 | ||
75 | def run(self): | 124 | def run(self): |
76 | 125 | ||
@@ -109,6 +158,31 @@ class ProcessServer(): | |||
109 | 158 | ||
110 | return ret | 159 | return ret |
111 | 160 | ||
161 | def _idle_check(self): | ||
162 | return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None | ||
163 | |||
164 | def wait_for_idle(self, timeout=30): | ||
165 | # Wait for the idle loop to have cleared | ||
166 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
167 | return self.idle_cond.wait_for(self._idle_check, timeout) is not False | ||
168 | |||
169 | def set_async_cmd(self, cmd): | ||
170 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
171 | ret = self.idle_cond.wait_for(self._idle_check, 30) | ||
172 | if ret is False: | ||
173 | return False | ||
174 | self.cooker.command.currentAsyncCommand = cmd | ||
175 | return True | ||
176 | |||
177 | def clear_async_cmd(self): | ||
178 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
179 | self.cooker.command.currentAsyncCommand = None | ||
180 | self.idle_cond.notify_all() | ||
181 | |||
182 | def get_async_cmd(self): | ||
183 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
184 | return self.cooker.command.currentAsyncCommand | ||
185 | |||
112 | def main(self): | 186 | def main(self): |
113 | self.cooker.pre_serve() | 187 | self.cooker.pre_serve() |
114 | 188 | ||
@@ -123,14 +197,19 @@ class ProcessServer(): | |||
123 | fds.append(self.xmlrpc) | 197 | fds.append(self.xmlrpc) |
124 | seendata = False | 198 | seendata = False |
125 | serverlog("Entering server connection loop") | 199 | serverlog("Entering server connection loop") |
200 | serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname))) | ||
126 | 201 | ||
127 | def disconnect_client(self, fds): | 202 | def disconnect_client(self, fds): |
128 | serverlog("Disconnecting Client") | 203 | serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname)) |
129 | if self.controllersock: | 204 | if self.controllersock: |
130 | fds.remove(self.controllersock) | 205 | fds.remove(self.controllersock) |
131 | self.controllersock.close() | 206 | self.controllersock.close() |
132 | self.controllersock = False | 207 | self.controllersock = False |
133 | if self.haveui: | 208 | if self.haveui: |
209 | # Wait for the idle loop to have cleared (30s max) | ||
210 | if not self.wait_for_idle(30): | ||
211 | serverlog("Idle loop didn't finish queued commands after 30s, exiting.") | ||
212 | self.quit = True | ||
134 | fds.remove(self.command_channel) | 213 | fds.remove(self.command_channel) |
135 | bb.event.unregister_UIHhandler(self.event_handle, True) | 214 | bb.event.unregister_UIHhandler(self.event_handle, True) |
136 | self.command_channel_reply.writer.close() | 215 | self.command_channel_reply.writer.close() |
@@ -142,12 +221,12 @@ class ProcessServer(): | |||
142 | self.cooker.clientComplete() | 221 | self.cooker.clientComplete() |
143 | self.haveui = False | 222 | self.haveui = False |
144 | ready = select.select(fds,[],[],0)[0] | 223 | ready = select.select(fds,[],[],0)[0] |
145 | if newconnections: | 224 | if newconnections and not self.quit: |
146 | serverlog("Starting new client") | 225 | serverlog("Starting new client") |
147 | conn = newconnections.pop(-1) | 226 | conn = newconnections.pop(-1) |
148 | fds.append(conn) | 227 | fds.append(conn) |
149 | self.controllersock = conn | 228 | self.controllersock = conn |
150 | elif self.timeout is None and not ready: | 229 | elif not self.timeout and not ready: |
151 | serverlog("No timeout, exiting.") | 230 | serverlog("No timeout, exiting.") |
152 | self.quit = True | 231 | self.quit = True |
153 | 232 | ||
@@ -214,11 +293,14 @@ class ProcessServer(): | |||
214 | continue | 293 | continue |
215 | try: | 294 | try: |
216 | serverlog("Running command %s" % command) | 295 | serverlog("Running command %s" % command) |
217 | self.command_channel_reply.send(self.cooker.command.runCommand(command)) | 296 | reply = self.cooker.command.runCommand(command, self) |
218 | serverlog("Command Completed") | 297 | serverlog("Sending reply %s" % repr(reply)) |
298 | self.command_channel_reply.send(reply) | ||
299 | serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname)) | ||
219 | except Exception as e: | 300 | except Exception as e: |
220 | serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e))) | 301 | stack = traceback.format_exc() |
221 | logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e))) | 302 | serverlog('Exception in server main event loop running command %s (%s)' % (command, stack)) |
303 | logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack)) | ||
222 | 304 | ||
223 | if self.xmlrpc in ready: | 305 | if self.xmlrpc in ready: |
224 | self.xmlrpc.handle_requests() | 306 | self.xmlrpc.handle_requests() |
@@ -241,19 +323,25 @@ class ProcessServer(): | |||
241 | 323 | ||
242 | ready = self.idle_commands(.1, fds) | 324 | ready = self.idle_commands(.1, fds) |
243 | 325 | ||
244 | if len(threading.enumerate()) != 1: | 326 | if self.idle: |
245 | serverlog("More than one thread left?: " + str(threading.enumerate())) | 327 | self.idle.join() |
246 | 328 | ||
247 | serverlog("Exiting") | 329 | serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname)) |
248 | # Remove the socket file so we don't get any more connections to avoid races | 330 | # Remove the socket file so we don't get any more connections to avoid races |
331 | # The build directory could have been renamed so if the file isn't the one we created | ||
332 | # we shouldn't delete it. | ||
249 | try: | 333 | try: |
250 | os.unlink(self.sockname) | 334 | sockinode = os.stat(self.sockname)[stat.ST_INO] |
251 | except: | 335 | if sockinode == self.sockinode: |
252 | pass | 336 | os.unlink(self.sockname) |
337 | else: | ||
338 | serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode)) | ||
339 | except Exception as err: | ||
340 | serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err)) | ||
253 | self.sock.close() | 341 | self.sock.close() |
254 | 342 | ||
255 | try: | 343 | try: |
256 | self.cooker.shutdown(True) | 344 | self.cooker.shutdown(True, idle=False) |
257 | self.cooker.notifier.stop() | 345 | self.cooker.notifier.stop() |
258 | self.cooker.confignotifier.stop() | 346 | self.cooker.confignotifier.stop() |
259 | except: | 347 | except: |
@@ -261,6 +349,9 @@ class ProcessServer(): | |||
261 | 349 | ||
262 | self.cooker.post_serve() | 350 | self.cooker.post_serve() |
263 | 351 | ||
352 | if len(threading.enumerate()) != 1: | ||
353 | serverlog("More than one thread left?: " + str(threading.enumerate())) | ||
354 | |||
264 | # Flush logs before we release the lock | 355 | # Flush logs before we release the lock |
265 | sys.stdout.flush() | 356 | sys.stdout.flush() |
266 | sys.stderr.flush() | 357 | sys.stderr.flush() |
@@ -276,20 +367,21 @@ class ProcessServer(): | |||
276 | except FileNotFoundError: | 367 | except FileNotFoundError: |
277 | return None | 368 | return None |
278 | 369 | ||
279 | lockcontents = get_lock_contents(lockfile) | ||
280 | serverlog("Original lockfile contents: " + str(lockcontents)) | ||
281 | |||
282 | lock.close() | 370 | lock.close() |
283 | lock = None | 371 | lock = None |
284 | 372 | ||
285 | while not lock: | 373 | while not lock: |
286 | i = 0 | 374 | i = 0 |
287 | lock = None | 375 | lock = None |
376 | if not os.path.exists(os.path.basename(lockfile)): | ||
377 | serverlog("Lockfile directory gone, exiting.") | ||
378 | return | ||
379 | |||
288 | while not lock and i < 30: | 380 | while not lock and i < 30: |
289 | lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False) | 381 | lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False) |
290 | if not lock: | 382 | if not lock: |
291 | newlockcontents = get_lock_contents(lockfile) | 383 | newlockcontents = get_lock_contents(lockfile) |
292 | if newlockcontents != lockcontents: | 384 | if not newlockcontents[0].startswith([f"{os.getpid()}\n", f"{os.getpid()} "]): |
293 | # A new server was started, the lockfile contents changed, we can exit | 385 | # A new server was started, the lockfile contents changed, we can exit |
294 | serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents)) | 386 | serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents)) |
295 | return | 387 | return |
@@ -303,75 +395,108 @@ class ProcessServer(): | |||
303 | return | 395 | return |
304 | 396 | ||
305 | if not lock: | 397 | if not lock: |
306 | # Some systems may not have lsof available | 398 | procs = get_lockfile_process_msg(lockfile) |
307 | procs = None | 399 | msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"] |
400 | if procs: | ||
401 | msg.append(":\n%s" % procs) | ||
402 | serverlog("".join(msg)) | ||
403 | |||
404 | def idle_thread(self): | ||
405 | if self.cooker.configuration.profile: | ||
406 | try: | ||
407 | import cProfile as profile | ||
408 | except: | ||
409 | import profile | ||
410 | prof = profile.Profile() | ||
411 | |||
412 | ret = profile.Profile.runcall(prof, self.idle_thread_internal) | ||
413 | |||
414 | prof.dump_stats("profile-mainloop.log") | ||
415 | bb.utils.process_profilelog("profile-mainloop.log") | ||
416 | serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed") | ||
417 | else: | ||
418 | self.idle_thread_internal() | ||
419 | |||
420 | def idle_thread_internal(self): | ||
421 | def remove_idle_func(function): | ||
422 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
423 | del self._idlefuns[function] | ||
424 | self.idle_cond.notify_all() | ||
425 | |||
426 | while not self.quit: | ||
427 | nextsleep = 0.1 | ||
428 | fds = [] | ||
429 | |||
430 | with bb.utils.lock_timeout(self._idlefuncsLock): | ||
431 | items = list(self._idlefuns.items()) | ||
432 | |||
433 | for function, data in items: | ||
308 | try: | 434 | try: |
309 | procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT) | 435 | retval = function(self, data, False) |
310 | except subprocess.CalledProcessError: | 436 | if isinstance(retval, idleFinish): |
311 | # File was deleted? | 437 | serverlog("Removing idle function %s at idleFinish" % str(function)) |
312 | continue | 438 | remove_idle_func(function) |
313 | except OSError as e: | 439 | self.cooker.command.finishAsyncCommand(retval.msg) |
314 | if e.errno != errno.ENOENT: | 440 | nextsleep = None |
315 | raise | 441 | elif retval is False: |
316 | if procs is None: | 442 | serverlog("Removing idle function %s" % str(function)) |
317 | # Fall back to fuser if lsof is unavailable | 443 | remove_idle_func(function) |
318 | try: | 444 | nextsleep = None |
319 | procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT) | 445 | elif retval is True: |
320 | except subprocess.CalledProcessError: | 446 | nextsleep = None |
321 | # File was deleted? | 447 | elif isinstance(retval, float) and nextsleep: |
448 | if (retval < nextsleep): | ||
449 | nextsleep = retval | ||
450 | elif nextsleep is None: | ||
322 | continue | 451 | continue |
323 | except OSError as e: | 452 | else: |
324 | if e.errno != errno.ENOENT: | 453 | fds = fds + retval |
325 | raise | 454 | except SystemExit: |
455 | raise | ||
456 | except Exception as exc: | ||
457 | if not isinstance(exc, bb.BBHandledException): | ||
458 | logger.exception('Running idle function') | ||
459 | remove_idle_func(function) | ||
460 | serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc()) | ||
461 | self.quit = True | ||
326 | 462 | ||
327 | msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock" | 463 | # Create new heartbeat event? |
328 | if procs: | 464 | now = time.time() |
329 | msg += ":\n%s" % str(procs.decode("utf-8")) | 465 | if bb.event._heartbeat_enabled and now >= self.next_heartbeat: |
330 | serverlog(msg) | 466 | # We might have missed heartbeats. Just trigger once in |
467 | # that case and continue after the usual delay. | ||
468 | self.next_heartbeat += self.heartbeat_seconds | ||
469 | if self.next_heartbeat <= now: | ||
470 | self.next_heartbeat = now + self.heartbeat_seconds | ||
471 | if hasattr(self.cooker, "data"): | ||
472 | heartbeat = bb.event.HeartbeatEvent(now) | ||
473 | try: | ||
474 | bb.event.fire(heartbeat, self.cooker.data) | ||
475 | except Exception as exc: | ||
476 | if not isinstance(exc, bb.BBHandledException): | ||
477 | logger.exception('Running heartbeat function') | ||
478 | serverlog("Exception %s broke in idle_thread, exiting" % traceback.format_exc()) | ||
479 | self.quit = True | ||
480 | if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat: | ||
481 | # Shorten timeout so that we we wake up in time for | ||
482 | # the heartbeat. | ||
483 | nextsleep = self.next_heartbeat - now | ||
484 | |||
485 | if nextsleep is not None: | ||
486 | select.select(fds,[],[],nextsleep)[0] | ||
331 | 487 | ||
332 | def idle_commands(self, delay, fds=None): | 488 | def idle_commands(self, delay, fds=None): |
333 | nextsleep = delay | 489 | nextsleep = delay |
334 | if not fds: | 490 | if not fds: |
335 | fds = [] | 491 | fds = [] |
336 | 492 | ||
337 | for function, data in list(self._idlefuns.items()): | 493 | if not self.idle: |
338 | try: | 494 | self.idle = threading.Thread(target=self.idle_thread) |
339 | retval = function(self, data, False) | 495 | self.idle.start() |
340 | if retval is False: | 496 | elif self.idle and not self.idle.is_alive(): |
341 | del self._idlefuns[function] | 497 | serverlog("Idle thread terminated, main thread exiting too") |
342 | nextsleep = None | 498 | bb.error("Idle thread terminated, main thread exiting too") |
343 | elif retval is True: | 499 | self.quit = True |
344 | nextsleep = None | ||
345 | elif isinstance(retval, float) and nextsleep: | ||
346 | if (retval < nextsleep): | ||
347 | nextsleep = retval | ||
348 | elif nextsleep is None: | ||
349 | continue | ||
350 | else: | ||
351 | fds = fds + retval | ||
352 | except SystemExit: | ||
353 | raise | ||
354 | except Exception as exc: | ||
355 | if not isinstance(exc, bb.BBHandledException): | ||
356 | logger.exception('Running idle function') | ||
357 | del self._idlefuns[function] | ||
358 | self.quit = True | ||
359 | |||
360 | # Create new heartbeat event? | ||
361 | now = time.time() | ||
362 | if now >= self.next_heartbeat: | ||
363 | # We might have missed heartbeats. Just trigger once in | ||
364 | # that case and continue after the usual delay. | ||
365 | self.next_heartbeat += self.heartbeat_seconds | ||
366 | if self.next_heartbeat <= now: | ||
367 | self.next_heartbeat = now + self.heartbeat_seconds | ||
368 | if hasattr(self.cooker, "data"): | ||
369 | heartbeat = bb.event.HeartbeatEvent(now) | ||
370 | bb.event.fire(heartbeat, self.cooker.data) | ||
371 | if nextsleep and now + nextsleep > self.next_heartbeat: | ||
372 | # Shorten timeout so that we we wake up in time for | ||
373 | # the heartbeat. | ||
374 | nextsleep = self.next_heartbeat - now | ||
375 | 500 | ||
376 | if nextsleep is not None: | 501 | if nextsleep is not None: |
377 | if self.xmlrpc: | 502 | if self.xmlrpc: |
@@ -391,12 +516,18 @@ class ServerCommunicator(): | |||
391 | self.recv = recv | 516 | self.recv = recv |
392 | 517 | ||
393 | def runCommand(self, command): | 518 | def runCommand(self, command): |
394 | self.connection.send(command) | 519 | try: |
520 | self.connection.send(command) | ||
521 | except BrokenPipeError as e: | ||
522 | raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e | ||
395 | if not self.recv.poll(30): | 523 | if not self.recv.poll(30): |
396 | logger.info("No reply from server in 30s") | 524 | logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime())) |
397 | if not self.recv.poll(30): | 525 | if not self.recv.poll(30): |
398 | raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)") | 526 | raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime()) |
399 | ret, exc = self.recv.get() | 527 | try: |
528 | ret, exc = self.recv.get() | ||
529 | except EOFError as e: | ||
530 | raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e | ||
400 | # Should probably turn all exceptions in exc back into exceptions? | 531 | # Should probably turn all exceptions in exc back into exceptions? |
401 | # For now, at least handle BBHandledException | 532 | # For now, at least handle BBHandledException |
402 | if exc and ("BBHandledException" in exc or "SystemExit" in exc): | 533 | if exc and ("BBHandledException" in exc or "SystemExit" in exc): |
@@ -429,6 +560,7 @@ class BitBakeProcessServerConnection(object): | |||
429 | self.socket_connection = sock | 560 | self.socket_connection = sock |
430 | 561 | ||
431 | def terminate(self): | 562 | def terminate(self): |
563 | self.events.close() | ||
432 | self.socket_connection.close() | 564 | self.socket_connection.close() |
433 | self.connection.connection.close() | 565 | self.connection.connection.close() |
434 | self.connection.recv.close() | 566 | self.connection.recv.close() |
@@ -439,13 +571,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f' | |||
439 | 571 | ||
440 | class BitBakeServer(object): | 572 | class BitBakeServer(object): |
441 | 573 | ||
442 | def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface): | 574 | def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile): |
443 | 575 | ||
444 | self.server_timeout = server_timeout | 576 | self.server_timeout = server_timeout |
445 | self.xmlrpcinterface = xmlrpcinterface | 577 | self.xmlrpcinterface = xmlrpcinterface |
446 | self.featureset = featureset | 578 | self.featureset = featureset |
447 | self.sockname = sockname | 579 | self.sockname = sockname |
448 | self.bitbake_lock = lock | 580 | self.bitbake_lock = lock |
581 | self.profile = profile | ||
449 | self.readypipe, self.readypipein = os.pipe() | 582 | self.readypipe, self.readypipein = os.pipe() |
450 | 583 | ||
451 | # Place the log in the builddirectory alongside the lock file | 584 | # Place the log in the builddirectory alongside the lock file |
@@ -466,7 +599,7 @@ class BitBakeServer(object): | |||
466 | try: | 599 | try: |
467 | r = ready.get() | 600 | r = ready.get() |
468 | except EOFError: | 601 | except EOFError: |
469 | # Trap the child exitting/closing the pipe and error out | 602 | # Trap the child exiting/closing the pipe and error out |
470 | r = None | 603 | r = None |
471 | if not r or r[0] != "r": | 604 | if not r or r[0] != "r": |
472 | ready.close() | 605 | ready.close() |
@@ -509,9 +642,9 @@ class BitBakeServer(object): | |||
509 | os.set_inheritable(self.bitbake_lock.fileno(), True) | 642 | os.set_inheritable(self.bitbake_lock.fileno(), True) |
510 | os.set_inheritable(self.readypipein, True) | 643 | os.set_inheritable(self.readypipein, True) |
511 | serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") | 644 | serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") |
512 | os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1])) | 645 | os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1])) |
513 | 646 | ||
514 | def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface): | 647 | def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile): |
515 | 648 | ||
516 | import bb.cookerdata | 649 | import bb.cookerdata |
517 | import bb.cooker | 650 | import bb.cooker |
@@ -523,6 +656,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc | |||
523 | 656 | ||
524 | # Create server control socket | 657 | # Create server control socket |
525 | if os.path.exists(sockname): | 658 | if os.path.exists(sockname): |
659 | serverlog("WARNING: removing existing socket file '%s'" % sockname) | ||
526 | os.unlink(sockname) | 660 | os.unlink(sockname) |
527 | 661 | ||
528 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) | 662 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) |
@@ -539,7 +673,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc | |||
539 | writer = ConnectionWriter(readypipeinfd) | 673 | writer = ConnectionWriter(readypipeinfd) |
540 | try: | 674 | try: |
541 | featureset = [] | 675 | featureset = [] |
542 | cooker = bb.cooker.BBCooker(featureset, server.register_idle_function) | 676 | cooker = bb.cooker.BBCooker(featureset, server) |
677 | cooker.configuration.profile = profile | ||
543 | except bb.BBHandledException: | 678 | except bb.BBHandledException: |
544 | return None | 679 | return None |
545 | writer.send("r") | 680 | writer.send("r") |
@@ -549,7 +684,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc | |||
549 | 684 | ||
550 | server.run() | 685 | server.run() |
551 | finally: | 686 | finally: |
552 | # Flush any ,essages/errors to the logfile before exit | 687 | # Flush any messages/errors to the logfile before exit |
553 | sys.stdout.flush() | 688 | sys.stdout.flush() |
554 | sys.stderr.flush() | 689 | sys.stderr.flush() |
555 | 690 | ||
@@ -654,23 +789,18 @@ class BBUIEventQueue: | |||
654 | self.reader = ConnectionReader(readfd) | 789 | self.reader = ConnectionReader(readfd) |
655 | 790 | ||
656 | self.t = threading.Thread() | 791 | self.t = threading.Thread() |
657 | self.t.setDaemon(True) | ||
658 | self.t.run = self.startCallbackHandler | 792 | self.t.run = self.startCallbackHandler |
659 | self.t.start() | 793 | self.t.start() |
660 | 794 | ||
661 | def getEvent(self): | 795 | def getEvent(self): |
662 | self.eventQueueLock.acquire() | 796 | with bb.utils.lock_timeout(self.eventQueueLock): |
663 | 797 | if len(self.eventQueue) == 0: | |
664 | if len(self.eventQueue) == 0: | 798 | return None |
665 | self.eventQueueLock.release() | ||
666 | return None | ||
667 | |||
668 | item = self.eventQueue.pop(0) | ||
669 | 799 | ||
670 | if len(self.eventQueue) == 0: | 800 | item = self.eventQueue.pop(0) |
671 | self.eventQueueNotify.clear() | 801 | if len(self.eventQueue) == 0: |
802 | self.eventQueueNotify.clear() | ||
672 | 803 | ||
673 | self.eventQueueLock.release() | ||
674 | return item | 804 | return item |
675 | 805 | ||
676 | def waitEvent(self, delay): | 806 | def waitEvent(self, delay): |
@@ -678,10 +808,9 @@ class BBUIEventQueue: | |||
678 | return self.getEvent() | 808 | return self.getEvent() |
679 | 809 | ||
680 | def queue_event(self, event): | 810 | def queue_event(self, event): |
681 | self.eventQueueLock.acquire() | 811 | with bb.utils.lock_timeout(self.eventQueueLock): |
682 | self.eventQueue.append(event) | 812 | self.eventQueue.append(event) |
683 | self.eventQueueNotify.set() | 813 | self.eventQueueNotify.set() |
684 | self.eventQueueLock.release() | ||
685 | 814 | ||
686 | def send_event(self, event): | 815 | def send_event(self, event): |
687 | self.queue_event(pickle.loads(event)) | 816 | self.queue_event(pickle.loads(event)) |
@@ -690,13 +819,17 @@ class BBUIEventQueue: | |||
690 | bb.utils.set_process_name("UIEventQueue") | 819 | bb.utils.set_process_name("UIEventQueue") |
691 | while True: | 820 | while True: |
692 | try: | 821 | try: |
693 | self.reader.wait() | 822 | ready = self.reader.wait(0.25) |
694 | event = self.reader.get() | 823 | if ready: |
695 | self.queue_event(event) | 824 | event = self.reader.get() |
696 | except EOFError: | 825 | self.queue_event(event) |
826 | except (EOFError, OSError, TypeError): | ||
697 | # Easiest way to exit is to close the file descriptor to cause an exit | 827 | # Easiest way to exit is to close the file descriptor to cause an exit |
698 | break | 828 | break |
829 | |||
830 | def close(self): | ||
699 | self.reader.close() | 831 | self.reader.close() |
832 | self.t.join() | ||
700 | 833 | ||
701 | class ConnectionReader(object): | 834 | class ConnectionReader(object): |
702 | 835 | ||
@@ -711,7 +844,7 @@ class ConnectionReader(object): | |||
711 | return self.reader.poll(timeout) | 844 | return self.reader.poll(timeout) |
712 | 845 | ||
713 | def get(self): | 846 | def get(self): |
714 | with self.rlock: | 847 | with bb.utils.lock_timeout(self.rlock): |
715 | res = self.reader.recv_bytes() | 848 | res = self.reader.recv_bytes() |
716 | return multiprocessing.reduction.ForkingPickler.loads(res) | 849 | return multiprocessing.reduction.ForkingPickler.loads(res) |
717 | 850 | ||
@@ -730,10 +863,31 @@ class ConnectionWriter(object): | |||
730 | # Why bb.event needs this I have no idea | 863 | # Why bb.event needs this I have no idea |
731 | self.event = self | 864 | self.event = self |
732 | 865 | ||
866 | def _send(self, obj): | ||
867 | gc.disable() | ||
868 | with bb.utils.lock_timeout(self.wlock): | ||
869 | self.writer.send_bytes(obj) | ||
870 | gc.enable() | ||
871 | |||
733 | def send(self, obj): | 872 | def send(self, obj): |
734 | obj = multiprocessing.reduction.ForkingPickler.dumps(obj) | 873 | obj = multiprocessing.reduction.ForkingPickler.dumps(obj) |
735 | with self.wlock: | 874 | # See notes/code in CookerParser |
736 | self.writer.send_bytes(obj) | 875 | # We must not terminate holding this lock else processes will hang. |
876 | # For SIGTERM, raising afterwards avoids this. | ||
877 | # For SIGINT, we don't want to have written partial data to the pipe. | ||
878 | # pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139 | ||
879 | process = multiprocessing.current_process() | ||
880 | if process and hasattr(process, "queue_signals"): | ||
881 | with bb.utils.lock_timeout(process.signal_threadlock): | ||
882 | process.queue_signals = True | ||
883 | self._send(obj) | ||
884 | process.queue_signals = False | ||
885 | |||
886 | while len(process.signal_received) > 0: | ||
887 | sig = process.signal_received.pop() | ||
888 | process.handle_sig(sig, None) | ||
889 | else: | ||
890 | self._send(obj) | ||
737 | 891 | ||
738 | def fileno(self): | 892 | def fileno(self): |
739 | return self.writer.fileno() | 893 | return self.writer.fileno() |
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py index 2fa71be667..04b0b17db1 100644 --- a/bitbake/lib/bb/server/xmlrpcserver.py +++ b/bitbake/lib/bb/server/xmlrpcserver.py | |||
@@ -11,6 +11,7 @@ import hashlib | |||
11 | import time | 11 | import time |
12 | import inspect | 12 | import inspect |
13 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | 13 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler |
14 | import bb.server.xmlrpcclient | ||
14 | 15 | ||
15 | import bb | 16 | import bb |
16 | 17 | ||
@@ -117,7 +118,7 @@ class BitBakeXMLRPCServerCommands(): | |||
117 | """ | 118 | """ |
118 | Run a cooker command on the server | 119 | Run a cooker command on the server |
119 | """ | 120 | """ |
120 | return self.server.cooker.command.runCommand(command, self.server.readonly) | 121 | return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly) |
121 | 122 | ||
122 | def getEventHandle(self): | 123 | def getEventHandle(self): |
123 | return self.event_handle | 124 | return self.event_handle |
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 0d88c6ec68..8ab08ec961 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -11,6 +13,10 @@ import pickle | |||
11 | import bb.data | 13 | import bb.data |
12 | import difflib | 14 | import difflib |
13 | import simplediff | 15 | import simplediff |
16 | import json | ||
17 | import types | ||
18 | from contextlib import contextmanager | ||
19 | import bb.compress.zstd | ||
14 | from bb.checksum import FileChecksumCache | 20 | from bb.checksum import FileChecksumCache |
15 | from bb import runqueue | 21 | from bb import runqueue |
16 | import hashserv | 22 | import hashserv |
@@ -19,6 +25,35 @@ import hashserv.client | |||
19 | logger = logging.getLogger('BitBake.SigGen') | 25 | logger = logging.getLogger('BitBake.SigGen') |
20 | hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') | 26 | hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') |
21 | 27 | ||
28 | #find_siginfo and find_siginfo_version are set by the metadata siggen | ||
29 | # The minimum version of the find_siginfo function we need | ||
30 | find_siginfo_minversion = 2 | ||
31 | |||
32 | HASHSERV_ENVVARS = [ | ||
33 | "SSL_CERT_DIR", | ||
34 | "SSL_CERT_FILE", | ||
35 | "NO_PROXY", | ||
36 | "HTTPS_PROXY", | ||
37 | "HTTP_PROXY" | ||
38 | ] | ||
39 | |||
40 | def check_siggen_version(siggen): | ||
41 | if not hasattr(siggen, "find_siginfo_version"): | ||
42 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)") | ||
43 | if siggen.find_siginfo_version < siggen.find_siginfo_minversion: | ||
44 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion)) | ||
45 | |||
46 | class SetEncoder(json.JSONEncoder): | ||
47 | def default(self, obj): | ||
48 | if isinstance(obj, set) or isinstance(obj, frozenset): | ||
49 | return dict(_set_object=list(sorted(obj))) | ||
50 | return json.JSONEncoder.default(self, obj) | ||
51 | |||
52 | def SetDecoder(dct): | ||
53 | if '_set_object' in dct: | ||
54 | return frozenset(dct['_set_object']) | ||
55 | return dct | ||
56 | |||
22 | def init(d): | 57 | def init(d): |
23 | siggens = [obj for obj in globals().values() | 58 | siggens = [obj for obj in globals().values() |
24 | if type(obj) is type and issubclass(obj, SignatureGenerator)] | 59 | if type(obj) is type and issubclass(obj, SignatureGenerator)] |
@@ -27,7 +62,6 @@ def init(d): | |||
27 | for sg in siggens: | 62 | for sg in siggens: |
28 | if desired == sg.name: | 63 | if desired == sg.name: |
29 | return sg(d) | 64 | return sg(d) |
30 | break | ||
31 | else: | 65 | else: |
32 | logger.error("Invalid signature generator '%s', using default 'noop'\n" | 66 | logger.error("Invalid signature generator '%s', using default 'noop'\n" |
33 | "Available generators: %s", desired, | 67 | "Available generators: %s", desired, |
@@ -39,11 +73,6 @@ class SignatureGenerator(object): | |||
39 | """ | 73 | """ |
40 | name = "noop" | 74 | name = "noop" |
41 | 75 | ||
42 | # If the derived class supports multiconfig datacaches, set this to True | ||
43 | # The default is False for backward compatibility with derived signature | ||
44 | # generators that do not understand multiconfig caches | ||
45 | supports_multiconfig_datacaches = False | ||
46 | |||
47 | def __init__(self, data): | 76 | def __init__(self, data): |
48 | self.basehash = {} | 77 | self.basehash = {} |
49 | self.taskhash = {} | 78 | self.taskhash = {} |
@@ -61,9 +90,39 @@ class SignatureGenerator(object): | |||
61 | def postparsing_clean_cache(self): | 90 | def postparsing_clean_cache(self): |
62 | return | 91 | return |
63 | 92 | ||
93 | def setup_datacache(self, datacaches): | ||
94 | self.datacaches = datacaches | ||
95 | |||
96 | def setup_datacache_from_datastore(self, mcfn, d): | ||
97 | # In task context we have no cache so setup internal data structures | ||
98 | # from the fully parsed data store provided | ||
99 | |||
100 | mc = d.getVar("__BBMULTICONFIG", False) or "" | ||
101 | tasks = d.getVar('__BBTASKS', False) | ||
102 | |||
103 | self.datacaches = {} | ||
104 | self.datacaches[mc] = types.SimpleNamespace() | ||
105 | setattr(self.datacaches[mc], "stamp", {}) | ||
106 | self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP') | ||
107 | setattr(self.datacaches[mc], "stamp_extrainfo", {}) | ||
108 | self.datacaches[mc].stamp_extrainfo[mcfn] = {} | ||
109 | for t in tasks: | ||
110 | flag = d.getVarFlag(t, "stamp-extra-info") | ||
111 | if flag: | ||
112 | self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag | ||
113 | |||
114 | def get_cached_unihash(self, tid): | ||
115 | return None | ||
116 | |||
64 | def get_unihash(self, tid): | 117 | def get_unihash(self, tid): |
118 | unihash = self.get_cached_unihash(tid) | ||
119 | if unihash: | ||
120 | return unihash | ||
65 | return self.taskhash[tid] | 121 | return self.taskhash[tid] |
66 | 122 | ||
123 | def get_unihashes(self, tids): | ||
124 | return {tid: self.get_unihash(tid) for tid in tids} | ||
125 | |||
67 | def prep_taskhash(self, tid, deps, dataCaches): | 126 | def prep_taskhash(self, tid, deps, dataCaches): |
68 | return | 127 | return |
69 | 128 | ||
@@ -75,17 +134,51 @@ class SignatureGenerator(object): | |||
75 | """Write/update the file checksum cache onto disk""" | 134 | """Write/update the file checksum cache onto disk""" |
76 | return | 135 | return |
77 | 136 | ||
137 | def stampfile_base(self, mcfn): | ||
138 | mc = bb.runqueue.mc_from_tid(mcfn) | ||
139 | return self.datacaches[mc].stamp[mcfn] | ||
140 | |||
141 | def stampfile_mcfn(self, taskname, mcfn, extrainfo=True): | ||
142 | mc = bb.runqueue.mc_from_tid(mcfn) | ||
143 | stamp = self.datacaches[mc].stamp[mcfn] | ||
144 | if not stamp: | ||
145 | return | ||
146 | |||
147 | stamp_extrainfo = "" | ||
148 | if extrainfo: | ||
149 | taskflagname = taskname | ||
150 | if taskname.endswith("_setscene"): | ||
151 | taskflagname = taskname.replace("_setscene", "") | ||
152 | stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or "" | ||
153 | |||
154 | return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo) | ||
155 | |||
78 | def stampfile(self, stampbase, file_name, taskname, extrainfo): | 156 | def stampfile(self, stampbase, file_name, taskname, extrainfo): |
79 | return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') | 157 | return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') |
80 | 158 | ||
159 | def stampcleanmask_mcfn(self, taskname, mcfn): | ||
160 | mc = bb.runqueue.mc_from_tid(mcfn) | ||
161 | stamp = self.datacaches[mc].stamp[mcfn] | ||
162 | if not stamp: | ||
163 | return [] | ||
164 | |||
165 | taskflagname = taskname | ||
166 | if taskname.endswith("_setscene"): | ||
167 | taskflagname = taskname.replace("_setscene", "") | ||
168 | stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or "" | ||
169 | |||
170 | return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo) | ||
171 | |||
81 | def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): | 172 | def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): |
82 | return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') | 173 | return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') |
83 | 174 | ||
84 | def dump_sigtask(self, fn, task, stampbase, runtime): | 175 | def dump_sigtask(self, mcfn, task, stampbase, runtime): |
85 | return | 176 | return |
86 | 177 | ||
87 | def invalidate_task(self, task, d, fn): | 178 | def invalidate_task(self, task, mcfn): |
88 | bb.build.del_stamp(task, d, fn) | 179 | mc = bb.runqueue.mc_from_tid(mcfn) |
180 | stamp = self.datacaches[mc].stamp[mcfn] | ||
181 | bb.utils.remove(stamp) | ||
89 | 182 | ||
90 | def dump_sigs(self, dataCache, options): | 183 | def dump_sigs(self, dataCache, options): |
91 | return | 184 | return |
@@ -108,40 +201,19 @@ class SignatureGenerator(object): | |||
108 | def save_unitaskhashes(self): | 201 | def save_unitaskhashes(self): |
109 | return | 202 | return |
110 | 203 | ||
111 | def set_setscene_tasks(self, setscene_tasks): | 204 | def copy_unitaskhashes(self, targetdir): |
112 | return | 205 | return |
113 | 206 | ||
114 | @classmethod | 207 | def set_setscene_tasks(self, setscene_tasks): |
115 | def get_data_caches(cls, dataCaches, mc): | 208 | return |
116 | """ | ||
117 | This function returns the datacaches that should be passed to signature | ||
118 | generator functions. If the signature generator supports multiconfig | ||
119 | caches, the entire dictionary of data caches is sent, otherwise a | ||
120 | special proxy is sent that support both index access to all | ||
121 | multiconfigs, and also direct access for the default multiconfig. | ||
122 | |||
123 | The proxy class allows code in this class itself to always use | ||
124 | multiconfig aware code (to ease maintenance), but derived classes that | ||
125 | are unaware of multiconfig data caches can still access the default | ||
126 | multiconfig as expected. | ||
127 | |||
128 | Do not override this function in derived classes; it will be removed in | ||
129 | the future when support for multiconfig data caches is mandatory | ||
130 | """ | ||
131 | class DataCacheProxy(object): | ||
132 | def __init__(self): | ||
133 | pass | ||
134 | |||
135 | def __getitem__(self, key): | ||
136 | return dataCaches[key] | ||
137 | |||
138 | def __getattr__(self, name): | ||
139 | return getattr(dataCaches[mc], name) | ||
140 | 209 | ||
141 | if cls.supports_multiconfig_datacaches: | 210 | def exit(self): |
142 | return dataCaches | 211 | return |
143 | 212 | ||
144 | return DataCacheProxy() | 213 | def build_pnid(mc, pn, taskname): |
214 | if mc: | ||
215 | return "mc:" + mc + ":" + pn + ":" + taskname | ||
216 | return pn + ":" + taskname | ||
145 | 217 | ||
146 | class SignatureGeneratorBasic(SignatureGenerator): | 218 | class SignatureGeneratorBasic(SignatureGenerator): |
147 | """ | 219 | """ |
@@ -152,15 +224,12 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
152 | self.basehash = {} | 224 | self.basehash = {} |
153 | self.taskhash = {} | 225 | self.taskhash = {} |
154 | self.unihash = {} | 226 | self.unihash = {} |
155 | self.taskdeps = {} | ||
156 | self.runtaskdeps = {} | 227 | self.runtaskdeps = {} |
157 | self.file_checksum_values = {} | 228 | self.file_checksum_values = {} |
158 | self.taints = {} | 229 | self.taints = {} |
159 | self.gendeps = {} | ||
160 | self.lookupcache = {} | ||
161 | self.setscenetasks = set() | 230 | self.setscenetasks = set() |
162 | self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) | 231 | self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split()) |
163 | self.taskwhitelist = None | 232 | self.taskhash_ignore_tasks = None |
164 | self.init_rundepcheck(data) | 233 | self.init_rundepcheck(data) |
165 | checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") | 234 | checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") |
166 | if checksum_cache_file: | 235 | if checksum_cache_file: |
@@ -175,21 +244,21 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
175 | self.tidtopn = {} | 244 | self.tidtopn = {} |
176 | 245 | ||
177 | def init_rundepcheck(self, data): | 246 | def init_rundepcheck(self, data): |
178 | self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None | 247 | self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None |
179 | if self.taskwhitelist: | 248 | if self.taskhash_ignore_tasks: |
180 | self.twl = re.compile(self.taskwhitelist) | 249 | self.twl = re.compile(self.taskhash_ignore_tasks) |
181 | else: | 250 | else: |
182 | self.twl = None | 251 | self.twl = None |
183 | 252 | ||
184 | def _build_data(self, fn, d): | 253 | def _build_data(self, mcfn, d): |
185 | 254 | ||
186 | ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') | 255 | ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') |
187 | tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist) | 256 | tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars) |
188 | 257 | ||
189 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn) | 258 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn) |
190 | 259 | ||
191 | for task in tasklist: | 260 | for task in tasklist: |
192 | tid = fn + ":" + task | 261 | tid = mcfn + ":" + task |
193 | if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: | 262 | if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: |
194 | bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) | 263 | bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) |
195 | bb.error("The following commands may help:") | 264 | bb.error("The following commands may help:") |
@@ -200,11 +269,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
200 | bb.error("%s -Sprintdiff\n" % cmd) | 269 | bb.error("%s -Sprintdiff\n" % cmd) |
201 | self.basehash[tid] = basehash[tid] | 270 | self.basehash[tid] = basehash[tid] |
202 | 271 | ||
203 | self.taskdeps[fn] = taskdeps | 272 | return taskdeps, gendeps, lookupcache |
204 | self.gendeps[fn] = gendeps | ||
205 | self.lookupcache[fn] = lookupcache | ||
206 | |||
207 | return taskdeps | ||
208 | 273 | ||
209 | def set_setscene_tasks(self, setscene_tasks): | 274 | def set_setscene_tasks(self, setscene_tasks): |
210 | self.setscenetasks = set(setscene_tasks) | 275 | self.setscenetasks = set(setscene_tasks) |
@@ -212,35 +277,47 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
212 | def finalise(self, fn, d, variant): | 277 | def finalise(self, fn, d, variant): |
213 | 278 | ||
214 | mc = d.getVar("__BBMULTICONFIG", False) or "" | 279 | mc = d.getVar("__BBMULTICONFIG", False) or "" |
280 | mcfn = fn | ||
215 | if variant or mc: | 281 | if variant or mc: |
216 | fn = bb.cache.realfn2virtual(fn, variant, mc) | 282 | mcfn = bb.cache.realfn2virtual(fn, variant, mc) |
217 | 283 | ||
218 | try: | 284 | try: |
219 | taskdeps = self._build_data(fn, d) | 285 | taskdeps, gendeps, lookupcache = self._build_data(mcfn, d) |
220 | except bb.parse.SkipRecipe: | 286 | except bb.parse.SkipRecipe: |
221 | raise | 287 | raise |
222 | except: | 288 | except: |
223 | bb.warn("Error during finalise of %s" % fn) | 289 | bb.warn("Error during finalise of %s" % mcfn) |
224 | raise | 290 | raise |
225 | 291 | ||
292 | basehashes = {} | ||
293 | for task in taskdeps: | ||
294 | basehashes[task] = self.basehash[mcfn + ":" + task] | ||
295 | |||
296 | d.setVar("__siggen_basehashes", basehashes) | ||
297 | d.setVar("__siggen_gendeps", gendeps) | ||
298 | d.setVar("__siggen_varvals", lookupcache) | ||
299 | d.setVar("__siggen_taskdeps", taskdeps) | ||
300 | |||
226 | #Slow but can be useful for debugging mismatched basehashes | 301 | #Slow but can be useful for debugging mismatched basehashes |
227 | #for task in self.taskdeps[fn]: | 302 | #self.setup_datacache_from_datastore(mcfn, d) |
228 | # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) | 303 | #for task in taskdeps: |
304 | # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False) | ||
229 | 305 | ||
230 | for task in taskdeps: | 306 | def setup_datacache_from_datastore(self, mcfn, d): |
231 | d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task]) | 307 | super().setup_datacache_from_datastore(mcfn, d) |
232 | 308 | ||
233 | def postparsing_clean_cache(self): | 309 | mc = bb.runqueue.mc_from_tid(mcfn) |
234 | # | 310 | for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]: |
235 | # After parsing we can remove some things from memory to reduce our memory footprint | 311 | if not hasattr(self.datacaches[mc], attr): |
236 | # | 312 | setattr(self.datacaches[mc], attr, {}) |
237 | self.gendeps = {} | 313 | self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals") |
238 | self.lookupcache = {} | 314 | self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps") |
239 | self.taskdeps = {} | 315 | self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps") |
240 | 316 | ||
241 | def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): | 317 | def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): |
242 | # Return True if we should keep the dependency, False to drop it | 318 | # Return True if we should keep the dependency, False to drop it |
243 | # We only manipulate the dependencies for packages not in the whitelist | 319 | # We only manipulate the dependencies for packages not in the ignore |
320 | # list | ||
244 | if self.twl and not self.twl.search(recipename): | 321 | if self.twl and not self.twl.search(recipename): |
245 | # then process the actual dependencies | 322 | # then process the actual dependencies |
246 | if self.twl.search(depname): | 323 | if self.twl.search(depname): |
@@ -258,38 +335,37 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
258 | 335 | ||
259 | def prep_taskhash(self, tid, deps, dataCaches): | 336 | def prep_taskhash(self, tid, deps, dataCaches): |
260 | 337 | ||
261 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | 338 | (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid) |
262 | 339 | ||
263 | self.basehash[tid] = dataCaches[mc].basetaskhash[tid] | 340 | self.basehash[tid] = dataCaches[mc].basetaskhash[tid] |
264 | self.runtaskdeps[tid] = [] | 341 | self.runtaskdeps[tid] = [] |
265 | self.file_checksum_values[tid] = [] | 342 | self.file_checksum_values[tid] = [] |
266 | recipename = dataCaches[mc].pkg_fn[fn] | 343 | recipename = dataCaches[mc].pkg_fn[mcfn] |
267 | 344 | ||
268 | self.tidtopn[tid] = recipename | 345 | self.tidtopn[tid] = recipename |
346 | # save hashfn for deps into siginfo? | ||
347 | for dep in deps: | ||
348 | (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep) | ||
349 | dep_pn = dataCaches[depmc].pkg_fn[depmcfn] | ||
269 | 350 | ||
270 | for dep in sorted(deps, key=clean_basepath): | 351 | if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches): |
271 | (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep) | ||
272 | depname = dataCaches[depmc].pkg_fn[depmcfn] | ||
273 | if not self.supports_multiconfig_datacaches and mc != depmc: | ||
274 | # If the signature generator doesn't understand multiconfig | ||
275 | # data caches, any dependency not in the same multiconfig must | ||
276 | # be skipped for backward compatibility | ||
277 | continue | ||
278 | if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches): | ||
279 | continue | 352 | continue |
353 | |||
280 | if dep not in self.taskhash: | 354 | if dep not in self.taskhash: |
281 | bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep) | 355 | bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep) |
282 | self.runtaskdeps[tid].append(dep) | ||
283 | 356 | ||
284 | if task in dataCaches[mc].file_checksums[fn]: | 357 | dep_pnid = build_pnid(depmc, dep_pn, deptask) |
358 | self.runtaskdeps[tid].append((dep_pnid, dep)) | ||
359 | |||
360 | if task in dataCaches[mc].file_checksums[mcfn]: | ||
285 | if self.checksum_cache: | 361 | if self.checksum_cache: |
286 | checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) | 362 | checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude) |
287 | else: | 363 | else: |
288 | checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) | 364 | checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude) |
289 | for (f,cs) in checksums: | 365 | for (f,cs) in checksums: |
290 | self.file_checksum_values[tid].append((f,cs)) | 366 | self.file_checksum_values[tid].append((f,cs)) |
291 | 367 | ||
292 | taskdep = dataCaches[mc].task_deps[fn] | 368 | taskdep = dataCaches[mc].task_deps[mcfn] |
293 | if 'nostamp' in taskdep and task in taskdep['nostamp']: | 369 | if 'nostamp' in taskdep and task in taskdep['nostamp']: |
294 | # Nostamp tasks need an implicit taint so that they force any dependent tasks to run | 370 | # Nostamp tasks need an implicit taint so that they force any dependent tasks to run |
295 | if tid in self.taints and self.taints[tid].startswith("nostamp:"): | 371 | if tid in self.taints and self.taints[tid].startswith("nostamp:"): |
@@ -300,7 +376,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
300 | taint = str(uuid.uuid4()) | 376 | taint = str(uuid.uuid4()) |
301 | self.taints[tid] = "nostamp:" + taint | 377 | self.taints[tid] = "nostamp:" + taint |
302 | 378 | ||
303 | taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn]) | 379 | taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn]) |
304 | if taint: | 380 | if taint: |
305 | self.taints[tid] = taint | 381 | self.taints[tid] = taint |
306 | logger.warning("%s is tainted from a forced run" % tid) | 382 | logger.warning("%s is tainted from a forced run" % tid) |
@@ -310,22 +386,24 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
310 | def get_taskhash(self, tid, deps, dataCaches): | 386 | def get_taskhash(self, tid, deps, dataCaches): |
311 | 387 | ||
312 | data = self.basehash[tid] | 388 | data = self.basehash[tid] |
313 | for dep in self.runtaskdeps[tid]: | 389 | for dep in sorted(self.runtaskdeps[tid]): |
314 | data = data + self.get_unihash(dep) | 390 | data += self.get_unihash(dep[1]) |
315 | 391 | ||
316 | for (f, cs) in self.file_checksum_values[tid]: | 392 | for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path): |
317 | if cs: | 393 | if cs: |
318 | data = data + cs | 394 | if "/./" in f: |
395 | data += "./" + f.split("/./")[1] | ||
396 | data += cs | ||
319 | 397 | ||
320 | if tid in self.taints: | 398 | if tid in self.taints: |
321 | if self.taints[tid].startswith("nostamp:"): | 399 | if self.taints[tid].startswith("nostamp:"): |
322 | data = data + self.taints[tid][8:] | 400 | data += self.taints[tid][8:] |
323 | else: | 401 | else: |
324 | data = data + self.taints[tid] | 402 | data += self.taints[tid] |
325 | 403 | ||
326 | h = hashlib.sha256(data.encode("utf-8")).hexdigest() | 404 | h = hashlib.sha256(data.encode("utf-8")).hexdigest() |
327 | self.taskhash[tid] = h | 405 | self.taskhash[tid] = h |
328 | #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) | 406 | #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task]) |
329 | return h | 407 | return h |
330 | 408 | ||
331 | def writeout_file_checksum_cache(self): | 409 | def writeout_file_checksum_cache(self): |
@@ -340,9 +418,12 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
340 | def save_unitaskhashes(self): | 418 | def save_unitaskhashes(self): |
341 | self.unihash_cache.save(self.unitaskhashes) | 419 | self.unihash_cache.save(self.unitaskhashes) |
342 | 420 | ||
343 | def dump_sigtask(self, fn, task, stampbase, runtime): | 421 | def copy_unitaskhashes(self, targetdir): |
422 | self.unihash_cache.copyfile(targetdir) | ||
344 | 423 | ||
345 | tid = fn + ":" + task | 424 | def dump_sigtask(self, mcfn, task, stampbase, runtime): |
425 | tid = mcfn + ":" + task | ||
426 | mc = bb.runqueue.mc_from_tid(mcfn) | ||
346 | referencestamp = stampbase | 427 | referencestamp = stampbase |
347 | if isinstance(runtime, str) and runtime.startswith("customfile"): | 428 | if isinstance(runtime, str) and runtime.startswith("customfile"): |
348 | sigfile = stampbase | 429 | sigfile = stampbase |
@@ -357,29 +438,34 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
357 | 438 | ||
358 | data = {} | 439 | data = {} |
359 | data['task'] = task | 440 | data['task'] = task |
360 | data['basewhitelist'] = self.basewhitelist | 441 | data['basehash_ignore_vars'] = self.basehash_ignore_vars |
361 | data['taskwhitelist'] = self.taskwhitelist | 442 | data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks |
362 | data['taskdeps'] = self.taskdeps[fn][task] | 443 | data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task] |
363 | data['basehash'] = self.basehash[tid] | 444 | data['basehash'] = self.basehash[tid] |
364 | data['gendeps'] = {} | 445 | data['gendeps'] = {} |
365 | data['varvals'] = {} | 446 | data['varvals'] = {} |
366 | data['varvals'][task] = self.lookupcache[fn][task] | 447 | data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task] |
367 | for dep in self.taskdeps[fn][task]: | 448 | for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]: |
368 | if dep in self.basewhitelist: | 449 | if dep in self.basehash_ignore_vars: |
369 | continue | 450 | continue |
370 | data['gendeps'][dep] = self.gendeps[fn][dep] | 451 | data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep] |
371 | data['varvals'][dep] = self.lookupcache[fn][dep] | 452 | data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep] |
372 | 453 | ||
373 | if runtime and tid in self.taskhash: | 454 | if runtime and tid in self.taskhash: |
374 | data['runtaskdeps'] = self.runtaskdeps[tid] | 455 | data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])] |
375 | data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]] | 456 | data['file_checksum_values'] = [] |
457 | for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path): | ||
458 | if "/./" in f: | ||
459 | data['file_checksum_values'].append(("./" + f.split("/./")[1], cs)) | ||
460 | else: | ||
461 | data['file_checksum_values'].append((os.path.basename(f), cs)) | ||
376 | data['runtaskhashes'] = {} | 462 | data['runtaskhashes'] = {} |
377 | for dep in data['runtaskdeps']: | 463 | for dep in self.runtaskdeps[tid]: |
378 | data['runtaskhashes'][dep] = self.get_unihash(dep) | 464 | data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1]) |
379 | data['taskhash'] = self.taskhash[tid] | 465 | data['taskhash'] = self.taskhash[tid] |
380 | data['unihash'] = self.get_unihash(tid) | 466 | data['unihash'] = self.get_unihash(tid) |
381 | 467 | ||
382 | taint = self.read_taint(fn, task, referencestamp) | 468 | taint = self.read_taint(mcfn, task, referencestamp) |
383 | if taint: | 469 | if taint: |
384 | data['taint'] = taint | 470 | data['taint'] = taint |
385 | 471 | ||
@@ -396,13 +482,13 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
396 | bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) | 482 | bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) |
397 | sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) | 483 | sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) |
398 | 484 | ||
399 | fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") | 485 | fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") |
400 | try: | 486 | try: |
401 | with os.fdopen(fd, "wb") as stream: | 487 | with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f: |
402 | p = pickle.dump(data, stream, -1) | 488 | json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder) |
403 | stream.flush() | 489 | f.flush() |
404 | os.chmod(tmpfile, 0o664) | 490 | os.chmod(tmpfile, 0o664) |
405 | os.rename(tmpfile, sigfile) | 491 | bb.utils.rename(tmpfile, sigfile) |
406 | except (OSError, IOError) as err: | 492 | except (OSError, IOError) as err: |
407 | try: | 493 | try: |
408 | os.unlink(tmpfile) | 494 | os.unlink(tmpfile) |
@@ -410,18 +496,6 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
410 | pass | 496 | pass |
411 | raise err | 497 | raise err |
412 | 498 | ||
413 | def dump_sigfn(self, fn, dataCaches, options): | ||
414 | if fn in self.taskdeps: | ||
415 | for task in self.taskdeps[fn]: | ||
416 | tid = fn + ":" + task | ||
417 | mc = bb.runqueue.mc_from_tid(tid) | ||
418 | if tid not in self.taskhash: | ||
419 | continue | ||
420 | if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]: | ||
421 | bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid) | ||
422 | bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid])) | ||
423 | self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True) | ||
424 | |||
425 | class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | 499 | class SignatureGeneratorBasicHash(SignatureGeneratorBasic): |
426 | name = "basichash" | 500 | name = "basichash" |
427 | 501 | ||
@@ -432,11 +506,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | |||
432 | # If task is not in basehash, then error | 506 | # If task is not in basehash, then error |
433 | return self.basehash[tid] | 507 | return self.basehash[tid] |
434 | 508 | ||
435 | def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False): | 509 | def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False): |
436 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | 510 | if taskname.endswith("_setscene"): |
437 | tid = fn + ":" + taskname[:-9] | 511 | tid = mcfn + ":" + taskname[:-9] |
438 | else: | 512 | else: |
439 | tid = fn + ":" + taskname | 513 | tid = mcfn + ":" + taskname |
440 | if clean: | 514 | if clean: |
441 | h = "*" | 515 | h = "*" |
442 | else: | 516 | else: |
@@ -444,29 +518,106 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | |||
444 | 518 | ||
445 | return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') | 519 | return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') |
446 | 520 | ||
447 | def stampcleanmask(self, stampbase, fn, taskname, extrainfo): | 521 | def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo): |
448 | return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True) | 522 | return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True) |
523 | |||
524 | def invalidate_task(self, task, mcfn): | ||
525 | bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task)) | ||
449 | 526 | ||
450 | def invalidate_task(self, task, d, fn): | 527 | mc = bb.runqueue.mc_from_tid(mcfn) |
451 | bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task)) | 528 | stamp = self.datacaches[mc].stamp[mcfn] |
452 | bb.build.write_taint(task, d, fn) | 529 | |
530 | taintfn = stamp + '.' + task + '.taint' | ||
531 | |||
532 | import uuid | ||
533 | bb.utils.mkdirhier(os.path.dirname(taintfn)) | ||
534 | # The specific content of the taint file is not really important, | ||
535 | # we just need it to be random, so a random UUID is used | ||
536 | with open(taintfn, 'w') as taintf: | ||
537 | taintf.write(str(uuid.uuid4())) | ||
453 | 538 | ||
454 | class SignatureGeneratorUniHashMixIn(object): | 539 | class SignatureGeneratorUniHashMixIn(object): |
455 | def __init__(self, data): | 540 | def __init__(self, data): |
456 | self.extramethod = {} | 541 | self.extramethod = {} |
542 | # NOTE: The cache only tracks hashes that exist. Hashes that don't | ||
543 | # exist are always queries from the server since it is possible for | ||
544 | # hashes to appear over time, but much less likely for them to | ||
545 | # disappear | ||
546 | self.unihash_exists_cache = set() | ||
547 | self.username = None | ||
548 | self.password = None | ||
549 | self.env = {} | ||
550 | |||
551 | origenv = data.getVar("BB_ORIGENV") | ||
552 | for e in HASHSERV_ENVVARS: | ||
553 | value = data.getVar(e) | ||
554 | if not value and origenv: | ||
555 | value = origenv.getVar(e) | ||
556 | if value: | ||
557 | self.env[e] = value | ||
457 | super().__init__(data) | 558 | super().__init__(data) |
458 | 559 | ||
459 | def get_taskdata(self): | 560 | def get_taskdata(self): |
460 | return (self.server, self.method, self.extramethod) + super().get_taskdata() | 561 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() |
461 | 562 | ||
462 | def set_taskdata(self, data): | 563 | def set_taskdata(self, data): |
463 | self.server, self.method, self.extramethod = data[:3] | 564 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] |
464 | super().set_taskdata(data[3:]) | 565 | super().set_taskdata(data[7:]) |
566 | |||
567 | def get_hashserv_creds(self): | ||
568 | if self.username and self.password: | ||
569 | return { | ||
570 | "username": self.username, | ||
571 | "password": self.password, | ||
572 | } | ||
465 | 573 | ||
574 | return {} | ||
575 | |||
576 | @contextmanager | ||
577 | def _client_env(self): | ||
578 | orig_env = os.environ.copy() | ||
579 | try: | ||
580 | for k, v in self.env.items(): | ||
581 | os.environ[k] = v | ||
582 | |||
583 | yield | ||
584 | finally: | ||
585 | for k, v in self.env.items(): | ||
586 | if k in orig_env: | ||
587 | os.environ[k] = orig_env[k] | ||
588 | else: | ||
589 | del os.environ[k] | ||
590 | |||
591 | @contextmanager | ||
466 | def client(self): | 592 | def client(self): |
467 | if getattr(self, '_client', None) is None: | 593 | with self._client_env(): |
468 | self._client = hashserv.create_client(self.server) | 594 | if getattr(self, '_client', None) is None: |
469 | return self._client | 595 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) |
596 | yield self._client | ||
597 | |||
598 | @contextmanager | ||
599 | def client_pool(self): | ||
600 | with self._client_env(): | ||
601 | if getattr(self, '_client_pool', None) is None: | ||
602 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) | ||
603 | yield self._client_pool | ||
604 | |||
605 | def reset(self, data): | ||
606 | self.__close_clients() | ||
607 | return super().reset(data) | ||
608 | |||
609 | def exit(self): | ||
610 | self.__close_clients() | ||
611 | return super().exit() | ||
612 | |||
613 | def __close_clients(self): | ||
614 | with self._client_env(): | ||
615 | if getattr(self, '_client', None) is not None: | ||
616 | self._client.close() | ||
617 | self._client = None | ||
618 | if getattr(self, '_client_pool', None) is not None: | ||
619 | self._client_pool.close() | ||
620 | self._client_pool = None | ||
470 | 621 | ||
471 | def get_stampfile_hash(self, tid): | 622 | def get_stampfile_hash(self, tid): |
472 | if tid in self.taskhash: | 623 | if tid in self.taskhash: |
@@ -499,7 +650,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
499 | return None | 650 | return None |
500 | return unihash | 651 | return unihash |
501 | 652 | ||
502 | def get_unihash(self, tid): | 653 | def get_cached_unihash(self, tid): |
503 | taskhash = self.taskhash[tid] | 654 | taskhash = self.taskhash[tid] |
504 | 655 | ||
505 | # If its not a setscene task we can return | 656 | # If its not a setscene task we can return |
@@ -514,40 +665,105 @@ class SignatureGeneratorUniHashMixIn(object): | |||
514 | self.unihash[tid] = unihash | 665 | self.unihash[tid] = unihash |
515 | return unihash | 666 | return unihash |
516 | 667 | ||
517 | # In the absence of being able to discover a unique hash from the | 668 | return None |
518 | # server, make it be equivalent to the taskhash. The unique "hash" only | ||
519 | # really needs to be a unique string (not even necessarily a hash), but | ||
520 | # making it match the taskhash has a few advantages: | ||
521 | # | ||
522 | # 1) All of the sstate code that assumes hashes can be the same | ||
523 | # 2) It provides maximal compatibility with builders that don't use | ||
524 | # an equivalency server | ||
525 | # 3) The value is easy for multiple independent builders to derive the | ||
526 | # same unique hash from the same input. This means that if the | ||
527 | # independent builders find the same taskhash, but it isn't reported | ||
528 | # to the server, there is a better chance that they will agree on | ||
529 | # the unique hash. | ||
530 | unihash = taskhash | ||
531 | 669 | ||
532 | try: | 670 | def _get_method(self, tid): |
533 | method = self.method | 671 | method = self.method |
534 | if tid in self.extramethod: | 672 | if tid in self.extramethod: |
535 | method = method + self.extramethod[tid] | 673 | method = method + self.extramethod[tid] |
536 | data = self.client().get_unihash(method, self.taskhash[tid]) | 674 | |
537 | if data: | 675 | return method |
538 | unihash = data | 676 | |
677 | def unihashes_exist(self, query): | ||
678 | if len(query) == 0: | ||
679 | return {} | ||
680 | |||
681 | uncached_query = {} | ||
682 | result = {} | ||
683 | for key, unihash in query.items(): | ||
684 | if unihash in self.unihash_exists_cache: | ||
685 | result[key] = True | ||
686 | else: | ||
687 | uncached_query[key] = unihash | ||
688 | |||
689 | if self.max_parallel <= 1 or len(uncached_query) <= 1: | ||
690 | # No parallelism required. Make the query serially with the single client | ||
691 | with self.client() as client: | ||
692 | uncached_result = { | ||
693 | key: client.unihash_exists(value) for key, value in uncached_query.items() | ||
694 | } | ||
695 | else: | ||
696 | with self.client_pool() as client_pool: | ||
697 | uncached_result = client_pool.unihashes_exist(uncached_query) | ||
698 | |||
699 | for key, exists in uncached_result.items(): | ||
700 | if exists: | ||
701 | self.unihash_exists_cache.add(query[key]) | ||
702 | result[key] = exists | ||
703 | |||
704 | return result | ||
705 | |||
706 | def get_unihash(self, tid): | ||
707 | return self.get_unihashes([tid])[tid] | ||
708 | |||
709 | def get_unihashes(self, tids): | ||
710 | """ | ||
711 | For a iterable of tids, returns a dictionary that maps each tid to a | ||
712 | unihash | ||
713 | """ | ||
714 | result = {} | ||
715 | queries = {} | ||
716 | query_result = {} | ||
717 | |||
718 | for tid in tids: | ||
719 | unihash = self.get_cached_unihash(tid) | ||
720 | if unihash: | ||
721 | result[tid] = unihash | ||
722 | else: | ||
723 | queries[tid] = (self._get_method(tid), self.taskhash[tid]) | ||
724 | |||
725 | if len(queries) == 0: | ||
726 | return result | ||
727 | |||
728 | if self.max_parallel <= 1 or len(queries) <= 1: | ||
729 | # No parallelism required. Make the query serially with the single client | ||
730 | with self.client() as client: | ||
731 | for tid, args in queries.items(): | ||
732 | query_result[tid] = client.get_unihash(*args) | ||
733 | else: | ||
734 | with self.client_pool() as client_pool: | ||
735 | query_result = client_pool.get_unihashes(queries) | ||
736 | |||
737 | for tid, unihash in query_result.items(): | ||
738 | # In the absence of being able to discover a unique hash from the | ||
739 | # server, make it be equivalent to the taskhash. The unique "hash" only | ||
740 | # really needs to be a unique string (not even necessarily a hash), but | ||
741 | # making it match the taskhash has a few advantages: | ||
742 | # | ||
743 | # 1) All of the sstate code that assumes hashes can be the same | ||
744 | # 2) It provides maximal compatibility with builders that don't use | ||
745 | # an equivalency server | ||
746 | # 3) The value is easy for multiple independent builders to derive the | ||
747 | # same unique hash from the same input. This means that if the | ||
748 | # independent builders find the same taskhash, but it isn't reported | ||
749 | # to the server, there is a better chance that they will agree on | ||
750 | # the unique hash. | ||
751 | taskhash = self.taskhash[tid] | ||
752 | if unihash: | ||
539 | # A unique hash equal to the taskhash is not very interesting, | 753 | # A unique hash equal to the taskhash is not very interesting, |
540 | # so it is reported it at debug level 2. If they differ, that | 754 | # so it is reported it at debug level 2. If they differ, that |
541 | # is much more interesting, so it is reported at debug level 1 | 755 | # is much more interesting, so it is reported at debug level 1 |
542 | hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) | 756 | hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) |
543 | else: | 757 | else: |
544 | hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) | 758 | hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) |
545 | except hashserv.client.HashConnectionError as e: | 759 | unihash = taskhash |
546 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | ||
547 | 760 | ||
548 | self.set_unihash(tid, unihash) | 761 | |
549 | self.unihash[tid] = unihash | 762 | self.set_unihash(tid, unihash) |
550 | return unihash | 763 | self.unihash[tid] = unihash |
764 | result[tid] = unihash | ||
765 | |||
766 | return result | ||
551 | 767 | ||
552 | def report_unihash(self, path, task, d): | 768 | def report_unihash(self, path, task, d): |
553 | import importlib | 769 | import importlib |
@@ -556,14 +772,14 @@ class SignatureGeneratorUniHashMixIn(object): | |||
556 | unihash = d.getVar('BB_UNIHASH') | 772 | unihash = d.getVar('BB_UNIHASH') |
557 | report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' | 773 | report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' |
558 | tempdir = d.getVar('T') | 774 | tempdir = d.getVar('T') |
559 | fn = d.getVar('BB_FILENAME') | 775 | mcfn = d.getVar('BB_FILENAME') |
560 | tid = fn + ':do_' + task | 776 | tid = mcfn + ':do_' + task |
561 | key = tid + ':' + taskhash | 777 | key = tid + ':' + taskhash |
562 | 778 | ||
563 | if self.setscenetasks and tid not in self.setscenetasks: | 779 | if self.setscenetasks and tid not in self.setscenetasks: |
564 | return | 780 | return |
565 | 781 | ||
566 | # This can happen if locked sigs are in action. Detect and just abort | 782 | # This can happen if locked sigs are in action. Detect and just exit |
567 | if taskhash != self.taskhash[tid]: | 783 | if taskhash != self.taskhash[tid]: |
568 | return | 784 | return |
569 | 785 | ||
@@ -611,17 +827,19 @@ class SignatureGeneratorUniHashMixIn(object): | |||
611 | if tid in self.extramethod: | 827 | if tid in self.extramethod: |
612 | method = method + self.extramethod[tid] | 828 | method = method + self.extramethod[tid] |
613 | 829 | ||
614 | data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data) | 830 | with self.client() as client: |
831 | data = client.report_unihash(taskhash, method, outhash, unihash, extra_data) | ||
832 | |||
615 | new_unihash = data['unihash'] | 833 | new_unihash = data['unihash'] |
616 | 834 | ||
617 | if new_unihash != unihash: | 835 | if new_unihash != unihash: |
618 | hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) | 836 | hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) |
619 | bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d) | 837 | bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d) |
620 | self.set_unihash(tid, new_unihash) | 838 | self.set_unihash(tid, new_unihash) |
621 | d.setVar('BB_UNIHASH', new_unihash) | 839 | d.setVar('BB_UNIHASH', new_unihash) |
622 | else: | 840 | else: |
623 | hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) | 841 | hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) |
624 | except hashserv.client.HashConnectionError as e: | 842 | except ConnectionError as e: |
625 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 843 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
626 | finally: | 844 | finally: |
627 | if sigfile: | 845 | if sigfile: |
@@ -642,7 +860,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
642 | if tid in self.extramethod: | 860 | if tid in self.extramethod: |
643 | method = method + self.extramethod[tid] | 861 | method = method + self.extramethod[tid] |
644 | 862 | ||
645 | data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | 863 | with self.client() as client: |
864 | data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | ||
865 | |||
646 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) | 866 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) |
647 | 867 | ||
648 | if data is None: | 868 | if data is None: |
@@ -661,7 +881,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
661 | # TODO: What to do here? | 881 | # TODO: What to do here? |
662 | hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) | 882 | hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) |
663 | 883 | ||
664 | except hashserv.client.HashConnectionError as e: | 884 | except ConnectionError as e: |
665 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 885 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
666 | 886 | ||
667 | return False | 887 | return False |
@@ -675,20 +895,20 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG | |||
675 | super().init_rundepcheck(data) | 895 | super().init_rundepcheck(data) |
676 | self.server = data.getVar('BB_HASHSERVE') | 896 | self.server = data.getVar('BB_HASHSERVE') |
677 | self.method = "sstate_output_hash" | 897 | self.method = "sstate_output_hash" |
898 | self.max_parallel = 1 | ||
678 | 899 | ||
679 | # | 900 | def clean_checksum_file_path(file_checksum_tuple): |
680 | # Dummy class used for bitbake-selftest | 901 | f, cs = file_checksum_tuple |
681 | # | 902 | if "/./" in f: |
682 | class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash): | 903 | return "./" + f.split("/./")[1] |
683 | name = "TestMulticonfigDepends" | 904 | return f |
684 | supports_multiconfig_datacaches = True | ||
685 | 905 | ||
686 | def dump_this_task(outfile, d): | 906 | def dump_this_task(outfile, d): |
687 | import bb.parse | 907 | import bb.parse |
688 | fn = d.getVar("BB_FILENAME") | 908 | mcfn = d.getVar("BB_FILENAME") |
689 | task = "do_" + d.getVar("BB_CURRENTTASK") | 909 | task = "do_" + d.getVar("BB_CURRENTTASK") |
690 | referencestamp = bb.build.stamp_internal(task, d, None, True) | 910 | referencestamp = bb.parse.siggen.stampfile_base(mcfn) |
691 | bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) | 911 | bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp) |
692 | 912 | ||
693 | def init_colors(enable_color): | 913 | def init_colors(enable_color): |
694 | """Initialise colour dict for passing to compare_sigfiles()""" | 914 | """Initialise colour dict for passing to compare_sigfiles()""" |
@@ -741,38 +961,15 @@ def list_inline_diff(oldlist, newlist, colors=None): | |||
741 | ret.append(item) | 961 | ret.append(item) |
742 | return '[%s]' % (', '.join(ret)) | 962 | return '[%s]' % (', '.join(ret)) |
743 | 963 | ||
744 | def clean_basepath(basepath): | 964 | # Handled renamed fields |
745 | basepath, dir, recipe_task = basepath.rsplit("/", 2) | 965 | def handle_renames(data): |
746 | cleaned = dir + '/' + recipe_task | 966 | if 'basewhitelist' in data: |
747 | 967 | data['basehash_ignore_vars'] = data['basewhitelist'] | |
748 | if basepath[0] == '/': | 968 | del data['basewhitelist'] |
749 | return cleaned | 969 | if 'taskwhitelist' in data: |
750 | 970 | data['taskhash_ignore_tasks'] = data['taskwhitelist'] | |
751 | if basepath.startswith("mc:") and basepath.count(':') >= 2: | 971 | del data['taskwhitelist'] |
752 | mc, mc_name, basepath = basepath.split(":", 2) | ||
753 | mc_suffix = ':mc:' + mc_name | ||
754 | else: | ||
755 | mc_suffix = '' | ||
756 | |||
757 | # mc stuff now removed from basepath. Whatever was next, if present will be the first | ||
758 | # suffix. ':/', recipe path start, marks the end of this. Something like | ||
759 | # 'virtual:a[:b[:c]]:/path...' (b and c being optional) | ||
760 | if basepath[0] != '/': | ||
761 | cleaned += ':' + basepath.split(':/', 1)[0] | ||
762 | |||
763 | return cleaned + mc_suffix | ||
764 | 972 | ||
765 | def clean_basepaths(a): | ||
766 | b = {} | ||
767 | for x in a: | ||
768 | b[clean_basepath(x)] = a[x] | ||
769 | return b | ||
770 | |||
771 | def clean_basepaths_list(a): | ||
772 | b = [] | ||
773 | for x in a: | ||
774 | b.append(clean_basepath(x)) | ||
775 | return b | ||
776 | 973 | ||
777 | def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | 974 | def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): |
778 | output = [] | 975 | output = [] |
@@ -794,20 +991,29 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
794 | formatparams.update(values) | 991 | formatparams.update(values) |
795 | return formatstr.format(**formatparams) | 992 | return formatstr.format(**formatparams) |
796 | 993 | ||
797 | with open(a, 'rb') as f: | 994 | try: |
798 | p1 = pickle.Unpickler(f) | 995 | with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f: |
799 | a_data = p1.load() | 996 | a_data = json.load(f, object_hook=SetDecoder) |
800 | with open(b, 'rb') as f: | 997 | except (TypeError, OSError) as err: |
801 | p2 = pickle.Unpickler(f) | 998 | bb.error("Failed to open sigdata file '%s': %s" % (a, str(err))) |
802 | b_data = p2.load() | 999 | raise err |
803 | 1000 | try: | |
804 | def dict_diff(a, b, whitelist=set()): | 1001 | with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f: |
1002 | b_data = json.load(f, object_hook=SetDecoder) | ||
1003 | except (TypeError, OSError) as err: | ||
1004 | bb.error("Failed to open sigdata file '%s': %s" % (b, str(err))) | ||
1005 | raise err | ||
1006 | |||
1007 | for data in [a_data, b_data]: | ||
1008 | handle_renames(data) | ||
1009 | |||
1010 | def dict_diff(a, b, ignored_vars=set()): | ||
805 | sa = set(a.keys()) | 1011 | sa = set(a.keys()) |
806 | sb = set(b.keys()) | 1012 | sb = set(b.keys()) |
807 | common = sa & sb | 1013 | common = sa & sb |
808 | changed = set() | 1014 | changed = set() |
809 | for i in common: | 1015 | for i in common: |
810 | if a[i] != b[i] and i not in whitelist: | 1016 | if a[i] != b[i] and i not in ignored_vars: |
811 | changed.add(i) | 1017 | changed.add(i) |
812 | added = sb - sa | 1018 | added = sb - sa |
813 | removed = sa - sb | 1019 | removed = sa - sb |
@@ -815,11 +1021,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
815 | 1021 | ||
816 | def file_checksums_diff(a, b): | 1022 | def file_checksums_diff(a, b): |
817 | from collections import Counter | 1023 | from collections import Counter |
818 | # Handle old siginfo format | 1024 | |
819 | if isinstance(a, dict): | 1025 | # Convert lists back to tuples |
820 | a = [(os.path.basename(f), cs) for f, cs in a.items()] | 1026 | a = [(f[0], f[1]) for f in a] |
821 | if isinstance(b, dict): | 1027 | b = [(f[0], f[1]) for f in b] |
822 | b = [(os.path.basename(f), cs) for f, cs in b.items()] | 1028 | |
823 | # Compare lists, ensuring we can handle duplicate filenames if they exist | 1029 | # Compare lists, ensuring we can handle duplicate filenames if they exist |
824 | removedcount = Counter(a) | 1030 | removedcount = Counter(a) |
825 | removedcount.subtract(b) | 1031 | removedcount.subtract(b) |
@@ -846,15 +1052,15 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
846 | removed = [x[0] for x in removed] | 1052 | removed = [x[0] for x in removed] |
847 | return changed, added, removed | 1053 | return changed, added, removed |
848 | 1054 | ||
849 | if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']: | 1055 | if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']: |
850 | output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist'])) | 1056 | output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars'])) |
851 | if a_data['basewhitelist'] and b_data['basewhitelist']: | 1057 | if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']: |
852 | output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])) | 1058 | output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars'])) |
853 | 1059 | ||
854 | if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']: | 1060 | if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']: |
855 | output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist'])) | 1061 | output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks'])) |
856 | if a_data['taskwhitelist'] and b_data['taskwhitelist']: | 1062 | if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']: |
857 | output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])) | 1063 | output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks'])) |
858 | 1064 | ||
859 | if a_data['taskdeps'] != b_data['taskdeps']: | 1065 | if a_data['taskdeps'] != b_data['taskdeps']: |
860 | output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) | 1066 | output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) |
@@ -862,23 +1068,23 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
862 | if a_data['basehash'] != b_data['basehash'] and not collapsed: | 1068 | if a_data['basehash'] != b_data['basehash'] and not collapsed: |
863 | output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash'])) | 1069 | output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash'])) |
864 | 1070 | ||
865 | changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist']) | 1071 | changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars']) |
866 | if changed: | 1072 | if changed: |
867 | for dep in changed: | 1073 | for dep in sorted(changed): |
868 | output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) | 1074 | output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) |
869 | if a_data['gendeps'][dep] and b_data['gendeps'][dep]: | 1075 | if a_data['gendeps'][dep] and b_data['gendeps'][dep]: |
870 | output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])) | 1076 | output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])) |
871 | if added: | 1077 | if added: |
872 | for dep in added: | 1078 | for dep in sorted(added): |
873 | output.append(color_format("{color_title}Dependency on variable %s was added") % (dep)) | 1079 | output.append(color_format("{color_title}Dependency on variable %s was added") % (dep)) |
874 | if removed: | 1080 | if removed: |
875 | for dep in removed: | 1081 | for dep in sorted(removed): |
876 | output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep)) | 1082 | output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep)) |
877 | 1083 | ||
878 | 1084 | ||
879 | changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals']) | 1085 | changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals']) |
880 | if changed: | 1086 | if changed: |
881 | for dep in changed: | 1087 | for dep in sorted(changed): |
882 | oldval = a_data['varvals'][dep] | 1088 | oldval = a_data['varvals'][dep] |
883 | newval = b_data['varvals'][dep] | 1089 | newval = b_data['varvals'][dep] |
884 | if newval and oldval and ('\n' in oldval or '\n' in newval): | 1090 | if newval and oldval and ('\n' in oldval or '\n' in newval): |
@@ -902,9 +1108,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
902 | output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) | 1108 | output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) |
903 | 1109 | ||
904 | if not 'file_checksum_values' in a_data: | 1110 | if not 'file_checksum_values' in a_data: |
905 | a_data['file_checksum_values'] = {} | 1111 | a_data['file_checksum_values'] = [] |
906 | if not 'file_checksum_values' in b_data: | 1112 | if not 'file_checksum_values' in b_data: |
907 | b_data['file_checksum_values'] = {} | 1113 | b_data['file_checksum_values'] = [] |
908 | 1114 | ||
909 | changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) | 1115 | changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) |
910 | if changed: | 1116 | if changed: |
@@ -931,11 +1137,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
931 | a = a_data['runtaskdeps'][idx] | 1137 | a = a_data['runtaskdeps'][idx] |
932 | b = b_data['runtaskdeps'][idx] | 1138 | b = b_data['runtaskdeps'][idx] |
933 | if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed: | 1139 | if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed: |
934 | changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b])) | 1140 | changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b])) |
935 | 1141 | ||
936 | if changed: | 1142 | if changed: |
937 | clean_a = clean_basepaths_list(a_data['runtaskdeps']) | 1143 | clean_a = a_data['runtaskdeps'] |
938 | clean_b = clean_basepaths_list(b_data['runtaskdeps']) | 1144 | clean_b = b_data['runtaskdeps'] |
939 | if clean_a != clean_b: | 1145 | if clean_a != clean_b: |
940 | output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors)) | 1146 | output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors)) |
941 | else: | 1147 | else: |
@@ -948,7 +1154,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
948 | b = b_data['runtaskhashes'] | 1154 | b = b_data['runtaskhashes'] |
949 | changed, added, removed = dict_diff(a, b) | 1155 | changed, added, removed = dict_diff(a, b) |
950 | if added: | 1156 | if added: |
951 | for dep in added: | 1157 | for dep in sorted(added): |
952 | bdep_found = False | 1158 | bdep_found = False |
953 | if removed: | 1159 | if removed: |
954 | for bdep in removed: | 1160 | for bdep in removed: |
@@ -956,9 +1162,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
956 | #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep)) | 1162 | #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep)) |
957 | bdep_found = True | 1163 | bdep_found = True |
958 | if not bdep_found: | 1164 | if not bdep_found: |
959 | output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep])) | 1165 | output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep])) |
960 | if removed: | 1166 | if removed: |
961 | for dep in removed: | 1167 | for dep in sorted(removed): |
962 | adep_found = False | 1168 | adep_found = False |
963 | if added: | 1169 | if added: |
964 | for adep in added: | 1170 | for adep in added: |
@@ -966,11 +1172,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
966 | #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep)) | 1172 | #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep)) |
967 | adep_found = True | 1173 | adep_found = True |
968 | if not adep_found: | 1174 | if not adep_found: |
969 | output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep])) | 1175 | output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep])) |
970 | if changed: | 1176 | if changed: |
971 | for dep in changed: | 1177 | for dep in sorted(changed): |
972 | if not collapsed: | 1178 | if not collapsed: |
973 | output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep])) | 1179 | output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep])) |
974 | if callable(recursecb): | 1180 | if callable(recursecb): |
975 | recout = recursecb(dep, a[dep], b[dep]) | 1181 | recout = recursecb(dep, a[dep], b[dep]) |
976 | if recout: | 1182 | if recout: |
@@ -980,6 +1186,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): | |||
980 | # If a dependent hash changed, might as well print the line above and then defer to the changes in | 1186 | # If a dependent hash changed, might as well print the line above and then defer to the changes in |
981 | # that hash since in all likelyhood, they're the same changes this task also saw. | 1187 | # that hash since in all likelyhood, they're the same changes this task also saw. |
982 | output = [output[-1]] + recout | 1188 | output = [output[-1]] + recout |
1189 | break | ||
983 | 1190 | ||
984 | a_taint = a_data.get('taint', None) | 1191 | a_taint = a_data.get('taint', None) |
985 | b_taint = b_data.get('taint', None) | 1192 | b_taint = b_data.get('taint', None) |
@@ -1001,7 +1208,7 @@ def calc_basehash(sigdata): | |||
1001 | basedata = '' | 1208 | basedata = '' |
1002 | 1209 | ||
1003 | alldeps = sigdata['taskdeps'] | 1210 | alldeps = sigdata['taskdeps'] |
1004 | for dep in alldeps: | 1211 | for dep in sorted(alldeps): |
1005 | basedata = basedata + dep | 1212 | basedata = basedata + dep |
1006 | val = sigdata['varvals'][dep] | 1213 | val = sigdata['varvals'][dep] |
1007 | if val is not None: | 1214 | if val is not None: |
@@ -1017,6 +1224,8 @@ def calc_taskhash(sigdata): | |||
1017 | 1224 | ||
1018 | for c in sigdata['file_checksum_values']: | 1225 | for c in sigdata['file_checksum_values']: |
1019 | if c[1]: | 1226 | if c[1]: |
1227 | if "./" in c[0]: | ||
1228 | data = data + c[0] | ||
1020 | data = data + c[1] | 1229 | data = data + c[1] |
1021 | 1230 | ||
1022 | if 'taint' in sigdata: | 1231 | if 'taint' in sigdata: |
@@ -1031,32 +1240,37 @@ def calc_taskhash(sigdata): | |||
1031 | def dump_sigfile(a): | 1240 | def dump_sigfile(a): |
1032 | output = [] | 1241 | output = [] |
1033 | 1242 | ||
1034 | with open(a, 'rb') as f: | 1243 | try: |
1035 | p1 = pickle.Unpickler(f) | 1244 | with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f: |
1036 | a_data = p1.load() | 1245 | a_data = json.load(f, object_hook=SetDecoder) |
1246 | except (TypeError, OSError) as err: | ||
1247 | bb.error("Failed to open sigdata file '%s': %s" % (a, str(err))) | ||
1248 | raise err | ||
1249 | |||
1250 | handle_renames(a_data) | ||
1037 | 1251 | ||
1038 | output.append("basewhitelist: %s" % (a_data['basewhitelist'])) | 1252 | output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars']))) |
1039 | 1253 | ||
1040 | output.append("taskwhitelist: %s" % (a_data['taskwhitelist'])) | 1254 | output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or []))) |
1041 | 1255 | ||
1042 | output.append("Task dependencies: %s" % (sorted(a_data['taskdeps']))) | 1256 | output.append("Task dependencies: %s" % (sorted(a_data['taskdeps']))) |
1043 | 1257 | ||
1044 | output.append("basehash: %s" % (a_data['basehash'])) | 1258 | output.append("basehash: %s" % (a_data['basehash'])) |
1045 | 1259 | ||
1046 | for dep in a_data['gendeps']: | 1260 | for dep in sorted(a_data['gendeps']): |
1047 | output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])) | 1261 | output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep]))) |
1048 | 1262 | ||
1049 | for dep in a_data['varvals']: | 1263 | for dep in sorted(a_data['varvals']): |
1050 | output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep])) | 1264 | output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep])) |
1051 | 1265 | ||
1052 | if 'runtaskdeps' in a_data: | 1266 | if 'runtaskdeps' in a_data: |
1053 | output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps'])) | 1267 | output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps']))) |
1054 | 1268 | ||
1055 | if 'file_checksum_values' in a_data: | 1269 | if 'file_checksum_values' in a_data: |
1056 | output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values'])) | 1270 | output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values']))) |
1057 | 1271 | ||
1058 | if 'runtaskhashes' in a_data: | 1272 | if 'runtaskhashes' in a_data: |
1059 | for dep in a_data['runtaskhashes']: | 1273 | for dep in sorted(a_data['runtaskhashes']): |
1060 | output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])) | 1274 | output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])) |
1061 | 1275 | ||
1062 | if 'taint' in a_data: | 1276 | if 'taint' in a_data: |
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py index 47bad6d1fa..66545a65af 100644 --- a/bitbake/lib/bb/taskdata.py +++ b/bitbake/lib/bb/taskdata.py | |||
@@ -39,7 +39,7 @@ class TaskData: | |||
39 | """ | 39 | """ |
40 | BitBake Task Data implementation | 40 | BitBake Task Data implementation |
41 | """ | 41 | """ |
42 | def __init__(self, abort = True, skiplist = None, allowincomplete = False): | 42 | def __init__(self, halt = True, skiplist = None, allowincomplete = False): |
43 | self.build_targets = {} | 43 | self.build_targets = {} |
44 | self.run_targets = {} | 44 | self.run_targets = {} |
45 | 45 | ||
@@ -57,7 +57,7 @@ class TaskData: | |||
57 | self.failed_rdeps = [] | 57 | self.failed_rdeps = [] |
58 | self.failed_fns = [] | 58 | self.failed_fns = [] |
59 | 59 | ||
60 | self.abort = abort | 60 | self.halt = halt |
61 | self.allowincomplete = allowincomplete | 61 | self.allowincomplete = allowincomplete |
62 | 62 | ||
63 | self.skiplist = skiplist | 63 | self.skiplist = skiplist |
@@ -328,7 +328,7 @@ class TaskData: | |||
328 | try: | 328 | try: |
329 | self.add_provider_internal(cfgData, dataCache, item) | 329 | self.add_provider_internal(cfgData, dataCache, item) |
330 | except bb.providers.NoProvider: | 330 | except bb.providers.NoProvider: |
331 | if self.abort: | 331 | if self.halt: |
332 | raise | 332 | raise |
333 | self.remove_buildtarget(item) | 333 | self.remove_buildtarget(item) |
334 | 334 | ||
@@ -451,12 +451,12 @@ class TaskData: | |||
451 | for target in self.build_targets: | 451 | for target in self.build_targets: |
452 | if fn in self.build_targets[target]: | 452 | if fn in self.build_targets[target]: |
453 | self.build_targets[target].remove(fn) | 453 | self.build_targets[target].remove(fn) |
454 | if len(self.build_targets[target]) == 0: | 454 | if not self.build_targets[target]: |
455 | self.remove_buildtarget(target, missing_list) | 455 | self.remove_buildtarget(target, missing_list) |
456 | for target in self.run_targets: | 456 | for target in self.run_targets: |
457 | if fn in self.run_targets[target]: | 457 | if fn in self.run_targets[target]: |
458 | self.run_targets[target].remove(fn) | 458 | self.run_targets[target].remove(fn) |
459 | if len(self.run_targets[target]) == 0: | 459 | if not self.run_targets[target]: |
460 | self.remove_runtarget(target, missing_list) | 460 | self.remove_runtarget(target, missing_list) |
461 | 461 | ||
462 | def remove_buildtarget(self, target, missing_list=None): | 462 | def remove_buildtarget(self, target, missing_list=None): |
@@ -479,7 +479,7 @@ class TaskData: | |||
479 | fn = tid.rsplit(":",1)[0] | 479 | fn = tid.rsplit(":",1)[0] |
480 | self.fail_fn(fn, missing_list) | 480 | self.fail_fn(fn, missing_list) |
481 | 481 | ||
482 | if self.abort and target in self.external_targets: | 482 | if self.halt and target in self.external_targets: |
483 | logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list) | 483 | logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list) |
484 | raise bb.providers.NoProvider(target) | 484 | raise bb.providers.NoProvider(target) |
485 | 485 | ||
@@ -516,7 +516,7 @@ class TaskData: | |||
516 | self.add_provider_internal(cfgData, dataCache, target) | 516 | self.add_provider_internal(cfgData, dataCache, target) |
517 | added = added + 1 | 517 | added = added + 1 |
518 | except bb.providers.NoProvider: | 518 | except bb.providers.NoProvider: |
519 | if self.abort and target in self.external_targets and not self.allowincomplete: | 519 | if self.halt and target in self.external_targets and not self.allowincomplete: |
520 | raise | 520 | raise |
521 | if not self.allowincomplete: | 521 | if not self.allowincomplete: |
522 | self.remove_buildtarget(target) | 522 | self.remove_buildtarget(target) |
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index 826a2d2f6d..f6585fb3aa 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest): | |||
44 | def parseExpression(self, exp): | 44 | def parseExpression(self, exp): |
45 | parsedvar = self.d.expandWithRefs(exp, None) | 45 | parsedvar = self.d.expandWithRefs(exp, None) |
46 | self.references = parsedvar.references | 46 | self.references = parsedvar.references |
47 | self.execs = parsedvar.execs | ||
47 | 48 | ||
48 | def test_simple_reference(self): | 49 | def test_simple_reference(self): |
49 | self.setEmptyVars(["FOO"]) | 50 | self.setEmptyVars(["FOO"]) |
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest): | |||
61 | self.parseExpression("${@d.getVar('BAR') + 'foo'}") | 62 | self.parseExpression("${@d.getVar('BAR') + 'foo'}") |
62 | self.assertReferences(set(["BAR"])) | 63 | self.assertReferences(set(["BAR"])) |
63 | 64 | ||
65 | def test_python_exec_reference(self): | ||
66 | self.parseExpression("${@eval('3 * 5')}") | ||
67 | self.assertReferences(set()) | ||
68 | self.assertExecs(set(["eval"])) | ||
69 | |||
64 | class ShellReferenceTest(ReferenceTest): | 70 | class ShellReferenceTest(ReferenceTest): |
65 | 71 | ||
66 | def parseExpression(self, exp): | 72 | def parseExpression(self, exp): |
@@ -111,9 +117,9 @@ ${D}${libdir}/pkgconfig/*.pc | |||
111 | self.assertExecs(set(["sed"])) | 117 | self.assertExecs(set(["sed"])) |
112 | 118 | ||
113 | def test_parameter_expansion_modifiers(self): | 119 | def test_parameter_expansion_modifiers(self): |
114 | # - and + are also valid modifiers for parameter expansion, but are | 120 | # -,+ and : are also valid modifiers for parameter expansion, but are |
115 | # valid characters in bitbake variable names, so are not included here | 121 | # valid characters in bitbake variable names, so are not included here |
116 | for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'): | 122 | for i in ('=', '?', '#', '%', '##', '%%'): |
117 | name = "foo%sbar" % i | 123 | name = "foo%sbar" % i |
118 | self.parseExpression("${%s}" % name) | 124 | self.parseExpression("${%s}" % name) |
119 | self.assertNotIn(name, self.references) | 125 | self.assertNotIn(name, self.references) |
@@ -318,7 +324,7 @@ d.getVar(a(), False) | |||
318 | "filename": "example.bb", | 324 | "filename": "example.bb", |
319 | }) | 325 | }) |
320 | 326 | ||
321 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) | 327 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) |
322 | 328 | ||
323 | self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"])) | 329 | self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"])) |
324 | 330 | ||
@@ -365,7 +371,7 @@ esac | |||
365 | self.d.setVarFlags("FOO", {"func": True}) | 371 | self.d.setVarFlags("FOO", {"func": True}) |
366 | self.setEmptyVars(execs) | 372 | self.setEmptyVars(execs) |
367 | 373 | ||
368 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) | 374 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) |
369 | 375 | ||
370 | self.assertEqual(deps, set(["somevar", "inverted"] + execs)) | 376 | self.assertEqual(deps, set(["somevar", "inverted"] + execs)) |
371 | 377 | ||
@@ -375,7 +381,7 @@ esac | |||
375 | self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") | 381 | self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") |
376 | self.d.setVarFlag("FOO", "vardeps", "oe_libinstall") | 382 | self.d.setVarFlag("FOO", "vardeps", "oe_libinstall") |
377 | 383 | ||
378 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) | 384 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) |
379 | 385 | ||
380 | self.assertEqual(deps, set(["oe_libinstall"])) | 386 | self.assertEqual(deps, set(["oe_libinstall"])) |
381 | 387 | ||
@@ -384,7 +390,7 @@ esac | |||
384 | self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") | 390 | self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") |
385 | self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}") | 391 | self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}") |
386 | 392 | ||
387 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) | 393 | deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) |
388 | 394 | ||
389 | self.assertEqual(deps, set(["oe_libinstall"])) | 395 | self.assertEqual(deps, set(["oe_libinstall"])) |
390 | 396 | ||
@@ -399,7 +405,7 @@ esac | |||
399 | # Check dependencies | 405 | # Check dependencies |
400 | self.d.setVar('ANOTHERVAR', expr) | 406 | self.d.setVar('ANOTHERVAR', expr) |
401 | self.d.setVar('TESTVAR', 'anothervalue testval testval2') | 407 | self.d.setVar('TESTVAR', 'anothervalue testval testval2') |
402 | deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d) | 408 | deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) |
403 | self.assertEqual(sorted(values.splitlines()), | 409 | self.assertEqual(sorted(values.splitlines()), |
404 | sorted([expr, | 410 | sorted([expr, |
405 | 'TESTVAR{anothervalue} = Set', | 411 | 'TESTVAR{anothervalue} = Set', |
@@ -412,11 +418,55 @@ esac | |||
412 | # Check final value | 418 | # Check final value |
413 | self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) | 419 | self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) |
414 | 420 | ||
421 | def test_contains_vardeps_excluded(self): | ||
422 | # Check the ignored_vars option to build_dependencies is handled by contains functionality | ||
423 | varval = '${TESTVAR2} ${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)}' | ||
424 | self.d.setVar('ANOTHERVAR', varval) | ||
425 | self.d.setVar('TESTVAR', 'anothervalue testval testval2') | ||
426 | self.d.setVar('TESTVAR2', 'testval3') | ||
427 | deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d) | ||
428 | self.assertEqual(sorted(values.splitlines()), sorted([varval])) | ||
429 | self.assertEqual(deps, set(["TESTVAR2"])) | ||
430 | self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue']) | ||
431 | |||
432 | # Check the vardepsexclude flag is handled by contains functionality | ||
433 | self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR') | ||
434 | deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) | ||
435 | self.assertEqual(sorted(values.splitlines()), sorted([varval])) | ||
436 | self.assertEqual(deps, set(["TESTVAR2"])) | ||
437 | self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue']) | ||
438 | |||
439 | def test_contains_vardeps_override_operators(self): | ||
440 | # Check override operators handle dependencies correctly with the contains functionality | ||
441 | expr_plain = 'testval' | ||
442 | expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} ' | ||
443 | expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}' | ||
444 | expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}' | ||
445 | # Check dependencies | ||
446 | self.d.setVar('ANOTHERVAR', expr_plain) | ||
447 | self.d.prependVar('ANOTHERVAR', expr_prepend) | ||
448 | self.d.appendVar('ANOTHERVAR', expr_append) | ||
449 | self.d.setVar('ANOTHERVAR:remove', expr_remove) | ||
450 | self.d.setVar('TESTVAR1', 'blah') | ||
451 | self.d.setVar('TESTVAR2', 'testval2') | ||
452 | self.d.setVar('TESTVAR3', 'no-testval') | ||
453 | deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d) | ||
454 | self.assertEqual(sorted(values.splitlines()), | ||
455 | sorted([ | ||
456 | expr_prepend + expr_plain + expr_append, | ||
457 | '_remove of ' + expr_remove, | ||
458 | 'TESTVAR1{testval1} = Unset', | ||
459 | 'TESTVAR2{testval2} = Set', | ||
460 | 'TESTVAR3{no-testval} = Set', | ||
461 | ])) | ||
462 | # Check final value | ||
463 | self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2']) | ||
464 | |||
415 | #Currently no wildcard support | 465 | #Currently no wildcard support |
416 | #def test_vardeps_wildcards(self): | 466 | #def test_vardeps_wildcards(self): |
417 | # self.d.setVar("oe_libinstall", "echo test") | 467 | # self.d.setVar("oe_libinstall", "echo test") |
418 | # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") | 468 | # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") |
419 | # self.d.setVarFlag("FOO", "vardeps", "oe_*") | 469 | # self.d.setVarFlag("FOO", "vardeps", "oe_*") |
420 | # self.assertEquals(deps, set(["oe_libinstall"])) | 470 | # self.assertEqual(deps, set(["oe_libinstall"])) |
421 | 471 | ||
422 | 472 | ||
diff --git a/bitbake/lib/bb/tests/color.py b/bitbake/lib/bb/tests/color.py index bf03750c69..bb70cb393d 100644 --- a/bitbake/lib/bb/tests/color.py +++ b/bitbake/lib/bb/tests/color.py | |||
@@ -20,7 +20,7 @@ class ProgressWatcher: | |||
20 | def __init__(self): | 20 | def __init__(self): |
21 | self._reports = [] | 21 | self._reports = [] |
22 | 22 | ||
23 | def handle_event(self, event): | 23 | def handle_event(self, event, d): |
24 | self._reports.append((event.progress, event.rate)) | 24 | self._reports.append((event.progress, event.rate)) |
25 | 25 | ||
26 | def reports(self): | 26 | def reports(self): |
@@ -31,7 +31,7 @@ class ColorCodeTests(unittest.TestCase): | |||
31 | def setUp(self): | 31 | def setUp(self): |
32 | self.d = bb.data.init() | 32 | self.d = bb.data.init() |
33 | self._progress_watcher = ProgressWatcher() | 33 | self._progress_watcher = ProgressWatcher() |
34 | bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event) | 34 | bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d) |
35 | 35 | ||
36 | def tearDown(self): | 36 | def tearDown(self): |
37 | bb.event.remove("bb.build.TaskProgress", None) | 37 | bb.event.remove("bb.build.TaskProgress", None) |
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py new file mode 100644 index 0000000000..95af3f96d7 --- /dev/null +++ b/bitbake/lib/bb/tests/compression.py | |||
@@ -0,0 +1,100 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | from pathlib import Path | ||
8 | import bb.compress.lz4 | ||
9 | import bb.compress.zstd | ||
10 | import contextlib | ||
11 | import os | ||
12 | import shutil | ||
13 | import tempfile | ||
14 | import unittest | ||
15 | import subprocess | ||
16 | |||
17 | |||
18 | class CompressionTests(object): | ||
19 | def setUp(self): | ||
20 | self._t = tempfile.TemporaryDirectory() | ||
21 | self.tmpdir = Path(self._t.name) | ||
22 | self.addCleanup(self._t.cleanup) | ||
23 | |||
24 | def _file_helper(self, mode_suffix, data): | ||
25 | tmp_file = self.tmpdir / "compressed" | ||
26 | |||
27 | with self.do_open(tmp_file, mode="w" + mode_suffix) as f: | ||
28 | f.write(data) | ||
29 | |||
30 | with self.do_open(tmp_file, mode="r" + mode_suffix) as f: | ||
31 | read_data = f.read() | ||
32 | |||
33 | self.assertEqual(read_data, data) | ||
34 | |||
35 | def test_text_file(self): | ||
36 | self._file_helper("t", "Hello") | ||
37 | |||
38 | def test_binary_file(self): | ||
39 | self._file_helper("b", "Hello".encode("utf-8")) | ||
40 | |||
41 | def _pipe_helper(self, mode_suffix, data): | ||
42 | rfd, wfd = os.pipe() | ||
43 | with open(rfd, "rb") as r, open(wfd, "wb") as w: | ||
44 | with self.do_open(r, mode="r" + mode_suffix) as decompress: | ||
45 | with self.do_open(w, mode="w" + mode_suffix) as compress: | ||
46 | compress.write(data) | ||
47 | read_data = decompress.read() | ||
48 | |||
49 | self.assertEqual(read_data, data) | ||
50 | |||
51 | def test_text_pipe(self): | ||
52 | self._pipe_helper("t", "Hello") | ||
53 | |||
54 | def test_binary_pipe(self): | ||
55 | self._pipe_helper("b", "Hello".encode("utf-8")) | ||
56 | |||
57 | def test_bad_decompress(self): | ||
58 | tmp_file = self.tmpdir / "compressed" | ||
59 | with tmp_file.open("wb") as f: | ||
60 | f.write(b"\x00") | ||
61 | |||
62 | with self.assertRaises(OSError): | ||
63 | with self.do_open(tmp_file, mode="rb", stderr=subprocess.DEVNULL) as f: | ||
64 | data = f.read() | ||
65 | |||
66 | |||
67 | class LZ4Tests(CompressionTests, unittest.TestCase): | ||
68 | def setUp(self): | ||
69 | if shutil.which("lz4c") is None: | ||
70 | self.skipTest("'lz4c' not found") | ||
71 | super().setUp() | ||
72 | |||
73 | @contextlib.contextmanager | ||
74 | def do_open(self, *args, **kwargs): | ||
75 | with bb.compress.lz4.open(*args, **kwargs) as f: | ||
76 | yield f | ||
77 | |||
78 | |||
79 | class ZStdTests(CompressionTests, unittest.TestCase): | ||
80 | def setUp(self): | ||
81 | if shutil.which("zstd") is None: | ||
82 | self.skipTest("'zstd' not found") | ||
83 | super().setUp() | ||
84 | |||
85 | @contextlib.contextmanager | ||
86 | def do_open(self, *args, **kwargs): | ||
87 | with bb.compress.zstd.open(*args, **kwargs) as f: | ||
88 | yield f | ||
89 | |||
90 | |||
91 | class PZStdTests(CompressionTests, unittest.TestCase): | ||
92 | def setUp(self): | ||
93 | if shutil.which("pzstd") is None: | ||
94 | self.skipTest("'pzstd' not found") | ||
95 | super().setUp() | ||
96 | |||
97 | @contextlib.contextmanager | ||
98 | def do_open(self, *args, **kwargs): | ||
99 | with bb.compress.zstd.open(*args, num_threads=2, **kwargs) as f: | ||
100 | yield f | ||
diff --git a/bitbake/lib/bb/tests/cooker.py b/bitbake/lib/bb/tests/cooker.py index c82d4b7b81..9e524ae345 100644 --- a/bitbake/lib/bb/tests/cooker.py +++ b/bitbake/lib/bb/tests/cooker.py | |||
@@ -1,6 +1,8 @@ | |||
1 | # | 1 | # |
2 | # BitBake Tests for cooker.py | 2 | # BitBake Tests for cooker.py |
3 | # | 3 | # |
4 | # Copyright BitBake Contributors | ||
5 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 7 | # |
6 | 8 | ||
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py index 1d4a64b109..cbc7c1ecd4 100644 --- a/bitbake/lib/bb/tests/data.py +++ b/bitbake/lib/bb/tests/data.py | |||
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase): | |||
60 | val = self.d.expand("${@5*12}") | 60 | val = self.d.expand("${@5*12}") |
61 | self.assertEqual(str(val), "60") | 61 | self.assertEqual(str(val), "60") |
62 | 62 | ||
63 | def test_python_snippet_w_dict(self): | ||
64 | val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}") | ||
65 | self.assertEqual(str(val), "1") | ||
66 | |||
67 | def test_python_unexpanded_multi(self): | ||
68 | self.d.setVar("bar", "${unsetvar}") | ||
69 | val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}") | ||
70 | self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo") | ||
71 | |||
63 | def test_expand_in_python_snippet(self): | 72 | def test_expand_in_python_snippet(self): |
64 | val = self.d.expand("${@'boo ' + '${foo}'}") | 73 | val = self.d.expand("${@'boo ' + '${foo}'}") |
65 | self.assertEqual(str(val), "boo value_of_foo") | 74 | self.assertEqual(str(val), "boo value_of_foo") |
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase): | |||
68 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") | 77 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") |
69 | self.assertEqual(str(val), "value_of_foo value_of_bar") | 78 | self.assertEqual(str(val), "value_of_foo value_of_bar") |
70 | 79 | ||
80 | def test_python_snippet_function_reference(self): | ||
81 | self.d.setVar("TESTVAL", "testvalue") | ||
82 | self.d.setVar("testfunc", 'd.getVar("TESTVAL")') | ||
83 | context = bb.utils.get_context() | ||
84 | context["testfunc"] = lambda d: d.getVar("TESTVAL") | ||
85 | val = self.d.expand("${@testfunc(d)}") | ||
86 | self.assertEqual(str(val), "testvalue") | ||
87 | |||
88 | def test_python_snippet_builtin_metadata(self): | ||
89 | self.d.setVar("eval", "INVALID") | ||
90 | self.d.expand("${@eval('3')}") | ||
91 | |||
71 | def test_python_unexpanded(self): | 92 | def test_python_unexpanded(self): |
72 | self.d.setVar("bar", "${unsetvar}") | 93 | self.d.setVar("bar", "${unsetvar}") |
73 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") | 94 | val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") |
@@ -245,35 +266,35 @@ class TestConcatOverride(unittest.TestCase): | |||
245 | 266 | ||
246 | def test_prepend(self): | 267 | def test_prepend(self): |
247 | self.d.setVar("TEST", "${VAL}") | 268 | self.d.setVar("TEST", "${VAL}") |
248 | self.d.setVar("TEST_prepend", "${FOO}:") | 269 | self.d.setVar("TEST:prepend", "${FOO}:") |
249 | self.assertEqual(self.d.getVar("TEST"), "foo:val") | 270 | self.assertEqual(self.d.getVar("TEST"), "foo:val") |
250 | 271 | ||
251 | def test_append(self): | 272 | def test_append(self): |
252 | self.d.setVar("TEST", "${VAL}") | 273 | self.d.setVar("TEST", "${VAL}") |
253 | self.d.setVar("TEST_append", ":${BAR}") | 274 | self.d.setVar("TEST:append", ":${BAR}") |
254 | self.assertEqual(self.d.getVar("TEST"), "val:bar") | 275 | self.assertEqual(self.d.getVar("TEST"), "val:bar") |
255 | 276 | ||
256 | def test_multiple_append(self): | 277 | def test_multiple_append(self): |
257 | self.d.setVar("TEST", "${VAL}") | 278 | self.d.setVar("TEST", "${VAL}") |
258 | self.d.setVar("TEST_prepend", "${FOO}:") | 279 | self.d.setVar("TEST:prepend", "${FOO}:") |
259 | self.d.setVar("TEST_append", ":val2") | 280 | self.d.setVar("TEST:append", ":val2") |
260 | self.d.setVar("TEST_append", ":${BAR}") | 281 | self.d.setVar("TEST:append", ":${BAR}") |
261 | self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") | 282 | self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") |
262 | 283 | ||
263 | def test_append_unset(self): | 284 | def test_append_unset(self): |
264 | self.d.setVar("TEST_prepend", "${FOO}:") | 285 | self.d.setVar("TEST:prepend", "${FOO}:") |
265 | self.d.setVar("TEST_append", ":val2") | 286 | self.d.setVar("TEST:append", ":val2") |
266 | self.d.setVar("TEST_append", ":${BAR}") | 287 | self.d.setVar("TEST:append", ":${BAR}") |
267 | self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") | 288 | self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") |
268 | 289 | ||
269 | def test_remove(self): | 290 | def test_remove(self): |
270 | self.d.setVar("TEST", "${VAL} ${BAR}") | 291 | self.d.setVar("TEST", "${VAL} ${BAR}") |
271 | self.d.setVar("TEST_remove", "val") | 292 | self.d.setVar("TEST:remove", "val") |
272 | self.assertEqual(self.d.getVar("TEST"), " bar") | 293 | self.assertEqual(self.d.getVar("TEST"), " bar") |
273 | 294 | ||
274 | def test_remove_cleared(self): | 295 | def test_remove_cleared(self): |
275 | self.d.setVar("TEST", "${VAL} ${BAR}") | 296 | self.d.setVar("TEST", "${VAL} ${BAR}") |
276 | self.d.setVar("TEST_remove", "val") | 297 | self.d.setVar("TEST:remove", "val") |
277 | self.d.setVar("TEST", "${VAL} ${BAR}") | 298 | self.d.setVar("TEST", "${VAL} ${BAR}") |
278 | self.assertEqual(self.d.getVar("TEST"), "val bar") | 299 | self.assertEqual(self.d.getVar("TEST"), "val bar") |
279 | 300 | ||
@@ -281,42 +302,42 @@ class TestConcatOverride(unittest.TestCase): | |||
281 | # (including that whitespace is preserved) | 302 | # (including that whitespace is preserved) |
282 | def test_remove_inactive_override(self): | 303 | def test_remove_inactive_override(self): |
283 | self.d.setVar("TEST", "${VAL} ${BAR} 123") | 304 | self.d.setVar("TEST", "${VAL} ${BAR} 123") |
284 | self.d.setVar("TEST_remove_inactiveoverride", "val") | 305 | self.d.setVar("TEST:remove:inactiveoverride", "val") |
285 | self.assertEqual(self.d.getVar("TEST"), "val bar 123") | 306 | self.assertEqual(self.d.getVar("TEST"), "val bar 123") |
286 | 307 | ||
287 | def test_doubleref_remove(self): | 308 | def test_doubleref_remove(self): |
288 | self.d.setVar("TEST", "${VAL} ${BAR}") | 309 | self.d.setVar("TEST", "${VAL} ${BAR}") |
289 | self.d.setVar("TEST_remove", "val") | 310 | self.d.setVar("TEST:remove", "val") |
290 | self.d.setVar("TEST_TEST", "${TEST} ${TEST}") | 311 | self.d.setVar("TEST_TEST", "${TEST} ${TEST}") |
291 | self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar") | 312 | self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar") |
292 | 313 | ||
293 | def test_empty_remove(self): | 314 | def test_empty_remove(self): |
294 | self.d.setVar("TEST", "") | 315 | self.d.setVar("TEST", "") |
295 | self.d.setVar("TEST_remove", "val") | 316 | self.d.setVar("TEST:remove", "val") |
296 | self.assertEqual(self.d.getVar("TEST"), "") | 317 | self.assertEqual(self.d.getVar("TEST"), "") |
297 | 318 | ||
298 | def test_remove_expansion(self): | 319 | def test_remove_expansion(self): |
299 | self.d.setVar("BAR", "Z") | 320 | self.d.setVar("BAR", "Z") |
300 | self.d.setVar("TEST", "${BAR}/X Y") | 321 | self.d.setVar("TEST", "${BAR}/X Y") |
301 | self.d.setVar("TEST_remove", "${BAR}/X") | 322 | self.d.setVar("TEST:remove", "${BAR}/X") |
302 | self.assertEqual(self.d.getVar("TEST"), " Y") | 323 | self.assertEqual(self.d.getVar("TEST"), " Y") |
303 | 324 | ||
304 | def test_remove_expansion_items(self): | 325 | def test_remove_expansion_items(self): |
305 | self.d.setVar("TEST", "A B C D") | 326 | self.d.setVar("TEST", "A B C D") |
306 | self.d.setVar("BAR", "B D") | 327 | self.d.setVar("BAR", "B D") |
307 | self.d.setVar("TEST_remove", "${BAR}") | 328 | self.d.setVar("TEST:remove", "${BAR}") |
308 | self.assertEqual(self.d.getVar("TEST"), "A C ") | 329 | self.assertEqual(self.d.getVar("TEST"), "A C ") |
309 | 330 | ||
310 | def test_remove_preserve_whitespace(self): | 331 | def test_remove_preserve_whitespace(self): |
311 | # When the removal isn't active, the original value should be preserved | 332 | # When the removal isn't active, the original value should be preserved |
312 | self.d.setVar("TEST", " A B") | 333 | self.d.setVar("TEST", " A B") |
313 | self.d.setVar("TEST_remove", "C") | 334 | self.d.setVar("TEST:remove", "C") |
314 | self.assertEqual(self.d.getVar("TEST"), " A B") | 335 | self.assertEqual(self.d.getVar("TEST"), " A B") |
315 | 336 | ||
316 | def test_remove_preserve_whitespace2(self): | 337 | def test_remove_preserve_whitespace2(self): |
317 | # When the removal is active preserve the whitespace | 338 | # When the removal is active preserve the whitespace |
318 | self.d.setVar("TEST", " A B") | 339 | self.d.setVar("TEST", " A B") |
319 | self.d.setVar("TEST_remove", "B") | 340 | self.d.setVar("TEST:remove", "B") |
320 | self.assertEqual(self.d.getVar("TEST"), " A ") | 341 | self.assertEqual(self.d.getVar("TEST"), " A ") |
321 | 342 | ||
322 | class TestOverrides(unittest.TestCase): | 343 | class TestOverrides(unittest.TestCase): |
@@ -329,81 +350,86 @@ class TestOverrides(unittest.TestCase): | |||
329 | self.assertEqual(self.d.getVar("TEST"), "testvalue") | 350 | self.assertEqual(self.d.getVar("TEST"), "testvalue") |
330 | 351 | ||
331 | def test_one_override(self): | 352 | def test_one_override(self): |
332 | self.d.setVar("TEST_bar", "testvalue2") | 353 | self.d.setVar("TEST:bar", "testvalue2") |
333 | self.assertEqual(self.d.getVar("TEST"), "testvalue2") | 354 | self.assertEqual(self.d.getVar("TEST"), "testvalue2") |
334 | 355 | ||
335 | def test_one_override_unset(self): | 356 | def test_one_override_unset(self): |
336 | self.d.setVar("TEST2_bar", "testvalue2") | 357 | self.d.setVar("TEST2:bar", "testvalue2") |
337 | 358 | ||
338 | self.assertEqual(self.d.getVar("TEST2"), "testvalue2") | 359 | self.assertEqual(self.d.getVar("TEST2"), "testvalue2") |
339 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) | 360 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar']) |
340 | 361 | ||
341 | def test_multiple_override(self): | 362 | def test_multiple_override(self): |
342 | self.d.setVar("TEST_bar", "testvalue2") | 363 | self.d.setVar("TEST:bar", "testvalue2") |
343 | self.d.setVar("TEST_local", "testvalue3") | 364 | self.d.setVar("TEST:local", "testvalue3") |
344 | self.d.setVar("TEST_foo", "testvalue4") | 365 | self.d.setVar("TEST:foo", "testvalue4") |
345 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") | 366 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
346 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) | 367 | self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local']) |
347 | 368 | ||
348 | def test_multiple_combined_overrides(self): | 369 | def test_multiple_combined_overrides(self): |
349 | self.d.setVar("TEST_local_foo_bar", "testvalue3") | 370 | self.d.setVar("TEST:local:foo:bar", "testvalue3") |
350 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") | 371 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
351 | 372 | ||
352 | def test_multiple_overrides_unset(self): | 373 | def test_multiple_overrides_unset(self): |
353 | self.d.setVar("TEST2_local_foo_bar", "testvalue3") | 374 | self.d.setVar("TEST2:local:foo:bar", "testvalue3") |
354 | self.assertEqual(self.d.getVar("TEST2"), "testvalue3") | 375 | self.assertEqual(self.d.getVar("TEST2"), "testvalue3") |
355 | 376 | ||
356 | def test_keyexpansion_override(self): | 377 | def test_keyexpansion_override(self): |
357 | self.d.setVar("LOCAL", "local") | 378 | self.d.setVar("LOCAL", "local") |
358 | self.d.setVar("TEST_bar", "testvalue2") | 379 | self.d.setVar("TEST:bar", "testvalue2") |
359 | self.d.setVar("TEST_${LOCAL}", "testvalue3") | 380 | self.d.setVar("TEST:${LOCAL}", "testvalue3") |
360 | self.d.setVar("TEST_foo", "testvalue4") | 381 | self.d.setVar("TEST:foo", "testvalue4") |
361 | bb.data.expandKeys(self.d) | 382 | bb.data.expandKeys(self.d) |
362 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") | 383 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
363 | 384 | ||
364 | def test_rename_override(self): | 385 | def test_rename_override(self): |
365 | self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") | 386 | self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a") |
366 | self.d.setVar("OVERRIDES", "class-target") | 387 | self.d.setVar("OVERRIDES", "class-target") |
367 | self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") | 388 | self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools") |
368 | self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") | 389 | self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a") |
369 | 390 | ||
370 | def test_underscore_override(self): | 391 | def test_underscore_override(self): |
371 | self.d.setVar("TEST_bar", "testvalue2") | 392 | self.d.setVar("TEST:bar", "testvalue2") |
372 | self.d.setVar("TEST_some_val", "testvalue3") | 393 | self.d.setVar("TEST:some_val", "testvalue3") |
373 | self.d.setVar("TEST_foo", "testvalue4") | 394 | self.d.setVar("TEST:foo", "testvalue4") |
374 | self.d.setVar("OVERRIDES", "foo:bar:some_val") | 395 | self.d.setVar("OVERRIDES", "foo:bar:some_val") |
375 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") | 396 | self.assertEqual(self.d.getVar("TEST"), "testvalue3") |
376 | 397 | ||
398 | # Test an override with _<numeric> in it based on a real world OE issue | ||
399 | def test_underscore_override_2(self): | ||
400 | self.d.setVar("TARGET_ARCH", "x86_64") | ||
401 | self.d.setVar("PN", "test-${TARGET_ARCH}") | ||
402 | self.d.setVar("VERSION", "1") | ||
403 | self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2") | ||
404 | self.d.setVar("OVERRIDES", "pn-${PN}") | ||
405 | bb.data.expandKeys(self.d) | ||
406 | self.assertEqual(self.d.getVar("VERSION"), "2") | ||
407 | |||
377 | def test_remove_with_override(self): | 408 | def test_remove_with_override(self): |
378 | self.d.setVar("TEST_bar", "testvalue2") | 409 | self.d.setVar("TEST:bar", "testvalue2") |
379 | self.d.setVar("TEST_some_val", "testvalue3 testvalue5") | 410 | self.d.setVar("TEST:some_val", "testvalue3 testvalue5") |
380 | self.d.setVar("TEST_some_val_remove", "testvalue3") | 411 | self.d.setVar("TEST:some_val:remove", "testvalue3") |
381 | self.d.setVar("TEST_foo", "testvalue4") | 412 | self.d.setVar("TEST:foo", "testvalue4") |
382 | self.d.setVar("OVERRIDES", "foo:bar:some_val") | 413 | self.d.setVar("OVERRIDES", "foo:bar:some_val") |
383 | self.assertEqual(self.d.getVar("TEST"), " testvalue5") | 414 | self.assertEqual(self.d.getVar("TEST"), " testvalue5") |
384 | 415 | ||
385 | def test_append_and_override_1(self): | 416 | def test_append_and_override_1(self): |
386 | self.d.setVar("TEST_append", "testvalue2") | 417 | self.d.setVar("TEST:append", "testvalue2") |
387 | self.d.setVar("TEST_bar", "testvalue3") | 418 | self.d.setVar("TEST:bar", "testvalue3") |
388 | self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2") | 419 | self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2") |
389 | 420 | ||
390 | def test_append_and_override_2(self): | 421 | def test_append_and_override_2(self): |
391 | self.d.setVar("TEST_append_bar", "testvalue2") | 422 | self.d.setVar("TEST:append:bar", "testvalue2") |
392 | self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2") | 423 | self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2") |
393 | 424 | ||
394 | def test_append_and_override_3(self): | 425 | def test_append_and_override_3(self): |
395 | self.d.setVar("TEST_bar_append", "testvalue2") | 426 | self.d.setVar("TEST:bar:append", "testvalue2") |
396 | self.assertEqual(self.d.getVar("TEST"), "testvalue2") | 427 | self.assertEqual(self.d.getVar("TEST"), "testvalue2") |
397 | 428 | ||
398 | # Test an override with _<numeric> in it based on a real world OE issue | 429 | def test_append_and_unused_override(self): |
399 | def test_underscore_override(self): | 430 | # Had a bug where an unused override append could return "" instead of None |
400 | self.d.setVar("TARGET_ARCH", "x86_64") | 431 | self.d.setVar("BAR:append:unusedoverride", "testvalue2") |
401 | self.d.setVar("PN", "test-${TARGET_ARCH}") | 432 | self.assertEqual(self.d.getVar("BAR"), None) |
402 | self.d.setVar("VERSION", "1") | ||
403 | self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2") | ||
404 | self.d.setVar("OVERRIDES", "pn-${PN}") | ||
405 | bb.data.expandKeys(self.d) | ||
406 | self.assertEqual(self.d.getVar("VERSION"), "2") | ||
407 | 433 | ||
408 | class TestKeyExpansion(unittest.TestCase): | 434 | class TestKeyExpansion(unittest.TestCase): |
409 | def setUp(self): | 435 | def setUp(self): |
@@ -498,7 +524,7 @@ class TaskHash(unittest.TestCase): | |||
498 | d.setVar("VAR", "val") | 524 | d.setVar("VAR", "val") |
499 | # Adding an inactive removal shouldn't change the hash | 525 | # Adding an inactive removal shouldn't change the hash |
500 | d.setVar("BAR", "notbar") | 526 | d.setVar("BAR", "notbar") |
501 | d.setVar("MYCOMMAND_remove", "${BAR}") | 527 | d.setVar("MYCOMMAND:remove", "${BAR}") |
502 | nexthash = gettask_bashhash("mytask", d) | 528 | nexthash = gettask_bashhash("mytask", d) |
503 | self.assertEqual(orighash, nexthash) | 529 | self.assertEqual(orighash, nexthash) |
504 | 530 | ||
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py index 9ca7e9bc8e..ef61891d30 100644 --- a/bitbake/lib/bb/tests/event.py +++ b/bitbake/lib/bb/tests/event.py | |||
@@ -13,6 +13,7 @@ import pickle | |||
13 | import threading | 13 | import threading |
14 | import time | 14 | import time |
15 | import unittest | 15 | import unittest |
16 | import tempfile | ||
16 | from unittest.mock import Mock | 17 | from unittest.mock import Mock |
17 | from unittest.mock import call | 18 | from unittest.mock import call |
18 | 19 | ||
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase): | |||
157 | self._test_process.event_handler, | 158 | self._test_process.event_handler, |
158 | event, | 159 | event, |
159 | None) | 160 | None) |
160 | self._test_process.event_handler.assert_called_once_with(event) | 161 | self._test_process.event_handler.assert_called_once_with(event, None) |
161 | 162 | ||
162 | def test_fire_class_handlers(self): | 163 | def test_fire_class_handlers(self): |
163 | """ Test fire_class_handlers method """ | 164 | """ Test fire_class_handlers method """ |
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase): | |||
175 | bb.event.fire_class_handlers(event1, None) | 176 | bb.event.fire_class_handlers(event1, None) |
176 | bb.event.fire_class_handlers(event2, None) | 177 | bb.event.fire_class_handlers(event2, None) |
177 | bb.event.fire_class_handlers(event2, None) | 178 | bb.event.fire_class_handlers(event2, None) |
178 | expected_event_handler1 = [call(event1)] | 179 | expected_event_handler1 = [call(event1, None)] |
179 | expected_event_handler2 = [call(event1), | 180 | expected_event_handler2 = [call(event1, None), |
180 | call(event2), | 181 | call(event2, None), |
181 | call(event2)] | 182 | call(event2, None)] |
182 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 183 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
183 | expected_event_handler1) | 184 | expected_event_handler1) |
184 | self.assertEqual(self._test_process.event_handler2.call_args_list, | 185 | self.assertEqual(self._test_process.event_handler2.call_args_list, |
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase): | |||
205 | bb.event.fire_class_handlers(event2, None) | 206 | bb.event.fire_class_handlers(event2, None) |
206 | bb.event.fire_class_handlers(event2, None) | 207 | bb.event.fire_class_handlers(event2, None) |
207 | expected_event_handler1 = [] | 208 | expected_event_handler1 = [] |
208 | expected_event_handler2 = [call(event1)] | 209 | expected_event_handler2 = [call(event1, None)] |
209 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 210 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
210 | expected_event_handler1) | 211 | expected_event_handler1) |
211 | self.assertEqual(self._test_process.event_handler2.call_args_list, | 212 | self.assertEqual(self._test_process.event_handler2.call_args_list, |
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase): | |||
223 | self.assertEqual(result, bb.event.Registered) | 224 | self.assertEqual(result, bb.event.Registered) |
224 | bb.event.fire_class_handlers(event1, None) | 225 | bb.event.fire_class_handlers(event1, None) |
225 | bb.event.fire_class_handlers(event2, None) | 226 | bb.event.fire_class_handlers(event2, None) |
226 | expected = [call(event1), call(event2)] | 227 | expected = [call(event1, None), call(event2, None)] |
227 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 228 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
228 | expected) | 229 | expected) |
229 | 230 | ||
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase): | |||
237 | self.assertEqual(result, bb.event.Registered) | 238 | self.assertEqual(result, bb.event.Registered) |
238 | bb.event.fire_class_handlers(event1, None) | 239 | bb.event.fire_class_handlers(event1, None) |
239 | bb.event.fire_class_handlers(event2, None) | 240 | bb.event.fire_class_handlers(event2, None) |
240 | expected = [call(event1), call(event2), call(event1)] | 241 | expected = [call(event1, None), call(event2, None), call(event1, None)] |
241 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 242 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
242 | expected) | 243 | expected) |
243 | 244 | ||
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase): | |||
251 | self.assertEqual(result, bb.event.Registered) | 252 | self.assertEqual(result, bb.event.Registered) |
252 | bb.event.fire_class_handlers(event1, None) | 253 | bb.event.fire_class_handlers(event1, None) |
253 | bb.event.fire_class_handlers(event2, None) | 254 | bb.event.fire_class_handlers(event2, None) |
254 | expected = [call(event1), call(event2), call(event1), call(event2)] | 255 | expected = [call(event1,None), call(event2, None), call(event1, None), call(event2, None)] |
255 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 256 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
256 | expected) | 257 | expected) |
257 | 258 | ||
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase): | |||
359 | 360 | ||
360 | event1 = bb.event.ConfigParsed() | 361 | event1 = bb.event.ConfigParsed() |
361 | bb.event.fire(event1, None) | 362 | bb.event.fire(event1, None) |
362 | expected = [call(event1)] | 363 | expected = [call(event1, None)] |
363 | self.assertEqual(self._test_process.event_handler1.call_args_list, | 364 | self.assertEqual(self._test_process.event_handler1.call_args_list, |
364 | expected) | 365 | expected) |
366 | expected = [call(event1)] | ||
365 | self.assertEqual(self._test_ui1.event.send.call_args_list, | 367 | self.assertEqual(self._test_ui1.event.send.call_args_list, |
366 | expected) | 368 | expected) |
367 | 369 | ||
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase): | |||
450 | and disable threadlocks tests """ | 452 | and disable threadlocks tests """ |
451 | bb.event.fire(bb.event.OperationStarted(), None) | 453 | bb.event.fire(bb.event.OperationStarted(), None) |
452 | 454 | ||
453 | def test_enable_threadlock(self): | 455 | def test_event_threadlock(self): |
454 | """ Test enable_threadlock method """ | 456 | """ Test enable_threadlock method """ |
455 | self._set_threadlock_test_mockups() | 457 | self._set_threadlock_test_mockups() |
456 | bb.event.enable_threadlock() | ||
457 | self._set_and_run_threadlock_test_workers() | 458 | self._set_and_run_threadlock_test_workers() |
458 | # Calls to UI handlers should be in order as all the registered | 459 | # Calls to UI handlers should be in order as all the registered |
459 | # handlers for the event coming from the first worker should be | 460 | # handlers for the event coming from the first worker should be |
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase): | |||
461 | self.assertEqual(self._threadlock_test_calls, | 462 | self.assertEqual(self._threadlock_test_calls, |
462 | ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"]) | 463 | ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"]) |
463 | 464 | ||
464 | |||
465 | def test_disable_threadlock(self): | ||
466 | """ Test disable_threadlock method """ | ||
467 | self._set_threadlock_test_mockups() | ||
468 | bb.event.disable_threadlock() | ||
469 | self._set_and_run_threadlock_test_workers() | ||
470 | # Calls to UI handlers should be intertwined together. Thanks to the | ||
471 | # delay in the registered handlers for the event coming from the first | ||
472 | # worker, the event coming from the second worker starts being | ||
473 | # processed before finishing handling the first worker event. | ||
474 | self.assertEqual(self._threadlock_test_calls, | ||
475 | ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"]) | ||
476 | |||
477 | |||
478 | class EventClassesTest(unittest.TestCase): | 465 | class EventClassesTest(unittest.TestCase): |
479 | """ Event classes test class """ | 466 | """ Event classes test class """ |
480 | 467 | ||
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase): | |||
482 | 469 | ||
483 | def setUp(self): | 470 | def setUp(self): |
484 | bb.event.worker_pid = EventClassesTest._worker_pid | 471 | bb.event.worker_pid = EventClassesTest._worker_pid |
472 | self.d = bb.data.init() | ||
473 | bb.parse.siggen = bb.siggen.init(self.d) | ||
485 | 474 | ||
486 | def test_Event(self): | 475 | def test_Event(self): |
487 | """ Test the Event base class """ | 476 | """ Test the Event base class """ |
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase): | |||
964 | event = bb.event.FindSigInfoResult(result) | 953 | event = bb.event.FindSigInfoResult(result) |
965 | self.assertEqual(event.result, result) | 954 | self.assertEqual(event.result, result) |
966 | self.assertEqual(event.pid, EventClassesTest._worker_pid) | 955 | self.assertEqual(event.pid, EventClassesTest._worker_pid) |
956 | |||
957 | def test_lineno_in_eventhandler(self): | ||
958 | # The error lineno is 5, not 4 since the first line is '\n' | ||
959 | error_line = """ | ||
960 | # Comment line1 | ||
961 | # Comment line2 | ||
962 | python test_lineno_in_eventhandler() { | ||
963 | This is an error line | ||
964 | } | ||
965 | addhandler test_lineno_in_eventhandler | ||
966 | test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed" | ||
967 | """ | ||
968 | |||
969 | with self.assertLogs() as logs: | ||
970 | f = tempfile.NamedTemporaryFile(suffix = '.bb') | ||
971 | f.write(bytes(error_line, "utf-8")) | ||
972 | f.flush() | ||
973 | d = bb.parse.handle(f.name, self.d)[''] | ||
974 | |||
975 | output = "".join(logs.output) | ||
976 | self.assertTrue(" line 5\n" in output) | ||
diff --git a/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html new file mode 100644 index 0000000000..4a1eb4de13 --- /dev/null +++ b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html | |||
@@ -0,0 +1,59 @@ | |||
1 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"> | ||
2 | <html> | ||
3 | <head> | ||
4 | <title>Index of /debian/pool/main/m/minicom</title> | ||
5 | </head> | ||
6 | <body> | ||
7 | <h1>Index of /debian/pool/main/m/minicom</h1> | ||
8 | <table> | ||
9 | <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr> | ||
10 | <tr><th colspan="4"><hr></th></tr> | ||
11 | <tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td> </td><td align="right"> - </td></tr> | ||
12 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr> | ||
13 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr> | ||
14 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr> | ||
15 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr> | ||
16 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr> | ||
17 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr> | ||
18 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr> | ||
19 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr> | ||
20 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr> | ||
21 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr> | ||
22 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr> | ||
23 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr> | ||
24 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr> | ||
25 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr> | ||
26 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr> | ||
27 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr> | ||
28 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr> | ||
29 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr> | ||
30 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr> | ||
31 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr> | ||
32 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr> | ||
33 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr> | ||
34 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr> | ||
35 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr> | ||
36 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr> | ||
37 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr> | ||
38 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr> | ||
39 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr> | ||
40 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr> | ||
41 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr> | ||
42 | <tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr> | ||
43 | <tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr> | ||
44 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr> | ||
45 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr> | ||
46 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr> | ||
47 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr> | ||
48 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr> | ||
49 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr> | ||
50 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr> | ||
51 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr> | ||
52 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr> | ||
53 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr> | ||
54 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr> | ||
55 | <tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr> | ||
56 | <tr><th colspan="4"><hr></th></tr> | ||
57 | </table> | ||
58 | <address>Apache Server at ftp.debian.org Port 80</address> | ||
59 | </body></html> | ||
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html new file mode 100644 index 0000000000..4e41af6d6a --- /dev/null +++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html | |||
@@ -0,0 +1,20 @@ | |||
1 | <!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style> | ||
2 | |||
3 | <title>Index of /sources/libxml2/2.10/</title> | ||
4 | </head><body><h1>Index of /sources/libxml2/2.10/</h1> | ||
5 | <table id="list"><thead><tr><th style="width:55%"><a href="?C=N&O=A">File Name</a> <a href="?C=N&O=D"> ↓ </a></th><th style="width:20%"><a href="?C=S&O=A">File Size</a> <a href="?C=S&O=D"> ↓ </a></th><th style="width:25%"><a href="?C=M&O=A">Date</a> <a href="?C=M&O=D"> ↓ </a></th></tr></thead> | ||
6 | <tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr> | ||
7 | <tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
8 | <tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr> | ||
9 | <tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr> | ||
10 | <tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr> | ||
11 | <tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr> | ||
12 | <tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr> | ||
13 | <tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr> | ||
14 | <tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr> | ||
15 | <tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr> | ||
16 | <tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr> | ||
17 | <tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
18 | <tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
19 | <tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
20 | </tbody></table></body></html> | ||
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html new file mode 100644 index 0000000000..abdfdd0fa2 --- /dev/null +++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html | |||
@@ -0,0 +1,40 @@ | |||
1 | <!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style> | ||
2 | |||
3 | <title>Index of /sources/libxml2/2.9/</title> | ||
4 | </head><body><h1>Index of /sources/libxml2/2.9/</h1> | ||
5 | <table id="list"><thead><tr><th style="width:55%"><a href="?C=N&O=A">File Name</a> <a href="?C=N&O=D"> ↓ </a></th><th style="width:20%"><a href="?C=S&O=A">File Size</a> <a href="?C=S&O=D"> ↓ </a></th><th style="width:25%"><a href="?C=M&O=A">Date</a> <a href="?C=M&O=D"> ↓ </a></th></tr></thead> | ||
6 | <tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr> | ||
7 | <tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr> | ||
8 | <tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr> | ||
9 | <tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr> | ||
10 | <tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr> | ||
11 | <tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr> | ||
12 | <tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr> | ||
13 | <tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr> | ||
14 | <tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr> | ||
15 | <tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr> | ||
16 | <tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr> | ||
17 | <tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr> | ||
18 | <tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr> | ||
19 | <tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr> | ||
20 | <tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr> | ||
21 | <tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr> | ||
22 | <tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr> | ||
23 | <tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr> | ||
24 | <tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr> | ||
25 | <tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr> | ||
26 | <tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr> | ||
27 | <tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr> | ||
28 | <tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr> | ||
29 | <tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr> | ||
30 | <tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr> | ||
31 | <tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr> | ||
32 | <tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr> | ||
33 | <tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr> | ||
34 | <tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr> | ||
35 | <tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr> | ||
36 | <tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr> | ||
37 | <tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr> | ||
38 | <tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr> | ||
39 | <tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr> | ||
40 | </tbody></table></body></html> | ||
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html new file mode 100644 index 0000000000..c183e06a55 --- /dev/null +++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html | |||
@@ -0,0 +1,19 @@ | |||
1 | <!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style> | ||
2 | |||
3 | <title>Index of /sources/libxml2/</title> | ||
4 | </head><body><h1>Index of /sources/libxml2/</h1> | ||
5 | <table id="list"><thead><tr><th style="width:55%"><a href="?C=N&O=A">File Name</a> <a href="?C=N&O=D"> ↓ </a></th><th style="width:20%"><a href="?C=S&O=A">File Size</a> <a href="?C=S&O=D"> ↓ </a></th><th style="width:25%"><a href="?C=M&O=A">Date</a> <a href="?C=M&O=D"> ↓ </a></th></tr></thead> | ||
6 | <tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr> | ||
7 | <tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr> | ||
8 | <tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr> | ||
9 | <tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
10 | <tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr> | ||
11 | <tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr> | ||
12 | <tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr> | ||
13 | <tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr> | ||
14 | <tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr> | ||
15 | <tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr> | ||
16 | <tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr> | ||
17 | <tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr> | ||
18 | <tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr> | ||
19 | </tbody></table></body></html> | ||
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py index 7b2dac7b86..85c1f79ff3 100644 --- a/bitbake/lib/bb/tests/fetch.py +++ b/bitbake/lib/bb/tests/fetch.py | |||
@@ -6,11 +6,14 @@ | |||
6 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
7 | # | 7 | # |
8 | 8 | ||
9 | import contextlib | ||
9 | import unittest | 10 | import unittest |
10 | import hashlib | 11 | import hashlib |
11 | import tempfile | 12 | import tempfile |
12 | import collections | 13 | import collections |
13 | import os | 14 | import os |
15 | import signal | ||
16 | import tarfile | ||
14 | from bb.fetch2 import URI | 17 | from bb.fetch2 import URI |
15 | from bb.fetch2 import FetchMethod | 18 | from bb.fetch2 import FetchMethod |
16 | import bb | 19 | import bb |
@@ -18,9 +21,28 @@ from bb.tests.support.httpserver import HTTPService | |||
18 | 21 | ||
19 | def skipIfNoNetwork(): | 22 | def skipIfNoNetwork(): |
20 | if os.environ.get("BB_SKIP_NETTESTS") == "yes": | 23 | if os.environ.get("BB_SKIP_NETTESTS") == "yes": |
21 | return unittest.skip("Network tests being skipped") | 24 | return unittest.skip("network test") |
22 | return lambda f: f | 25 | return lambda f: f |
23 | 26 | ||
27 | class TestTimeout(Exception): | ||
28 | # Indicate to pytest that this is not a test suite | ||
29 | __test__ = False | ||
30 | |||
31 | class Timeout(): | ||
32 | |||
33 | def __init__(self, seconds): | ||
34 | self.seconds = seconds | ||
35 | |||
36 | def handle_timeout(self, signum, frame): | ||
37 | raise TestTimeout("Test failed: timeout reached") | ||
38 | |||
39 | def __enter__(self): | ||
40 | signal.signal(signal.SIGALRM, self.handle_timeout) | ||
41 | signal.alarm(self.seconds) | ||
42 | |||
43 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
44 | signal.alarm(0) | ||
45 | |||
24 | class URITest(unittest.TestCase): | 46 | class URITest(unittest.TestCase): |
25 | test_uris = { | 47 | test_uris = { |
26 | "http://www.google.com/index.html" : { | 48 | "http://www.google.com/index.html" : { |
@@ -286,6 +308,21 @@ class URITest(unittest.TestCase): | |||
286 | 'params': {"someparam" : "1"}, | 308 | 'params': {"someparam" : "1"}, |
287 | 'query': {}, | 309 | 'query': {}, |
288 | 'relative': True | 310 | 'relative': True |
311 | }, | ||
312 | "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": { | ||
313 | 'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip', | ||
314 | 'scheme': 'https', | ||
315 | 'hostname': 'www.innodisk.com', | ||
316 | 'port': None, | ||
317 | 'hostport': 'www.innodisk.com', | ||
318 | 'path': '/Download_file', | ||
319 | 'userinfo': '', | ||
320 | 'userinfo': '', | ||
321 | 'username': '', | ||
322 | 'password': '', | ||
323 | 'params': {"downloadfilename" : "EGPL-T101.zip"}, | ||
324 | 'query': {"9BE0BF6657": None}, | ||
325 | 'relative': False | ||
289 | } | 326 | } |
290 | 327 | ||
291 | } | 328 | } |
@@ -376,7 +413,7 @@ class FetcherTest(unittest.TestCase): | |||
376 | def setUp(self): | 413 | def setUp(self): |
377 | self.origdir = os.getcwd() | 414 | self.origdir = os.getcwd() |
378 | self.d = bb.data.init() | 415 | self.d = bb.data.init() |
379 | self.tempdir = tempfile.mkdtemp() | 416 | self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-") |
380 | self.dldir = os.path.join(self.tempdir, "download") | 417 | self.dldir = os.path.join(self.tempdir, "download") |
381 | os.mkdir(self.dldir) | 418 | os.mkdir(self.dldir) |
382 | self.d.setVar("DL_DIR", self.dldir) | 419 | self.d.setVar("DL_DIR", self.dldir) |
@@ -390,57 +427,94 @@ class FetcherTest(unittest.TestCase): | |||
390 | if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes": | 427 | if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes": |
391 | print("Not cleaning up %s. Please remove manually." % self.tempdir) | 428 | print("Not cleaning up %s. Please remove manually." % self.tempdir) |
392 | else: | 429 | else: |
430 | bb.process.run('chmod u+rw -R %s' % self.tempdir) | ||
393 | bb.utils.prunedir(self.tempdir) | 431 | bb.utils.prunedir(self.tempdir) |
394 | 432 | ||
433 | def git(self, cmd, cwd=None): | ||
434 | if isinstance(cmd, str): | ||
435 | cmd = 'git -c safe.bareRepository=all ' + cmd | ||
436 | else: | ||
437 | cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd | ||
438 | if cwd is None: | ||
439 | cwd = self.gitdir | ||
440 | return bb.process.run(cmd, cwd=cwd)[0] | ||
441 | |||
442 | def git_init(self, cwd=None): | ||
443 | self.git('init', cwd=cwd) | ||
444 | # Explicitly set initial branch to master as | ||
445 | # a common setup is to use other default | ||
446 | # branch than master. | ||
447 | self.git(['checkout', '-b', 'master'], cwd=cwd) | ||
448 | |||
449 | try: | ||
450 | self.git(['config', 'user.email'], cwd=cwd) | ||
451 | except bb.process.ExecutionError: | ||
452 | self.git(['config', 'user.email', 'you@example.com'], cwd=cwd) | ||
453 | |||
454 | try: | ||
455 | self.git(['config', 'user.name'], cwd=cwd) | ||
456 | except bb.process.ExecutionError: | ||
457 | self.git(['config', 'user.name', 'Your Name'], cwd=cwd) | ||
458 | |||
395 | class MirrorUriTest(FetcherTest): | 459 | class MirrorUriTest(FetcherTest): |
396 | 460 | ||
397 | replaceuris = { | 461 | replaceuris = { |
398 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") | 462 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") |
399 | : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", | 463 | : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", |
400 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") | 464 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") |
401 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 465 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
402 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") | 466 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") |
403 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 467 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
404 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") | 468 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") |
405 | : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 469 | : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
406 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") | 470 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") |
407 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", | 471 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", |
408 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") | 472 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") |
409 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", | 473 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", |
410 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") | 474 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") |
411 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", | 475 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", |
412 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3") | 476 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3") |
413 | : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", | 477 | : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", |
414 | ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz") | 478 | ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz") |
415 | : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", | 479 | : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", |
416 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist") | 480 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist") |
417 | : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", | 481 | : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", |
418 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") | 482 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") |
419 | : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", | 483 | : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", |
420 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") | 484 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") |
421 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 485 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
422 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") | 486 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") |
423 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 487 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
424 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") | 488 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") |
425 | : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 489 | : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", |
426 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") | 490 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") |
427 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", | 491 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", |
428 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") | 492 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") |
429 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", | 493 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", |
430 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http") | 494 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http") |
431 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", | 495 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
496 | ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http") | ||
497 | : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", | ||
498 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https") | ||
499 | : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master", | ||
500 | ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz", | ||
501 | ("https://somewhere.org/example/1.0.0/example;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/PATH") | ||
502 | : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", | ||
503 | ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz") | ||
504 | : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", | ||
432 | 505 | ||
433 | #Renaming files doesn't work | 506 | #Renaming files doesn't work |
434 | #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" | 507 | #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" |
435 | #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", | 508 | #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", |
436 | } | 509 | } |
437 | 510 | ||
438 | mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \ | 511 | mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ |
439 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \ | 512 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ |
440 | "https://.*/.* file:///someotherpath/downloads/ \n" \ | 513 | "https://.*/.* file:///someotherpath/downloads/ " \ |
441 | "http://.*/.* file:///someotherpath/downloads/ \n" | 514 | "http://.*/.* file:///someotherpath/downloads/" |
442 | 515 | ||
443 | def test_urireplace(self): | 516 | def test_urireplace(self): |
517 | self.d.setVar("FILESPATH", ".") | ||
444 | for k, v in self.replaceuris.items(): | 518 | for k, v in self.replaceuris.items(): |
445 | ud = bb.fetch.FetchData(k[0], self.d) | 519 | ud = bb.fetch.FetchData(k[0], self.d) |
446 | ud.setup_localpath(self.d) | 520 | ud.setup_localpath(self.d) |
@@ -463,8 +537,8 @@ class MirrorUriTest(FetcherTest): | |||
463 | 537 | ||
464 | def test_mirror_of_mirror(self): | 538 | def test_mirror_of_mirror(self): |
465 | # Test if mirror of a mirror works | 539 | # Test if mirror of a mirror works |
466 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n" | 540 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" |
467 | mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n" | 541 | mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/" |
468 | fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) | 542 | fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) |
469 | mirrors = bb.fetch2.mirror_from_string(mirrorvar) | 543 | mirrors = bb.fetch2.mirror_from_string(mirrorvar) |
470 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | 544 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) |
@@ -473,8 +547,8 @@ class MirrorUriTest(FetcherTest): | |||
473 | 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', | 547 | 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', |
474 | 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) | 548 | 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) |
475 | 549 | ||
476 | recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \ | 550 | recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \ |
477 | "https://.*/[^/]* https://BBBB/B/B/B/ \n" | 551 | "https://.*/[^/]* https://BBBB/B/B/B/" |
478 | 552 | ||
479 | def test_recursive(self): | 553 | def test_recursive(self): |
480 | fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) | 554 | fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) |
@@ -488,15 +562,15 @@ class MirrorUriTest(FetcherTest): | |||
488 | class GitDownloadDirectoryNamingTest(FetcherTest): | 562 | class GitDownloadDirectoryNamingTest(FetcherTest): |
489 | def setUp(self): | 563 | def setUp(self): |
490 | super(GitDownloadDirectoryNamingTest, self).setUp() | 564 | super(GitDownloadDirectoryNamingTest, self).setUp() |
491 | self.recipe_url = "git://git.openembedded.org/bitbake" | 565 | self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https" |
492 | self.recipe_dir = "git.openembedded.org.bitbake" | 566 | self.recipe_dir = "git.openembedded.org.bitbake" |
493 | self.mirror_url = "git://github.com/openembedded/bitbake.git" | 567 | self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master" |
494 | self.mirror_dir = "github.com.openembedded.bitbake.git" | 568 | self.mirror_dir = "github.com.openembedded.bitbake.git" |
495 | 569 | ||
496 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') | 570 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') |
497 | 571 | ||
498 | def setup_mirror_rewrite(self): | 572 | def setup_mirror_rewrite(self): |
499 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") | 573 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url) |
500 | 574 | ||
501 | @skipIfNoNetwork() | 575 | @skipIfNoNetwork() |
502 | def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self): | 576 | def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self): |
@@ -536,16 +610,16 @@ class GitDownloadDirectoryNamingTest(FetcherTest): | |||
536 | class TarballNamingTest(FetcherTest): | 610 | class TarballNamingTest(FetcherTest): |
537 | def setUp(self): | 611 | def setUp(self): |
538 | super(TarballNamingTest, self).setUp() | 612 | super(TarballNamingTest, self).setUp() |
539 | self.recipe_url = "git://git.openembedded.org/bitbake" | 613 | self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https" |
540 | self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" | 614 | self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" |
541 | self.mirror_url = "git://github.com/openembedded/bitbake.git" | 615 | self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master" |
542 | self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz" | 616 | self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz" |
543 | 617 | ||
544 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') | 618 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') |
545 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') | 619 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') |
546 | 620 | ||
547 | def setup_mirror_rewrite(self): | 621 | def setup_mirror_rewrite(self): |
548 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") | 622 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url) |
549 | 623 | ||
550 | @skipIfNoNetwork() | 624 | @skipIfNoNetwork() |
551 | def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self): | 625 | def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self): |
@@ -570,9 +644,9 @@ class TarballNamingTest(FetcherTest): | |||
570 | class GitShallowTarballNamingTest(FetcherTest): | 644 | class GitShallowTarballNamingTest(FetcherTest): |
571 | def setUp(self): | 645 | def setUp(self): |
572 | super(GitShallowTarballNamingTest, self).setUp() | 646 | super(GitShallowTarballNamingTest, self).setUp() |
573 | self.recipe_url = "git://git.openembedded.org/bitbake" | 647 | self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https" |
574 | self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz" | 648 | self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz" |
575 | self.mirror_url = "git://github.com/openembedded/bitbake.git" | 649 | self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master" |
576 | self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz" | 650 | self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz" |
577 | 651 | ||
578 | self.d.setVar('BB_GIT_SHALLOW', '1') | 652 | self.d.setVar('BB_GIT_SHALLOW', '1') |
@@ -580,7 +654,7 @@ class GitShallowTarballNamingTest(FetcherTest): | |||
580 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') | 654 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') |
581 | 655 | ||
582 | def setup_mirror_rewrite(self): | 656 | def setup_mirror_rewrite(self): |
583 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") | 657 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url) |
584 | 658 | ||
585 | @skipIfNoNetwork() | 659 | @skipIfNoNetwork() |
586 | def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self): | 660 | def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self): |
@@ -602,6 +676,39 @@ class GitShallowTarballNamingTest(FetcherTest): | |||
602 | self.assertIn(self.mirror_tarball, dir) | 676 | self.assertIn(self.mirror_tarball, dir) |
603 | 677 | ||
604 | 678 | ||
679 | class CleanTarballTest(FetcherTest): | ||
680 | def setUp(self): | ||
681 | super(CleanTarballTest, self).setUp() | ||
682 | self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https" | ||
683 | self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" | ||
684 | |||
685 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') | ||
686 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') | ||
687 | |||
688 | @skipIfNoNetwork() | ||
689 | def test_that_the_tarball_contents_does_not_leak_info(self): | ||
690 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
691 | |||
692 | fetcher.download() | ||
693 | |||
694 | fetcher.unpack(self.unpackdir) | ||
695 | mtime = bb.process.run('git log --all -1 --format=%ct', | ||
696 | cwd=os.path.join(self.unpackdir, 'git')) | ||
697 | self.assertEqual(len(mtime), 2) | ||
698 | mtime = int(mtime[0]) | ||
699 | |||
700 | archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball)) | ||
701 | self.assertNotEqual(len(archive.members), 0) | ||
702 | for member in archive.members: | ||
703 | if member.name == ".": | ||
704 | continue | ||
705 | self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name) | ||
706 | self.assertEqual(member.uid, 0, "uid for %s differs" % member.name) | ||
707 | self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name) | ||
708 | self.assertEqual(member.gid, 0, "gid for %s differs" % member.name) | ||
709 | self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name) | ||
710 | |||
711 | |||
605 | class FetcherLocalTest(FetcherTest): | 712 | class FetcherLocalTest(FetcherTest): |
606 | def setUp(self): | 713 | def setUp(self): |
607 | def touch(fn): | 714 | def touch(fn): |
@@ -619,6 +726,9 @@ class FetcherLocalTest(FetcherTest): | |||
619 | os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir')) | 726 | os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir')) |
620 | touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e')) | 727 | touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e')) |
621 | touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device')) | 728 | touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device')) |
729 | bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir) | ||
730 | bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir) | ||
731 | bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir) | ||
622 | self.d.setVar("FILESPATH", self.localsrcdir) | 732 | self.d.setVar("FILESPATH", self.localsrcdir) |
623 | 733 | ||
624 | def fetchUnpack(self, uris): | 734 | def fetchUnpack(self, uris): |
@@ -632,6 +742,11 @@ class FetcherLocalTest(FetcherTest): | |||
632 | flst.sort() | 742 | flst.sort() |
633 | return flst | 743 | return flst |
634 | 744 | ||
745 | def test_local_checksum_fails_no_file(self): | ||
746 | self.d.setVar("SRC_URI", "file://404") | ||
747 | with self.assertRaises(bb.BBHandledException): | ||
748 | bb.fetch.get_checksum_file_list(self.d) | ||
749 | |||
635 | def test_local(self): | 750 | def test_local(self): |
636 | tree = self.fetchUnpack(['file://a', 'file://dir/c']) | 751 | tree = self.fetchUnpack(['file://a', 'file://dir/c']) |
637 | self.assertEqual(tree, ['a', 'dir/c']) | 752 | self.assertEqual(tree, ['a', 'dir/c']) |
@@ -673,57 +788,58 @@ class FetcherLocalTest(FetcherTest): | |||
673 | with self.assertRaises(bb.fetch2.UnpackError): | 788 | with self.assertRaises(bb.fetch2.UnpackError): |
674 | self.fetchUnpack(['file://a;subdir=/bin/sh']) | 789 | self.fetchUnpack(['file://a;subdir=/bin/sh']) |
675 | 790 | ||
676 | def test_local_gitfetch_usehead(self): | 791 | def test_local_striplevel(self): |
792 | tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1']) | ||
793 | self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e']) | ||
794 | |||
795 | def test_local_striplevel_gzip(self): | ||
796 | tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1']) | ||
797 | self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e']) | ||
798 | |||
799 | def test_local_striplevel_bzip2(self): | ||
800 | tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1']) | ||
801 | self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e']) | ||
802 | |||
803 | def dummyGitTest(self, suffix): | ||
677 | # Create dummy local Git repo | 804 | # Create dummy local Git repo |
678 | src_dir = tempfile.mkdtemp(dir=self.tempdir, | 805 | src_dir = tempfile.mkdtemp(dir=self.tempdir, |
679 | prefix='gitfetch_localusehead_') | 806 | prefix='gitfetch_localusehead_') |
680 | src_dir = os.path.abspath(src_dir) | 807 | self.gitdir = os.path.abspath(src_dir) |
681 | bb.process.run("git init", cwd=src_dir) | 808 | self.git_init() |
682 | bb.process.run("git commit --allow-empty -m'Dummy commit'", | 809 | self.git(['commit', '--allow-empty', '-m', 'Dummy commit']) |
683 | cwd=src_dir) | ||
684 | # Use other branch than master | 810 | # Use other branch than master |
685 | bb.process.run("git checkout -b my-devel", cwd=src_dir) | 811 | self.git(['checkout', '-b', 'my-devel']) |
686 | bb.process.run("git commit --allow-empty -m'Dummy commit 2'", | 812 | self.git(['commit', '--allow-empty', '-m', 'Dummy commit 2']) |
687 | cwd=src_dir) | 813 | orig_rev = self.git(['rev-parse', 'HEAD']).strip() |
688 | stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir) | ||
689 | orig_rev = stdout[0].strip() | ||
690 | 814 | ||
691 | # Fetch and check revision | 815 | # Fetch and check revision |
692 | self.d.setVar("SRCREV", "AUTOINC") | 816 | self.d.setVar("SRCREV", "AUTOINC") |
693 | url = "git://" + src_dir + ";protocol=file;usehead=1" | 817 | self.d.setVar("__BBSRCREV_SEEN", "1") |
818 | url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix | ||
694 | fetcher = bb.fetch.Fetch([url], self.d) | 819 | fetcher = bb.fetch.Fetch([url], self.d) |
695 | fetcher.download() | 820 | fetcher.download() |
696 | fetcher.unpack(self.unpackdir) | 821 | fetcher.unpack(self.unpackdir) |
697 | stdout = bb.process.run("git rev-parse HEAD", | 822 | unpack_rev = self.git(['rev-parse', 'HEAD'], |
698 | cwd=os.path.join(self.unpackdir, 'git')) | 823 | cwd=os.path.join(self.unpackdir, 'git')).strip() |
699 | unpack_rev = stdout[0].strip() | ||
700 | self.assertEqual(orig_rev, unpack_rev) | 824 | self.assertEqual(orig_rev, unpack_rev) |
701 | 825 | ||
826 | def test_local_gitfetch_usehead(self): | ||
827 | self.dummyGitTest("usehead=1") | ||
828 | |||
702 | def test_local_gitfetch_usehead_withname(self): | 829 | def test_local_gitfetch_usehead_withname(self): |
703 | # Create dummy local Git repo | 830 | self.dummyGitTest("usehead=1;name=newName") |
704 | src_dir = tempfile.mkdtemp(dir=self.tempdir, | ||
705 | prefix='gitfetch_localusehead_') | ||
706 | src_dir = os.path.abspath(src_dir) | ||
707 | bb.process.run("git init", cwd=src_dir) | ||
708 | bb.process.run("git commit --allow-empty -m'Dummy commit'", | ||
709 | cwd=src_dir) | ||
710 | # Use other branch than master | ||
711 | bb.process.run("git checkout -b my-devel", cwd=src_dir) | ||
712 | bb.process.run("git commit --allow-empty -m'Dummy commit 2'", | ||
713 | cwd=src_dir) | ||
714 | stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir) | ||
715 | orig_rev = stdout[0].strip() | ||
716 | 831 | ||
717 | # Fetch and check revision | 832 | def test_local_gitfetch_shared(self): |
718 | self.d.setVar("SRCREV", "AUTOINC") | 833 | self.dummyGitTest("usehead=1;name=sharedName") |
719 | url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName" | 834 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') |
720 | fetcher = bb.fetch.Fetch([url], self.d) | 835 | self.assertTrue(os.path.exists(alt)) |
721 | fetcher.download() | 836 | |
722 | fetcher.unpack(self.unpackdir) | 837 | def test_local_gitfetch_noshared(self): |
723 | stdout = bb.process.run("git rev-parse HEAD", | 838 | self.d.setVar('BB_GIT_NOSHARED', '1') |
724 | cwd=os.path.join(self.unpackdir, 'git')) | 839 | self.unpackdir += '_noshared' |
725 | unpack_rev = stdout[0].strip() | 840 | self.dummyGitTest("usehead=1;name=noSharedName") |
726 | self.assertEqual(orig_rev, unpack_rev) | 841 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') |
842 | self.assertFalse(os.path.exists(alt)) | ||
727 | 843 | ||
728 | class FetcherNoNetworkTest(FetcherTest): | 844 | class FetcherNoNetworkTest(FetcherTest): |
729 | def setUp(self): | 845 | def setUp(self): |
@@ -831,12 +947,12 @@ class FetcherNoNetworkTest(FetcherTest): | |||
831 | class FetcherNetworkTest(FetcherTest): | 947 | class FetcherNetworkTest(FetcherTest): |
832 | @skipIfNoNetwork() | 948 | @skipIfNoNetwork() |
833 | def test_fetch(self): | 949 | def test_fetch(self): |
834 | fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) | 950 | fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) |
835 | fetcher.download() | 951 | fetcher.download() |
836 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | 952 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) |
837 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892) | 953 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892) |
838 | self.d.setVar("BB_NO_NETWORK", "1") | 954 | self.d.setVar("BB_NO_NETWORK", "1") |
839 | fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) | 955 | fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) |
840 | fetcher.download() | 956 | fetcher.download() |
841 | fetcher.unpack(self.unpackdir) | 957 | fetcher.unpack(self.unpackdir) |
842 | self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9) | 958 | self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9) |
@@ -844,21 +960,22 @@ class FetcherNetworkTest(FetcherTest): | |||
844 | 960 | ||
845 | @skipIfNoNetwork() | 961 | @skipIfNoNetwork() |
846 | def test_fetch_mirror(self): | 962 | def test_fetch_mirror(self): |
847 | self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") | 963 | self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake") |
848 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) | 964 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) |
849 | fetcher.download() | 965 | fetcher.download() |
850 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | 966 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) |
851 | 967 | ||
852 | @skipIfNoNetwork() | 968 | @skipIfNoNetwork() |
853 | def test_fetch_mirror_of_mirror(self): | 969 | def test_fetch_mirror_of_mirror(self): |
854 | self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake") | 970 | self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake") |
855 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) | 971 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) |
856 | fetcher.download() | 972 | fetcher.download() |
857 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | 973 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) |
858 | 974 | ||
859 | @skipIfNoNetwork() | 975 | @skipIfNoNetwork() |
860 | def test_fetch_file_mirror_of_mirror(self): | 976 | def test_fetch_file_mirror_of_mirror(self): |
861 | self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake") | 977 | self.d.setVar("FILESPATH", ".") |
978 | self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake") | ||
862 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) | 979 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) |
863 | os.mkdir(self.dldir + "/some2where") | 980 | os.mkdir(self.dldir + "/some2where") |
864 | fetcher.download() | 981 | fetcher.download() |
@@ -866,16 +983,46 @@ class FetcherNetworkTest(FetcherTest): | |||
866 | 983 | ||
867 | @skipIfNoNetwork() | 984 | @skipIfNoNetwork() |
868 | def test_fetch_premirror(self): | 985 | def test_fetch_premirror(self): |
869 | self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") | 986 | self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake") |
870 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) | 987 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) |
871 | fetcher.download() | 988 | fetcher.download() |
872 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | 989 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) |
873 | 990 | ||
874 | @skipIfNoNetwork() | 991 | @skipIfNoNetwork() |
992 | def test_fetch_specify_downloadfilename(self): | ||
993 | fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d) | ||
994 | fetcher.download() | ||
995 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749) | ||
996 | |||
997 | @skipIfNoNetwork() | ||
998 | def test_fetch_premirror_specify_downloadfilename_regex_uri(self): | ||
999 | self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/") | ||
1000 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d) | ||
1001 | fetcher.download() | ||
1002 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | ||
1003 | |||
1004 | @skipIfNoNetwork() | ||
1005 | # BZ13039 | ||
1006 | def test_fetch_premirror_specify_downloadfilename_specific_uri(self): | ||
1007 | self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake") | ||
1008 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d) | ||
1009 | fetcher.download() | ||
1010 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | ||
1011 | |||
1012 | @skipIfNoNetwork() | ||
1013 | def test_fetch_premirror_use_downloadfilename_to_fetch(self): | ||
1014 | # Ensure downloadfilename is used when fetching from premirror. | ||
1015 | self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake") | ||
1016 | fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d) | ||
1017 | fetcher.download() | ||
1018 | self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) | ||
1019 | |||
1020 | @skipIfNoNetwork() | ||
875 | def gitfetcher(self, url1, url2): | 1021 | def gitfetcher(self, url1, url2): |
876 | def checkrevision(self, fetcher): | 1022 | def checkrevision(self, fetcher): |
877 | fetcher.unpack(self.unpackdir) | 1023 | fetcher.unpack(self.unpackdir) |
878 | revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip() | 1024 | revision = self.git(['rev-parse', 'HEAD'], |
1025 | cwd=os.path.join(self.unpackdir, 'git')).strip() | ||
879 | self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5") | 1026 | self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5") |
880 | 1027 | ||
881 | self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") | 1028 | self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") |
@@ -893,25 +1040,25 @@ class FetcherNetworkTest(FetcherTest): | |||
893 | 1040 | ||
894 | @skipIfNoNetwork() | 1041 | @skipIfNoNetwork() |
895 | def test_gitfetch(self): | 1042 | def test_gitfetch(self): |
896 | url1 = url2 = "git://git.openembedded.org/bitbake" | 1043 | url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https" |
897 | self.gitfetcher(url1, url2) | 1044 | self.gitfetcher(url1, url2) |
898 | 1045 | ||
899 | @skipIfNoNetwork() | 1046 | @skipIfNoNetwork() |
900 | def test_gitfetch_goodsrcrev(self): | 1047 | def test_gitfetch_goodsrcrev(self): |
901 | # SRCREV is set but matches rev= parameter | 1048 | # SRCREV is set but matches rev= parameter |
902 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5" | 1049 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https" |
903 | self.gitfetcher(url1, url2) | 1050 | self.gitfetcher(url1, url2) |
904 | 1051 | ||
905 | @skipIfNoNetwork() | 1052 | @skipIfNoNetwork() |
906 | def test_gitfetch_badsrcrev(self): | 1053 | def test_gitfetch_badsrcrev(self): |
907 | # SRCREV is set but does not match rev= parameter | 1054 | # SRCREV is set but does not match rev= parameter |
908 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5" | 1055 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https" |
909 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) | 1056 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) |
910 | 1057 | ||
911 | @skipIfNoNetwork() | 1058 | @skipIfNoNetwork() |
912 | def test_gitfetch_tagandrev(self): | 1059 | def test_gitfetch_tagandrev(self): |
913 | # SRCREV is set but does not match rev= parameter | 1060 | # SRCREV is set but does not match rev= parameter |
914 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5" | 1061 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https" |
915 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) | 1062 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) |
916 | 1063 | ||
917 | @skipIfNoNetwork() | 1064 | @skipIfNoNetwork() |
@@ -920,7 +1067,7 @@ class FetcherNetworkTest(FetcherTest): | |||
920 | # `usehead=1' and instead fetch the specified SRCREV. See | 1067 | # `usehead=1' and instead fetch the specified SRCREV. See |
921 | # test_local_gitfetch_usehead() for a positive use of the usehead | 1068 | # test_local_gitfetch_usehead() for a positive use of the usehead |
922 | # feature. | 1069 | # feature. |
923 | url = "git://git.openembedded.org/bitbake;usehead=1" | 1070 | url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https" |
924 | self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) | 1071 | self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) |
925 | 1072 | ||
926 | @skipIfNoNetwork() | 1073 | @skipIfNoNetwork() |
@@ -929,38 +1076,38 @@ class FetcherNetworkTest(FetcherTest): | |||
929 | # `usehead=1' and instead fetch the specified SRCREV. See | 1076 | # `usehead=1' and instead fetch the specified SRCREV. See |
930 | # test_local_gitfetch_usehead() for a positive use of the usehead | 1077 | # test_local_gitfetch_usehead() for a positive use of the usehead |
931 | # feature. | 1078 | # feature. |
932 | url = "git://git.openembedded.org/bitbake;usehead=1;name=newName" | 1079 | url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https" |
933 | self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) | 1080 | self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) |
934 | 1081 | ||
935 | @skipIfNoNetwork() | 1082 | @skipIfNoNetwork() |
936 | def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self): | 1083 | def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self): |
937 | recipeurl = "git://git.openembedded.org/bitbake" | 1084 | recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https" |
938 | mirrorurl = "git://someserver.org/bitbake" | 1085 | mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https" |
939 | self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") | 1086 | self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake") |
940 | self.gitfetcher(recipeurl, mirrorurl) | 1087 | self.gitfetcher(recipeurl, mirrorurl) |
941 | 1088 | ||
942 | @skipIfNoNetwork() | 1089 | @skipIfNoNetwork() |
943 | def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self): | 1090 | def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self): |
944 | recipeurl = "git://someserver.org/bitbake" | 1091 | recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https" |
945 | self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") | 1092 | self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake") |
946 | self.gitfetcher(recipeurl, recipeurl) | 1093 | self.gitfetcher(recipeurl, recipeurl) |
947 | 1094 | ||
948 | @skipIfNoNetwork() | 1095 | @skipIfNoNetwork() |
949 | def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): | 1096 | def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): |
950 | realurl = "git://git.openembedded.org/bitbake" | 1097 | realurl = "https://git.openembedded.org/bitbake" |
951 | recipeurl = "git://someserver.org/bitbake" | 1098 | recipeurl = "git://someserver.org/bitbake;protocol=https" |
952 | self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") | 1099 | self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") |
953 | os.chdir(self.tempdir) | 1100 | os.chdir(self.tempdir) |
954 | bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True) | 1101 | self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir) |
955 | self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir)) | 1102 | self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file" % (recipeurl, self.sourcedir)) |
956 | self.gitfetcher(recipeurl, recipeurl) | 1103 | self.gitfetcher(recipeurl, recipeurl) |
957 | 1104 | ||
958 | @skipIfNoNetwork() | 1105 | @skipIfNoNetwork() |
959 | def test_git_submodule(self): | 1106 | def test_git_submodule(self): |
960 | # URL with ssh submodules | 1107 | # URL with ssh submodules |
961 | url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7" | 1108 | url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https" |
962 | # Original URL (comment this if you have ssh access to git.yoctoproject.org) | 1109 | # Original URL (comment this if you have ssh access to git.yoctoproject.org) |
963 | url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee" | 1110 | url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https" |
964 | fetcher = bb.fetch.Fetch([url], self.d) | 1111 | fetcher = bb.fetch.Fetch([url], self.d) |
965 | fetcher.download() | 1112 | fetcher.download() |
966 | # Previous cwd has been deleted | 1113 | # Previous cwd has been deleted |
@@ -977,10 +1124,29 @@ class FetcherNetworkTest(FetcherTest): | |||
977 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing') | 1124 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing') |
978 | 1125 | ||
979 | @skipIfNoNetwork() | 1126 | @skipIfNoNetwork() |
1127 | def test_git_submodule_restricted_network_premirrors(self): | ||
1128 | # this test is to ensure that premirrors will be tried in restricted network | ||
1129 | # that is, BB_ALLOWED_NETWORKS does not contain the domain the url uses | ||
1130 | url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231" | ||
1131 | # create a download directory to be used as premirror later | ||
1132 | tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-") | ||
1133 | dl_premirror = os.path.join(tempdir, "download-premirror") | ||
1134 | os.mkdir(dl_premirror) | ||
1135 | self.d.setVar("DL_DIR", dl_premirror) | ||
1136 | fetcher = bb.fetch.Fetch([url], self.d) | ||
1137 | fetcher.download() | ||
1138 | # now use the premirror in restricted network | ||
1139 | self.d.setVar("DL_DIR", self.dldir) | ||
1140 | self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror) | ||
1141 | self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain") | ||
1142 | fetcher = bb.fetch.Fetch([url], self.d) | ||
1143 | fetcher.download() | ||
1144 | |||
1145 | @skipIfNoNetwork() | ||
980 | def test_git_submodule_dbus_broker(self): | 1146 | def test_git_submodule_dbus_broker(self): |
981 | # The following external repositories have show failures in fetch and unpack operations | 1147 | # The following external repositories have show failures in fetch and unpack operations |
982 | # We want to avoid regressions! | 1148 | # We want to avoid regressions! |
983 | url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" | 1149 | url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" |
984 | fetcher = bb.fetch.Fetch([url], self.d) | 1150 | fetcher = bb.fetch.Fetch([url], self.d) |
985 | fetcher.download() | 1151 | fetcher.download() |
986 | # Previous cwd has been deleted | 1152 | # Previous cwd has been deleted |
@@ -996,7 +1162,7 @@ class FetcherNetworkTest(FetcherTest): | |||
996 | 1162 | ||
997 | @skipIfNoNetwork() | 1163 | @skipIfNoNetwork() |
998 | def test_git_submodule_CLI11(self): | 1164 | def test_git_submodule_CLI11(self): |
999 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" | 1165 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main" |
1000 | fetcher = bb.fetch.Fetch([url], self.d) | 1166 | fetcher = bb.fetch.Fetch([url], self.d) |
1001 | fetcher.download() | 1167 | fetcher.download() |
1002 | # Previous cwd has been deleted | 1168 | # Previous cwd has been deleted |
@@ -1011,12 +1177,12 @@ class FetcherNetworkTest(FetcherTest): | |||
1011 | @skipIfNoNetwork() | 1177 | @skipIfNoNetwork() |
1012 | def test_git_submodule_update_CLI11(self): | 1178 | def test_git_submodule_update_CLI11(self): |
1013 | """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ | 1179 | """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ |
1014 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" | 1180 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main" |
1015 | fetcher = bb.fetch.Fetch([url], self.d) | 1181 | fetcher = bb.fetch.Fetch([url], self.d) |
1016 | fetcher.download() | 1182 | fetcher.download() |
1017 | 1183 | ||
1018 | # CLI11 that pulls in a newer nlohmann-json | 1184 | # CLI11 that pulls in a newer nlohmann-json |
1019 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" | 1185 | url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main" |
1020 | fetcher = bb.fetch.Fetch([url], self.d) | 1186 | fetcher = bb.fetch.Fetch([url], self.d) |
1021 | fetcher.download() | 1187 | fetcher.download() |
1022 | # Previous cwd has been deleted | 1188 | # Previous cwd has been deleted |
@@ -1030,7 +1196,7 @@ class FetcherNetworkTest(FetcherTest): | |||
1030 | 1196 | ||
1031 | @skipIfNoNetwork() | 1197 | @skipIfNoNetwork() |
1032 | def test_git_submodule_aktualizr(self): | 1198 | def test_git_submodule_aktualizr(self): |
1033 | url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" | 1199 | url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" |
1034 | fetcher = bb.fetch.Fetch([url], self.d) | 1200 | fetcher = bb.fetch.Fetch([url], self.d) |
1035 | fetcher.download() | 1201 | fetcher.download() |
1036 | # Previous cwd has been deleted | 1202 | # Previous cwd has been deleted |
@@ -1050,7 +1216,7 @@ class FetcherNetworkTest(FetcherTest): | |||
1050 | """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ | 1216 | """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ |
1051 | 1217 | ||
1052 | # This repository also has submodules where the module (name), path and url do not align | 1218 | # This repository also has submodules where the module (name), path and url do not align |
1053 | url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" | 1219 | url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main" |
1054 | fetcher = bb.fetch.Fetch([url], self.d) | 1220 | fetcher = bb.fetch.Fetch([url], self.d) |
1055 | fetcher.download() | 1221 | fetcher.download() |
1056 | # Previous cwd has been deleted | 1222 | # Previous cwd has been deleted |
@@ -1073,6 +1239,15 @@ class FetcherNetworkTest(FetcherTest): | |||
1073 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout') | 1239 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout') |
1074 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout') | 1240 | self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout') |
1075 | 1241 | ||
1242 | @skipIfNoNetwork() | ||
1243 | def test_git_submodule_reference_to_parent(self): | ||
1244 | self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master" | ||
1245 | self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1") | ||
1246 | with Timeout(60): | ||
1247 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
1248 | with self.assertRaises(bb.fetch2.FetchError): | ||
1249 | fetcher.download() | ||
1250 | |||
1076 | class SVNTest(FetcherTest): | 1251 | class SVNTest(FetcherTest): |
1077 | def skipIfNoSvn(): | 1252 | def skipIfNoSvn(): |
1078 | import shutil | 1253 | import shutil |
@@ -1107,8 +1282,9 @@ class SVNTest(FetcherTest): | |||
1107 | cwd=repo_dir) | 1282 | cwd=repo_dir) |
1108 | 1283 | ||
1109 | bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) | 1284 | bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) |
1110 | # Github will emulate SVN. Use this to check if we're downloding... | 1285 | # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/) |
1111 | bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .", | 1286 | # Use still accessible svn repo (only trunk to avoid longer downloads) |
1287 | bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .", | ||
1112 | cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) | 1288 | cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) |
1113 | bb.process.run("svn commit --non-interactive -m 'Add external'", | 1289 | bb.process.run("svn commit --non-interactive -m 'Add external'", |
1114 | cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) | 1290 | cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) |
@@ -1136,8 +1312,8 @@ class SVNTest(FetcherTest): | |||
1136 | 1312 | ||
1137 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") | 1313 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") |
1138 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") | 1314 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") |
1139 | self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist") | 1315 | self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist") |
1140 | self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exit") | 1316 | self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exit") |
1141 | 1317 | ||
1142 | @skipIfNoSvn() | 1318 | @skipIfNoSvn() |
1143 | def test_external_svn(self): | 1319 | def test_external_svn(self): |
@@ -1150,49 +1326,49 @@ class SVNTest(FetcherTest): | |||
1150 | 1326 | ||
1151 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") | 1327 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") |
1152 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") | 1328 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") |
1153 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist") | 1329 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist") |
1154 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exit") | 1330 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exit") |
1155 | 1331 | ||
1156 | class TrustedNetworksTest(FetcherTest): | 1332 | class TrustedNetworksTest(FetcherTest): |
1157 | def test_trusted_network(self): | 1333 | def test_trusted_network(self): |
1158 | # Ensure trusted_network returns False when the host IS in the list. | 1334 | # Ensure trusted_network returns False when the host IS in the list. |
1159 | url = "git://Someserver.org/foo;rev=1" | 1335 | url = "git://Someserver.org/foo;rev=1;branch=master" |
1160 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org") | 1336 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org") |
1161 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) | 1337 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) |
1162 | 1338 | ||
1163 | def test_wild_trusted_network(self): | 1339 | def test_wild_trusted_network(self): |
1164 | # Ensure trusted_network returns true when the *.host IS in the list. | 1340 | # Ensure trusted_network returns true when the *.host IS in the list. |
1165 | url = "git://Someserver.org/foo;rev=1" | 1341 | url = "git://Someserver.org/foo;rev=1;branch=master" |
1166 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") | 1342 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") |
1167 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) | 1343 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) |
1168 | 1344 | ||
1169 | def test_prefix_wild_trusted_network(self): | 1345 | def test_prefix_wild_trusted_network(self): |
1170 | # Ensure trusted_network returns true when the prefix matches *.host. | 1346 | # Ensure trusted_network returns true when the prefix matches *.host. |
1171 | url = "git://git.Someserver.org/foo;rev=1" | 1347 | url = "git://git.Someserver.org/foo;rev=1;branch=master" |
1172 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") | 1348 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") |
1173 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) | 1349 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) |
1174 | 1350 | ||
1175 | def test_two_prefix_wild_trusted_network(self): | 1351 | def test_two_prefix_wild_trusted_network(self): |
1176 | # Ensure trusted_network returns true when the prefix matches *.host. | 1352 | # Ensure trusted_network returns true when the prefix matches *.host. |
1177 | url = "git://something.git.Someserver.org/foo;rev=1" | 1353 | url = "git://something.git.Someserver.org/foo;rev=1;branch=master" |
1178 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") | 1354 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") |
1179 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) | 1355 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) |
1180 | 1356 | ||
1181 | def test_port_trusted_network(self): | 1357 | def test_port_trusted_network(self): |
1182 | # Ensure trusted_network returns True, even if the url specifies a port. | 1358 | # Ensure trusted_network returns True, even if the url specifies a port. |
1183 | url = "git://someserver.org:8080/foo;rev=1" | 1359 | url = "git://someserver.org:8080/foo;rev=1;branch=master" |
1184 | self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org") | 1360 | self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org") |
1185 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) | 1361 | self.assertTrue(bb.fetch.trusted_network(self.d, url)) |
1186 | 1362 | ||
1187 | def test_untrusted_network(self): | 1363 | def test_untrusted_network(self): |
1188 | # Ensure trusted_network returns False when the host is NOT in the list. | 1364 | # Ensure trusted_network returns False when the host is NOT in the list. |
1189 | url = "git://someserver.org/foo;rev=1" | 1365 | url = "git://someserver.org/foo;rev=1;branch=master" |
1190 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") | 1366 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") |
1191 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) | 1367 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) |
1192 | 1368 | ||
1193 | def test_wild_untrusted_network(self): | 1369 | def test_wild_untrusted_network(self): |
1194 | # Ensure trusted_network returns False when the host is NOT in the list. | 1370 | # Ensure trusted_network returns False when the host is NOT in the list. |
1195 | url = "git://*.someserver.org/foo;rev=1" | 1371 | url = "git://*.someserver.org/foo;rev=1;branch=master" |
1196 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") | 1372 | self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") |
1197 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) | 1373 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) |
1198 | 1374 | ||
@@ -1202,14 +1378,17 @@ class URLHandle(unittest.TestCase): | |||
1202 | "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), | 1378 | "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), |
1203 | "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), | 1379 | "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), |
1204 | "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), | 1380 | "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), |
1205 | "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}), | 1381 | "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}), |
1206 | "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), | 1382 | "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), |
1383 | "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}), | ||
1384 | r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}), | ||
1207 | } | 1385 | } |
1208 | # we require a pathname to encodeurl but users can still pass such urls to | 1386 | # we require a pathname to encodeurl but users can still pass such urls to |
1209 | # decodeurl and we need to handle them | 1387 | # decodeurl and we need to handle them |
1210 | decodedata = datatable.copy() | 1388 | decodedata = datatable.copy() |
1211 | decodedata.update({ | 1389 | decodedata.update({ |
1212 | "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}), | 1390 | "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}), |
1391 | "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}), | ||
1213 | }) | 1392 | }) |
1214 | 1393 | ||
1215 | def test_decodeurl(self): | 1394 | def test_decodeurl(self): |
@@ -1226,37 +1405,39 @@ class FetchLatestVersionTest(FetcherTest): | |||
1226 | 1405 | ||
1227 | test_git_uris = { | 1406 | test_git_uris = { |
1228 | # version pattern "X.Y.Z" | 1407 | # version pattern "X.Y.Z" |
1229 | ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "") | 1408 | ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "") |
1230 | : "1.99.4", | 1409 | : "1.99.4", |
1231 | # version pattern "vX.Y" | 1410 | # version pattern "vX.Y" |
1232 | # mirror of git.infradead.org since network issues interfered with testing | 1411 | # mirror of git.infradead.org since network issues interfered with testing |
1233 | ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "") | 1412 | ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "") |
1234 | : "1.5.0", | 1413 | : "1.5.0", |
1235 | # version pattern "pkg_name-X.Y" | 1414 | # version pattern "pkg_name-X.Y" |
1236 | # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing | 1415 | # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing |
1237 | ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") | 1416 | ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "") |
1238 | : "1.0", | 1417 | : "1.0", |
1239 | # version pattern "pkg_name-vX.Y.Z" | 1418 | # version pattern "pkg_name-vX.Y.Z" |
1240 | ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") | 1419 | ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "") |
1241 | : "1.4.0", | 1420 | : "1.4.0", |
1242 | # combination version pattern | 1421 | # combination version pattern |
1243 | ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") | 1422 | ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") |
1244 | : "1.2.0", | 1423 | : "1.2.0", |
1245 | ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "") | 1424 | ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") |
1246 | : "2014.01", | 1425 | : "2014.01", |
1247 | # version pattern "yyyymmdd" | 1426 | # version pattern "yyyymmdd" |
1248 | ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "") | 1427 | ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") |
1249 | : "20120614", | 1428 | : "20120614", |
1250 | # packages with a valid UPSTREAM_CHECK_GITTAGREGEX | 1429 | # packages with a valid UPSTREAM_CHECK_GITTAGREGEX |
1251 | # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing | 1430 | # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing |
1252 | ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))") | 1431 | ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "") |
1253 | : "0.4.3", | 1432 | : "0.4.3", |
1254 | ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))") | 1433 | ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "") |
1255 | : "11.0.0", | 1434 | : "11.0.0", |
1256 | ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") | 1435 | ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "") |
1257 | : "1.3.59", | 1436 | : "1.3.59", |
1258 | ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") | 1437 | ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "") |
1259 | : "3.82+dbg0.9", | 1438 | : "3.82+dbg0.9", |
1439 | ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0") | ||
1440 | : "0.28.0", | ||
1260 | } | 1441 | } |
1261 | 1442 | ||
1262 | test_wget_uris = { | 1443 | test_wget_uris = { |
@@ -1272,13 +1453,16 @@ class FetchLatestVersionTest(FetcherTest): | |||
1272 | # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz | 1453 | # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz |
1273 | ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") | 1454 | ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") |
1274 | : "2.8.12.1", | 1455 | : "2.8.12.1", |
1456 | # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz | ||
1457 | ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "") | ||
1458 | : "2.10.3", | ||
1275 | # | 1459 | # |
1276 | # packages with versions only in current directory | 1460 | # packages with versions only in current directory |
1277 | # | 1461 | # |
1278 | # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 | 1462 | # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 |
1279 | ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") | 1463 | ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") |
1280 | : "2.19", | 1464 | : "2.19", |
1281 | # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 | 1465 | # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 |
1282 | ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") | 1466 | ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") |
1283 | : "20120814", | 1467 | : "20120814", |
1284 | # | 1468 | # |
@@ -1295,12 +1479,18 @@ class FetchLatestVersionTest(FetcherTest): | |||
1295 | # | 1479 | # |
1296 | # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 | 1480 | # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 |
1297 | # https://github.com/apple/cups/releases | 1481 | # https://github.com/apple/cups/releases |
1298 | ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") | 1482 | ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") |
1299 | : "2.0.0", | 1483 | : "2.0.0", |
1300 | # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz | 1484 | # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz |
1301 | # http://ftp.debian.org/debian/pool/main/d/db5.3/ | 1485 | # http://ftp.debian.org/debian/pool/main/d/db5.3/ |
1302 | ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") | 1486 | ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") |
1303 | : "5.3.10", | 1487 | : "5.3.10", |
1488 | # | ||
1489 | # packages where the tarball compression changed in the new version | ||
1490 | # | ||
1491 | # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz | ||
1492 | ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "") | ||
1493 | : "2.8", | ||
1304 | } | 1494 | } |
1305 | 1495 | ||
1306 | @skipIfNoNetwork() | 1496 | @skipIfNoNetwork() |
@@ -1315,6 +1505,9 @@ class FetchLatestVersionTest(FetcherTest): | |||
1315 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) | 1505 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) |
1316 | r = bb.utils.vercmp_string(v, verstring) | 1506 | r = bb.utils.vercmp_string(v, verstring) |
1317 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) | 1507 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) |
1508 | if k[4]: | ||
1509 | r = bb.utils.vercmp_string(verstring, k[4]) | ||
1510 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4])) | ||
1318 | 1511 | ||
1319 | def test_wget_latest_versionstring(self): | 1512 | def test_wget_latest_versionstring(self): |
1320 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" | 1513 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" |
@@ -1341,17 +1534,14 @@ class FetchLatestVersionTest(FetcherTest): | |||
1341 | 1534 | ||
1342 | 1535 | ||
1343 | class FetchCheckStatusTest(FetcherTest): | 1536 | class FetchCheckStatusTest(FetcherTest): |
1344 | test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", | 1537 | test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", |
1345 | "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", | 1538 | "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", |
1346 | "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", | 1539 | "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", |
1347 | "https://yoctoproject.org/", | 1540 | "https://yoctoproject.org/", |
1348 | "https://yoctoproject.org/documentation", | 1541 | "https://docs.yoctoproject.org", |
1349 | "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", | 1542 | "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", |
1350 | "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", | 1543 | "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", |
1351 | "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", | 1544 | "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", |
1352 | "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz", | ||
1353 | "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz", | ||
1354 | "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz", | ||
1355 | # GitHub releases are hosted on Amazon S3, which doesn't support HEAD | 1545 | # GitHub releases are hosted on Amazon S3, which doesn't support HEAD |
1356 | "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" | 1546 | "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" |
1357 | ] | 1547 | ] |
@@ -1389,7 +1579,7 @@ class GitMakeShallowTest(FetcherTest): | |||
1389 | FetcherTest.setUp(self) | 1579 | FetcherTest.setUp(self) |
1390 | self.gitdir = os.path.join(self.tempdir, 'gitshallow') | 1580 | self.gitdir = os.path.join(self.tempdir, 'gitshallow') |
1391 | bb.utils.mkdirhier(self.gitdir) | 1581 | bb.utils.mkdirhier(self.gitdir) |
1392 | bb.process.run('git init', cwd=self.gitdir) | 1582 | self.git_init() |
1393 | 1583 | ||
1394 | def assertRefs(self, expected_refs): | 1584 | def assertRefs(self, expected_refs): |
1395 | actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() | 1585 | actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() |
@@ -1403,13 +1593,6 @@ class GitMakeShallowTest(FetcherTest): | |||
1403 | actual_count = len(revs.splitlines()) | 1593 | actual_count = len(revs.splitlines()) |
1404 | self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) | 1594 | self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) |
1405 | 1595 | ||
1406 | def git(self, cmd): | ||
1407 | if isinstance(cmd, str): | ||
1408 | cmd = 'git ' + cmd | ||
1409 | else: | ||
1410 | cmd = ['git'] + cmd | ||
1411 | return bb.process.run(cmd, cwd=self.gitdir)[0] | ||
1412 | |||
1413 | def make_shallow(self, args=None): | 1596 | def make_shallow(self, args=None): |
1414 | if args is None: | 1597 | if args is None: |
1415 | args = ['HEAD'] | 1598 | args = ['HEAD'] |
@@ -1512,13 +1695,13 @@ class GitShallowTest(FetcherTest): | |||
1512 | self.srcdir = os.path.join(self.tempdir, 'gitsource') | 1695 | self.srcdir = os.path.join(self.tempdir, 'gitsource') |
1513 | 1696 | ||
1514 | bb.utils.mkdirhier(self.srcdir) | 1697 | bb.utils.mkdirhier(self.srcdir) |
1515 | self.git('init', cwd=self.srcdir) | 1698 | self.git_init(cwd=self.srcdir) |
1516 | self.d.setVar('WORKDIR', self.tempdir) | 1699 | self.d.setVar('WORKDIR', self.tempdir) |
1517 | self.d.setVar('S', self.gitdir) | 1700 | self.d.setVar('S', self.gitdir) |
1518 | self.d.delVar('PREMIRRORS') | 1701 | self.d.delVar('PREMIRRORS') |
1519 | self.d.delVar('MIRRORS') | 1702 | self.d.delVar('MIRRORS') |
1520 | 1703 | ||
1521 | uri = 'git://%s;protocol=file;subdir=${S}' % self.srcdir | 1704 | uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir |
1522 | self.d.setVar('SRC_URI', uri) | 1705 | self.d.setVar('SRC_URI', uri) |
1523 | self.d.setVar('SRCREV', '${AUTOREV}') | 1706 | self.d.setVar('SRCREV', '${AUTOREV}') |
1524 | self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') | 1707 | self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') |
@@ -1526,6 +1709,7 @@ class GitShallowTest(FetcherTest): | |||
1526 | self.d.setVar('BB_GIT_SHALLOW', '1') | 1709 | self.d.setVar('BB_GIT_SHALLOW', '1') |
1527 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') | 1710 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') |
1528 | self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') | 1711 | self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') |
1712 | self.d.setVar("__BBSRCREV_SEEN", "1") | ||
1529 | 1713 | ||
1530 | def assertRefs(self, expected_refs, cwd=None): | 1714 | def assertRefs(self, expected_refs, cwd=None): |
1531 | if cwd is None: | 1715 | if cwd is None: |
@@ -1543,15 +1727,6 @@ class GitShallowTest(FetcherTest): | |||
1543 | actual_count = len(revs.splitlines()) | 1727 | actual_count = len(revs.splitlines()) |
1544 | self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) | 1728 | self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) |
1545 | 1729 | ||
1546 | def git(self, cmd, cwd=None): | ||
1547 | if isinstance(cmd, str): | ||
1548 | cmd = 'git ' + cmd | ||
1549 | else: | ||
1550 | cmd = ['git'] + cmd | ||
1551 | if cwd is None: | ||
1552 | cwd = self.gitdir | ||
1553 | return bb.process.run(cmd, cwd=cwd)[0] | ||
1554 | |||
1555 | def add_empty_file(self, path, cwd=None, msg=None): | 1730 | def add_empty_file(self, path, cwd=None, msg=None): |
1556 | if msg is None: | 1731 | if msg is None: |
1557 | msg = path | 1732 | msg = path |
@@ -1594,6 +1769,7 @@ class GitShallowTest(FetcherTest): | |||
1594 | 1769 | ||
1595 | # fetch and unpack, from the shallow tarball | 1770 | # fetch and unpack, from the shallow tarball |
1596 | bb.utils.remove(self.gitdir, recurse=True) | 1771 | bb.utils.remove(self.gitdir, recurse=True) |
1772 | bb.process.run('chmod u+w -R "%s"' % ud.clonedir) | ||
1597 | bb.utils.remove(ud.clonedir, recurse=True) | 1773 | bb.utils.remove(ud.clonedir, recurse=True) |
1598 | bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) | 1774 | bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) |
1599 | 1775 | ||
@@ -1745,7 +1921,7 @@ class GitShallowTest(FetcherTest): | |||
1745 | 1921 | ||
1746 | smdir = os.path.join(self.tempdir, 'gitsubmodule') | 1922 | smdir = os.path.join(self.tempdir, 'gitsubmodule') |
1747 | bb.utils.mkdirhier(smdir) | 1923 | bb.utils.mkdirhier(smdir) |
1748 | self.git('init', cwd=smdir) | 1924 | self.git_init(cwd=smdir) |
1749 | # Make this look like it was cloned from a remote... | 1925 | # Make this look like it was cloned from a remote... |
1750 | self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) | 1926 | self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) |
1751 | self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) | 1927 | self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) |
@@ -1753,11 +1929,11 @@ class GitShallowTest(FetcherTest): | |||
1753 | self.add_empty_file('bsub', cwd=smdir) | 1929 | self.add_empty_file('bsub', cwd=smdir) |
1754 | 1930 | ||
1755 | self.git('submodule init', cwd=self.srcdir) | 1931 | self.git('submodule init', cwd=self.srcdir) |
1756 | self.git('submodule add file://%s' % smdir, cwd=self.srcdir) | 1932 | self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir) |
1757 | self.git('submodule update', cwd=self.srcdir) | 1933 | self.git('submodule update', cwd=self.srcdir) |
1758 | self.git('commit -m submodule -a', cwd=self.srcdir) | 1934 | self.git('commit -m submodule -a', cwd=self.srcdir) |
1759 | 1935 | ||
1760 | uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir | 1936 | uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir |
1761 | fetcher, ud = self.fetch_shallow(uri) | 1937 | fetcher, ud = self.fetch_shallow(uri) |
1762 | 1938 | ||
1763 | # Verify the main repository is shallow | 1939 | # Verify the main repository is shallow |
@@ -1775,7 +1951,7 @@ class GitShallowTest(FetcherTest): | |||
1775 | 1951 | ||
1776 | smdir = os.path.join(self.tempdir, 'gitsubmodule') | 1952 | smdir = os.path.join(self.tempdir, 'gitsubmodule') |
1777 | bb.utils.mkdirhier(smdir) | 1953 | bb.utils.mkdirhier(smdir) |
1778 | self.git('init', cwd=smdir) | 1954 | self.git_init(cwd=smdir) |
1779 | # Make this look like it was cloned from a remote... | 1955 | # Make this look like it was cloned from a remote... |
1780 | self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) | 1956 | self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) |
1781 | self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) | 1957 | self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) |
@@ -1783,7 +1959,7 @@ class GitShallowTest(FetcherTest): | |||
1783 | self.add_empty_file('bsub', cwd=smdir) | 1959 | self.add_empty_file('bsub', cwd=smdir) |
1784 | 1960 | ||
1785 | self.git('submodule init', cwd=self.srcdir) | 1961 | self.git('submodule init', cwd=self.srcdir) |
1786 | self.git('submodule add file://%s' % smdir, cwd=self.srcdir) | 1962 | self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir) |
1787 | self.git('submodule update', cwd=self.srcdir) | 1963 | self.git('submodule update', cwd=self.srcdir) |
1788 | self.git('commit -m submodule -a', cwd=self.srcdir) | 1964 | self.git('commit -m submodule -a', cwd=self.srcdir) |
1789 | 1965 | ||
@@ -1794,8 +1970,8 @@ class GitShallowTest(FetcherTest): | |||
1794 | 1970 | ||
1795 | # Set up the mirror | 1971 | # Set up the mirror |
1796 | mirrordir = os.path.join(self.tempdir, 'mirror') | 1972 | mirrordir = os.path.join(self.tempdir, 'mirror') |
1797 | os.rename(self.dldir, mirrordir) | 1973 | bb.utils.rename(self.dldir, mirrordir) |
1798 | self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/\n' % mirrordir) | 1974 | self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/' % mirrordir) |
1799 | 1975 | ||
1800 | # Fetch from the mirror | 1976 | # Fetch from the mirror |
1801 | bb.utils.remove(self.dldir, recurse=True) | 1977 | bb.utils.remove(self.dldir, recurse=True) |
@@ -1818,10 +1994,10 @@ class GitShallowTest(FetcherTest): | |||
1818 | self.git('annex init', cwd=self.srcdir) | 1994 | self.git('annex init', cwd=self.srcdir) |
1819 | open(os.path.join(self.srcdir, 'c'), 'w').close() | 1995 | open(os.path.join(self.srcdir, 'c'), 'w').close() |
1820 | self.git('annex add c', cwd=self.srcdir) | 1996 | self.git('annex add c', cwd=self.srcdir) |
1821 | self.git('commit -m annex-c -a', cwd=self.srcdir) | 1997 | self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir) |
1822 | bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex')) | 1998 | bb.process.run('chmod u+w -R %s' % self.srcdir) |
1823 | 1999 | ||
1824 | uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir | 2000 | uri = 'gitannex://%s;protocol=file;subdir=${S};branch=master' % self.srcdir |
1825 | fetcher, ud = self.fetch_shallow(uri) | 2001 | fetcher, ud = self.fetch_shallow(uri) |
1826 | 2002 | ||
1827 | self.assertRevCount(1) | 2003 | self.assertRevCount(1) |
@@ -1910,9 +2086,9 @@ class GitShallowTest(FetcherTest): | |||
1910 | # Set up the mirror | 2086 | # Set up the mirror |
1911 | mirrordir = os.path.join(self.tempdir, 'mirror') | 2087 | mirrordir = os.path.join(self.tempdir, 'mirror') |
1912 | bb.utils.mkdirhier(mirrordir) | 2088 | bb.utils.mkdirhier(mirrordir) |
1913 | self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/\n' % mirrordir) | 2089 | self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/' % mirrordir) |
1914 | 2090 | ||
1915 | os.rename(os.path.join(self.dldir, mirrortarball), | 2091 | bb.utils.rename(os.path.join(self.dldir, mirrortarball), |
1916 | os.path.join(mirrordir, mirrortarball)) | 2092 | os.path.join(mirrordir, mirrortarball)) |
1917 | 2093 | ||
1918 | # Fetch from the mirror | 2094 | # Fetch from the mirror |
@@ -2033,7 +2209,7 @@ class GitShallowTest(FetcherTest): | |||
2033 | 2209 | ||
2034 | @skipIfNoNetwork() | 2210 | @skipIfNoNetwork() |
2035 | def test_bitbake(self): | 2211 | def test_bitbake(self): |
2036 | self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir) | 2212 | self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir) |
2037 | self.git('config core.bare true', cwd=self.srcdir) | 2213 | self.git('config core.bare true', cwd=self.srcdir) |
2038 | self.git('fetch', cwd=self.srcdir) | 2214 | self.git('fetch', cwd=self.srcdir) |
2039 | 2215 | ||
@@ -2068,7 +2244,7 @@ class GitShallowTest(FetcherTest): | |||
2068 | self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') | 2244 | self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') |
2069 | self.d.setVar('BB_GIT_SHALLOW', '1') | 2245 | self.d.setVar('BB_GIT_SHALLOW', '1') |
2070 | self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') | 2246 | self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') |
2071 | fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d) | 2247 | fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d) |
2072 | fetcher.download() | 2248 | fetcher.download() |
2073 | 2249 | ||
2074 | bb.utils.remove(self.dldir + "/*.tar.gz") | 2250 | bb.utils.remove(self.dldir + "/*.tar.gz") |
@@ -2078,12 +2254,18 @@ class GitShallowTest(FetcherTest): | |||
2078 | self.assertIn("fstests.doap", dir) | 2254 | self.assertIn("fstests.doap", dir) |
2079 | 2255 | ||
2080 | class GitLfsTest(FetcherTest): | 2256 | class GitLfsTest(FetcherTest): |
2257 | def skipIfNoGitLFS(): | ||
2258 | import shutil | ||
2259 | if not shutil.which('git-lfs'): | ||
2260 | return unittest.skip('git-lfs not installed') | ||
2261 | return lambda f: f | ||
2262 | |||
2081 | def setUp(self): | 2263 | def setUp(self): |
2082 | FetcherTest.setUp(self) | 2264 | FetcherTest.setUp(self) |
2083 | 2265 | ||
2084 | self.gitdir = os.path.join(self.tempdir, 'git') | 2266 | self.gitdir = os.path.join(self.tempdir, 'git') |
2085 | self.srcdir = os.path.join(self.tempdir, 'gitsource') | 2267 | self.srcdir = os.path.join(self.tempdir, 'gitsource') |
2086 | 2268 | ||
2087 | self.d.setVar('WORKDIR', self.tempdir) | 2269 | self.d.setVar('WORKDIR', self.tempdir) |
2088 | self.d.setVar('S', self.gitdir) | 2270 | self.d.setVar('S', self.gitdir) |
2089 | self.d.delVar('PREMIRRORS') | 2271 | self.d.delVar('PREMIRRORS') |
@@ -2091,22 +2273,18 @@ class GitLfsTest(FetcherTest): | |||
2091 | 2273 | ||
2092 | self.d.setVar('SRCREV', '${AUTOREV}') | 2274 | self.d.setVar('SRCREV', '${AUTOREV}') |
2093 | self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') | 2275 | self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') |
2276 | self.d.setVar("__BBSRCREV_SEEN", "1") | ||
2094 | 2277 | ||
2095 | bb.utils.mkdirhier(self.srcdir) | 2278 | bb.utils.mkdirhier(self.srcdir) |
2096 | self.git('init', cwd=self.srcdir) | 2279 | self.git_init(cwd=self.srcdir) |
2097 | with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs: | 2280 | self.commit_file('.gitattributes', '*.mp3 filter=lfs -text') |
2098 | attrs.write('*.mp3 filter=lfs -text') | ||
2099 | self.git(['add', '.gitattributes'], cwd=self.srcdir) | ||
2100 | self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir) | ||
2101 | 2281 | ||
2102 | def git(self, cmd, cwd=None): | 2282 | def commit_file(self, filename, content): |
2103 | if isinstance(cmd, str): | 2283 | with open(os.path.join(self.srcdir, filename), "w") as f: |
2104 | cmd = 'git ' + cmd | 2284 | f.write(content) |
2105 | else: | 2285 | self.git(["add", filename], cwd=self.srcdir) |
2106 | cmd = ['git'] + cmd | 2286 | self.git(["commit", "-m", "Change"], cwd=self.srcdir) |
2107 | if cwd is None: | 2287 | return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip() |
2108 | cwd = self.gitdir | ||
2109 | return bb.process.run(cmd, cwd=cwd)[0] | ||
2110 | 2288 | ||
2111 | def fetch(self, uri=None, download=True): | 2289 | def fetch(self, uri=None, download=True): |
2112 | uris = self.d.getVar('SRC_URI').split() | 2290 | uris = self.d.getVar('SRC_URI').split() |
@@ -2119,65 +2297,158 @@ class GitLfsTest(FetcherTest): | |||
2119 | ud = fetcher.ud[uri] | 2297 | ud = fetcher.ud[uri] |
2120 | return fetcher, ud | 2298 | return fetcher, ud |
2121 | 2299 | ||
2300 | def get_real_git_lfs_file(self): | ||
2301 | self.d.setVar('PATH', os.environ.get('PATH')) | ||
2302 | fetcher, ud = self.fetch() | ||
2303 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2304 | unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg") | ||
2305 | return unpacked_lfs_file | ||
2306 | |||
2307 | @skipIfNoGitLFS() | ||
2308 | def test_fetch_lfs_on_srcrev_change(self): | ||
2309 | """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested""" | ||
2310 | self.git(["lfs", "install", "--local"], cwd=self.srcdir) | ||
2311 | |||
2312 | @contextlib.contextmanager | ||
2313 | def hide_upstream_repository(): | ||
2314 | """Hide the upstream repository to make sure that git lfs cannot pull from it""" | ||
2315 | temp_name = self.srcdir + ".bak" | ||
2316 | os.rename(self.srcdir, temp_name) | ||
2317 | try: | ||
2318 | yield | ||
2319 | finally: | ||
2320 | os.rename(temp_name, self.srcdir) | ||
2321 | |||
2322 | def fetch_and_verify(revision, filename, content): | ||
2323 | self.d.setVar('SRCREV', revision) | ||
2324 | fetcher, ud = self.fetch() | ||
2325 | |||
2326 | with hide_upstream_repository(): | ||
2327 | workdir = self.d.getVar('WORKDIR') | ||
2328 | fetcher.unpack(workdir) | ||
2329 | |||
2330 | with open(os.path.join(workdir, "git", filename)) as f: | ||
2331 | self.assertEqual(f.read(), content) | ||
2332 | |||
2333 | commit_1 = self.commit_file("a.mp3", "version 1") | ||
2334 | commit_2 = self.commit_file("a.mp3", "version 2") | ||
2335 | |||
2336 | self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir) | ||
2337 | |||
2338 | # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are | ||
2339 | # available even when the upstream repository disappears. | ||
2340 | fetch_and_verify(commit_2, "a.mp3", "version 2") | ||
2341 | # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download | ||
2342 | # folder. | ||
2343 | fetch_and_verify(commit_1, "a.mp3", "version 1") | ||
2344 | |||
2345 | @skipIfNoGitLFS() | ||
2346 | @skipIfNoNetwork() | ||
2347 | def test_real_git_lfs_repo_succeeds_without_lfs_param(self): | ||
2348 | self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master") | ||
2349 | f = self.get_real_git_lfs_file() | ||
2350 | self.assertTrue(os.path.exists(f)) | ||
2351 | self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f)) | ||
2352 | |||
2353 | @skipIfNoGitLFS() | ||
2354 | @skipIfNoNetwork() | ||
2355 | def test_real_git_lfs_repo_succeeds(self): | ||
2356 | self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1") | ||
2357 | f = self.get_real_git_lfs_file() | ||
2358 | self.assertTrue(os.path.exists(f)) | ||
2359 | self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f)) | ||
2360 | |||
2361 | @skipIfNoGitLFS() | ||
2362 | @skipIfNoNetwork() | ||
2363 | def test_real_git_lfs_repo_skips(self): | ||
2364 | self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0") | ||
2365 | f = self.get_real_git_lfs_file() | ||
2366 | # This is the actual non-smudged placeholder file on the repo if git-lfs does not run | ||
2367 | lfs_file = ( | ||
2368 | 'version https://git-lfs.github.com/spec/v1\n' | ||
2369 | 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n' | ||
2370 | 'size 11423554\n' | ||
2371 | ) | ||
2372 | |||
2373 | with open(f) as fh: | ||
2374 | self.assertEqual(lfs_file, fh.read()) | ||
2375 | |||
2376 | @skipIfNoGitLFS() | ||
2122 | def test_lfs_enabled(self): | 2377 | def test_lfs_enabled(self): |
2123 | import shutil | 2378 | import shutil |
2124 | 2379 | ||
2125 | uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir | 2380 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir |
2126 | self.d.setVar('SRC_URI', uri) | 2381 | self.d.setVar('SRC_URI', uri) |
2127 | 2382 | ||
2128 | # Careful: suppress initial attempt at downloading until | 2383 | # With git-lfs installed, test that we can fetch and unpack |
2129 | # we know whether git-lfs is installed. | 2384 | fetcher, ud = self.fetch() |
2130 | fetcher, ud = self.fetch(uri=None, download=False) | 2385 | shutil.rmtree(self.gitdir, ignore_errors=True) |
2131 | self.assertIsNotNone(ud.method._find_git_lfs) | 2386 | fetcher.unpack(self.d.getVar('WORKDIR')) |
2132 | |||
2133 | # If git-lfs can be found, the unpack should be successful. Only | ||
2134 | # attempt this with the real live copy of git-lfs installed. | ||
2135 | if ud.method._find_git_lfs(self.d): | ||
2136 | fetcher.download() | ||
2137 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2138 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2139 | |||
2140 | # If git-lfs cannot be found, the unpack should throw an error | ||
2141 | with self.assertRaises(bb.fetch2.FetchError): | ||
2142 | fetcher.download() | ||
2143 | ud.method._find_git_lfs = lambda d: False | ||
2144 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2145 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2146 | 2387 | ||
2388 | @skipIfNoGitLFS() | ||
2147 | def test_lfs_disabled(self): | 2389 | def test_lfs_disabled(self): |
2148 | import shutil | 2390 | import shutil |
2149 | 2391 | ||
2150 | uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir | 2392 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir |
2151 | self.d.setVar('SRC_URI', uri) | 2393 | self.d.setVar('SRC_URI', uri) |
2152 | 2394 | ||
2153 | # In contrast to test_lfs_enabled(), allow the implicit download | 2395 | # Verify that the fetcher can survive even if the source |
2154 | # done by self.fetch() to occur here. The point of this test case | ||
2155 | # is to verify that the fetcher can survive even if the source | ||
2156 | # repository has Git LFS usage configured. | 2396 | # repository has Git LFS usage configured. |
2157 | fetcher, ud = self.fetch() | 2397 | fetcher, ud = self.fetch() |
2158 | self.assertIsNotNone(ud.method._find_git_lfs) | ||
2159 | |||
2160 | # If git-lfs can be found, the unpack should be successful. A | ||
2161 | # live copy of git-lfs is not required for this case, so | ||
2162 | # unconditionally forge its presence. | ||
2163 | ud.method._find_git_lfs = lambda d: True | ||
2164 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2165 | fetcher.unpack(self.d.getVar('WORKDIR')) | 2398 | fetcher.unpack(self.d.getVar('WORKDIR')) |
2166 | 2399 | ||
2167 | # If git-lfs cannot be found, the unpack should be successful | 2400 | def test_lfs_enabled_not_installed(self): |
2168 | ud.method._find_git_lfs = lambda d: False | 2401 | import shutil |
2169 | shutil.rmtree(self.gitdir, ignore_errors=True) | 2402 | |
2170 | fetcher.unpack(self.d.getVar('WORKDIR')) | 2403 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir |
2404 | self.d.setVar('SRC_URI', uri) | ||
2405 | |||
2406 | # Careful: suppress initial attempt at downloading | ||
2407 | fetcher, ud = self.fetch(uri=None, download=False) | ||
2408 | |||
2409 | # Artificially assert that git-lfs is not installed, so | ||
2410 | # we can verify a failure to unpack in it's absence. | ||
2411 | old_find_git_lfs = ud.method._find_git_lfs | ||
2412 | try: | ||
2413 | # If git-lfs cannot be found, the unpack should throw an error | ||
2414 | with self.assertRaises(bb.fetch2.FetchError): | ||
2415 | fetcher.download() | ||
2416 | ud.method._find_git_lfs = lambda d: False | ||
2417 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2418 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2419 | finally: | ||
2420 | ud.method._find_git_lfs = old_find_git_lfs | ||
2421 | |||
2422 | def test_lfs_disabled_not_installed(self): | ||
2423 | import shutil | ||
2424 | |||
2425 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir | ||
2426 | self.d.setVar('SRC_URI', uri) | ||
2427 | |||
2428 | # Careful: suppress initial attempt at downloading | ||
2429 | fetcher, ud = self.fetch(uri=None, download=False) | ||
2430 | |||
2431 | # Artificially assert that git-lfs is not installed, so | ||
2432 | # we can verify a failure to unpack in it's absence. | ||
2433 | old_find_git_lfs = ud.method._find_git_lfs | ||
2434 | try: | ||
2435 | # Even if git-lfs cannot be found, the unpack should be successful | ||
2436 | fetcher.download() | ||
2437 | ud.method._find_git_lfs = lambda d: False | ||
2438 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2439 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2440 | finally: | ||
2441 | ud.method._find_git_lfs = old_find_git_lfs | ||
2171 | 2442 | ||
2172 | class GitURLWithSpacesTest(FetcherTest): | 2443 | class GitURLWithSpacesTest(FetcherTest): |
2173 | test_git_urls = { | 2444 | test_git_urls = { |
2174 | "git://tfs-example.org:22/tfs/example%20path/example.git" : { | 2445 | "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : { |
2175 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git', | 2446 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master', |
2176 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', | 2447 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', |
2177 | 'path': '/tfs/example path/example.git' | 2448 | 'path': '/tfs/example path/example.git' |
2178 | }, | 2449 | }, |
2179 | "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : { | 2450 | "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : { |
2180 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git', | 2451 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master', |
2181 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', | 2452 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', |
2182 | 'path': '/tfs/example path/example repo.git' | 2453 | 'path': '/tfs/example path/example repo.git' |
2183 | } | 2454 | } |
@@ -2201,11 +2472,129 @@ class GitURLWithSpacesTest(FetcherTest): | |||
2201 | self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) | 2472 | self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) |
2202 | self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) | 2473 | self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) |
2203 | 2474 | ||
2475 | class CrateTest(FetcherTest): | ||
2476 | @skipIfNoNetwork() | ||
2477 | def test_crate_url(self): | ||
2478 | |||
2479 | uri = "crate://crates.io/glob/0.2.11" | ||
2480 | self.d.setVar('SRC_URI', uri) | ||
2481 | |||
2482 | uris = self.d.getVar('SRC_URI').split() | ||
2483 | d = self.d | ||
2484 | |||
2485 | fetcher = bb.fetch2.Fetch(uris, self.d) | ||
2486 | ud = fetcher.ud[fetcher.urls[0]] | ||
2487 | |||
2488 | self.assertIn("name", ud.parm) | ||
2489 | self.assertEqual(ud.parm["name"], "glob-0.2.11") | ||
2490 | self.assertIn("downloadfilename", ud.parm) | ||
2491 | self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate") | ||
2492 | |||
2493 | fetcher.download() | ||
2494 | fetcher.unpack(self.tempdir) | ||
2495 | self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked']) | ||
2496 | self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done']) | ||
2497 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json")) | ||
2498 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs")) | ||
2499 | |||
2500 | @skipIfNoNetwork() | ||
2501 | def test_crate_url_matching_recipe(self): | ||
2502 | |||
2503 | self.d.setVar('BP', 'glob-0.2.11') | ||
2504 | |||
2505 | uri = "crate://crates.io/glob/0.2.11" | ||
2506 | self.d.setVar('SRC_URI', uri) | ||
2507 | |||
2508 | uris = self.d.getVar('SRC_URI').split() | ||
2509 | d = self.d | ||
2510 | |||
2511 | fetcher = bb.fetch2.Fetch(uris, self.d) | ||
2512 | ud = fetcher.ud[fetcher.urls[0]] | ||
2513 | |||
2514 | self.assertIn("name", ud.parm) | ||
2515 | self.assertEqual(ud.parm["name"], "glob-0.2.11") | ||
2516 | self.assertIn("downloadfilename", ud.parm) | ||
2517 | self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate") | ||
2518 | |||
2519 | fetcher.download() | ||
2520 | fetcher.unpack(self.tempdir) | ||
2521 | self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked']) | ||
2522 | self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done']) | ||
2523 | self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs")) | ||
2524 | |||
2525 | @skipIfNoNetwork() | ||
2526 | def test_crate_url_params(self): | ||
2527 | |||
2528 | uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed" | ||
2529 | self.d.setVar('SRC_URI', uri) | ||
2530 | |||
2531 | uris = self.d.getVar('SRC_URI').split() | ||
2532 | d = self.d | ||
2533 | |||
2534 | fetcher = bb.fetch2.Fetch(uris, self.d) | ||
2535 | ud = fetcher.ud[fetcher.urls[0]] | ||
2536 | |||
2537 | self.assertIn("name", ud.parm) | ||
2538 | self.assertEqual(ud.parm["name"], "aho-corasick-renamed") | ||
2539 | self.assertIn("downloadfilename", ud.parm) | ||
2540 | self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate") | ||
2541 | |||
2542 | fetcher.download() | ||
2543 | fetcher.unpack(self.tempdir) | ||
2544 | self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked']) | ||
2545 | self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done']) | ||
2546 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json")) | ||
2547 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs")) | ||
2548 | |||
2549 | @skipIfNoNetwork() | ||
2550 | def test_crate_url_multi(self): | ||
2551 | |||
2552 | uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35" | ||
2553 | self.d.setVar('SRC_URI', uri) | ||
2554 | |||
2555 | uris = self.d.getVar('SRC_URI').split() | ||
2556 | d = self.d | ||
2557 | |||
2558 | fetcher = bb.fetch2.Fetch(uris, self.d) | ||
2559 | ud = fetcher.ud[fetcher.urls[0]] | ||
2560 | |||
2561 | self.assertIn("name", ud.parm) | ||
2562 | self.assertEqual(ud.parm["name"], "glob-0.2.11") | ||
2563 | self.assertIn("downloadfilename", ud.parm) | ||
2564 | self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate") | ||
2565 | |||
2566 | ud = fetcher.ud[fetcher.urls[1]] | ||
2567 | self.assertIn("name", ud.parm) | ||
2568 | self.assertEqual(ud.parm["name"], "time-0.1.35") | ||
2569 | self.assertIn("downloadfilename", ud.parm) | ||
2570 | self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate") | ||
2571 | |||
2572 | fetcher.download() | ||
2573 | fetcher.unpack(self.tempdir) | ||
2574 | self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked']) | ||
2575 | self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done']) | ||
2576 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json")) | ||
2577 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs")) | ||
2578 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json")) | ||
2579 | self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs")) | ||
2580 | |||
2581 | @skipIfNoNetwork() | ||
2582 | def test_crate_incorrect_cksum(self): | ||
2583 | uri = "crate://crates.io/aho-corasick/0.7.20" | ||
2584 | self.d.setVar('SRC_URI', uri) | ||
2585 | self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest()) | ||
2586 | |||
2587 | uris = self.d.getVar('SRC_URI').split() | ||
2588 | |||
2589 | fetcher = bb.fetch2.Fetch(uris, self.d) | ||
2590 | with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"): | ||
2591 | fetcher.download() | ||
2592 | |||
2204 | class NPMTest(FetcherTest): | 2593 | class NPMTest(FetcherTest): |
2205 | def skipIfNoNpm(): | 2594 | def skipIfNoNpm(): |
2206 | import shutil | 2595 | import shutil |
2207 | if not shutil.which('npm'): | 2596 | if not shutil.which('npm'): |
2208 | return unittest.skip('npm not installed, tests being skipped') | 2597 | return unittest.skip('npm not installed') |
2209 | return lambda f: f | 2598 | return lambda f: f |
2210 | 2599 | ||
2211 | @skipIfNoNpm() | 2600 | @skipIfNoNpm() |
@@ -2250,11 +2639,42 @@ class NPMTest(FetcherTest): | |||
2250 | ud = fetcher.ud[fetcher.urls[0]] | 2639 | ud = fetcher.ud[fetcher.urls[0]] |
2251 | fetcher.download() | 2640 | fetcher.download() |
2252 | self.assertTrue(os.path.exists(ud.localpath)) | 2641 | self.assertTrue(os.path.exists(ud.localpath)) |
2642 | |||
2643 | # Setup the mirror by renaming the download directory | ||
2644 | mirrordir = os.path.join(self.tempdir, 'mirror') | ||
2645 | bb.utils.rename(self.dldir, mirrordir) | ||
2646 | os.mkdir(self.dldir) | ||
2647 | |||
2648 | # Configure the premirror to be used | ||
2649 | self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/npm2' % mirrordir) | ||
2650 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') | ||
2651 | |||
2652 | # Fetch again | ||
2653 | self.assertFalse(os.path.exists(ud.localpath)) | ||
2654 | # The npm fetcher doesn't handle that the .resolved file disappears | ||
2655 | # while the fetcher object exists, which it does when we rename the | ||
2656 | # download directory to "mirror" above. Thus we need a new fetcher to go | ||
2657 | # with the now empty download directory. | ||
2658 | fetcher = bb.fetch.Fetch([url], self.d) | ||
2659 | ud = fetcher.ud[fetcher.urls[0]] | ||
2660 | fetcher.download() | ||
2661 | self.assertTrue(os.path.exists(ud.localpath)) | ||
2662 | |||
2663 | @skipIfNoNpm() | ||
2664 | @skipIfNoNetwork() | ||
2665 | def test_npm_premirrors_with_specified_filename(self): | ||
2666 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | ||
2667 | # Fetch once to get a tarball | ||
2668 | fetcher = bb.fetch.Fetch([url], self.d) | ||
2669 | ud = fetcher.ud[fetcher.urls[0]] | ||
2670 | fetcher.download() | ||
2671 | self.assertTrue(os.path.exists(ud.localpath)) | ||
2253 | # Setup the mirror | 2672 | # Setup the mirror |
2254 | mirrordir = os.path.join(self.tempdir, 'mirror') | 2673 | mirrordir = os.path.join(self.tempdir, 'mirror') |
2255 | bb.utils.mkdirhier(mirrordir) | 2674 | bb.utils.mkdirhier(mirrordir) |
2256 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) | 2675 | mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath)) |
2257 | self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) | 2676 | os.replace(ud.localpath, mirrorfilename) |
2677 | self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s' % mirrorfilename) | ||
2258 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') | 2678 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') |
2259 | # Fetch again | 2679 | # Fetch again |
2260 | self.assertFalse(os.path.exists(ud.localpath)) | 2680 | self.assertFalse(os.path.exists(ud.localpath)) |
@@ -2274,7 +2694,7 @@ class NPMTest(FetcherTest): | |||
2274 | mirrordir = os.path.join(self.tempdir, 'mirror') | 2694 | mirrordir = os.path.join(self.tempdir, 'mirror') |
2275 | bb.utils.mkdirhier(mirrordir) | 2695 | bb.utils.mkdirhier(mirrordir) |
2276 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) | 2696 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) |
2277 | self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) | 2697 | self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir) |
2278 | # Update the resolved url to an invalid url | 2698 | # Update the resolved url to an invalid url |
2279 | with open(ud.resolvefile, 'r') as f: | 2699 | with open(ud.resolvefile, 'r') as f: |
2280 | url = f.read() | 2700 | url = f.read() |
@@ -2293,7 +2713,7 @@ class NPMTest(FetcherTest): | |||
2293 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' | 2713 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' |
2294 | fetcher = bb.fetch.Fetch([url], self.d) | 2714 | fetcher = bb.fetch.Fetch([url], self.d) |
2295 | fetcher.download() | 2715 | fetcher.download() |
2296 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'foo-bar.tgz'))) | 2716 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz'))) |
2297 | fetcher.unpack(self.unpackdir) | 2717 | fetcher.unpack(self.unpackdir) |
2298 | unpackdir = os.path.join(self.unpackdir, 'foo', 'bar') | 2718 | unpackdir = os.path.join(self.unpackdir, 'foo', 'bar') |
2299 | self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) | 2719 | self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) |
@@ -2323,7 +2743,7 @@ class NPMTest(FetcherTest): | |||
2323 | @skipIfNoNpm() | 2743 | @skipIfNoNpm() |
2324 | @skipIfNoNetwork() | 2744 | @skipIfNoNetwork() |
2325 | def test_npm_registry_alternate(self): | 2745 | def test_npm_registry_alternate(self): |
2326 | url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2746 | url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0' |
2327 | fetcher = bb.fetch.Fetch([url], self.d) | 2747 | fetcher = bb.fetch.Fetch([url], self.d) |
2328 | fetcher.download() | 2748 | fetcher.download() |
2329 | fetcher.unpack(self.unpackdir) | 2749 | fetcher.unpack(self.unpackdir) |
@@ -2433,6 +2853,45 @@ class NPMTest(FetcherTest): | |||
2433 | 2853 | ||
2434 | @skipIfNoNpm() | 2854 | @skipIfNoNpm() |
2435 | @skipIfNoNetwork() | 2855 | @skipIfNoNetwork() |
2856 | def test_npmsw_git(self): | ||
2857 | swfile = self.create_shrinkwrap_file({ | ||
2858 | 'dependencies': { | ||
2859 | 'cookie': { | ||
2860 | 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', | ||
2861 | 'from': 'github:jshttp/cookie.git' | ||
2862 | } | ||
2863 | } | ||
2864 | }) | ||
2865 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
2866 | fetcher.download() | ||
2867 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) | ||
2868 | |||
2869 | swfile = self.create_shrinkwrap_file({ | ||
2870 | 'dependencies': { | ||
2871 | 'cookie': { | ||
2872 | 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', | ||
2873 | 'from': 'jshttp/cookie.git' | ||
2874 | } | ||
2875 | } | ||
2876 | }) | ||
2877 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
2878 | fetcher.download() | ||
2879 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) | ||
2880 | |||
2881 | swfile = self.create_shrinkwrap_file({ | ||
2882 | 'dependencies': { | ||
2883 | 'nodejs': { | ||
2884 | 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262', | ||
2885 | 'from': 'gitlab:gitlab-examples/nodejs' | ||
2886 | } | ||
2887 | } | ||
2888 | }) | ||
2889 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
2890 | fetcher.download() | ||
2891 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git'))) | ||
2892 | |||
2893 | @skipIfNoNpm() | ||
2894 | @skipIfNoNetwork() | ||
2436 | def test_npmsw_dev(self): | 2895 | def test_npmsw_dev(self): |
2437 | swfile = self.create_shrinkwrap_file({ | 2896 | swfile = self.create_shrinkwrap_file({ |
2438 | 'dependencies': { | 2897 | 'dependencies': { |
@@ -2590,7 +3049,7 @@ class NPMTest(FetcherTest): | |||
2590 | mirrordir = os.path.join(self.tempdir, 'mirror') | 3049 | mirrordir = os.path.join(self.tempdir, 'mirror') |
2591 | bb.utils.mkdirhier(mirrordir) | 3050 | bb.utils.mkdirhier(mirrordir) |
2592 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) | 3051 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) |
2593 | self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) | 3052 | self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/' % mirrordir) |
2594 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') | 3053 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') |
2595 | # Fetch again | 3054 | # Fetch again |
2596 | self.assertFalse(os.path.exists(ud.localpath)) | 3055 | self.assertFalse(os.path.exists(ud.localpath)) |
@@ -2619,7 +3078,7 @@ class NPMTest(FetcherTest): | |||
2619 | mirrordir = os.path.join(self.tempdir, 'mirror') | 3078 | mirrordir = os.path.join(self.tempdir, 'mirror') |
2620 | bb.utils.mkdirhier(mirrordir) | 3079 | bb.utils.mkdirhier(mirrordir) |
2621 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) | 3080 | os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath))) |
2622 | self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir) | 3081 | self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir) |
2623 | # Fetch again with invalid url | 3082 | # Fetch again with invalid url |
2624 | self.assertFalse(os.path.exists(ud.localpath)) | 3083 | self.assertFalse(os.path.exists(ud.localpath)) |
2625 | swfile = self.create_shrinkwrap_file({ | 3084 | swfile = self.create_shrinkwrap_file({ |
@@ -2634,3 +3093,273 @@ class NPMTest(FetcherTest): | |||
2634 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | 3093 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) |
2635 | fetcher.download() | 3094 | fetcher.download() |
2636 | self.assertTrue(os.path.exists(ud.localpath)) | 3095 | self.assertTrue(os.path.exists(ud.localpath)) |
3096 | |||
3097 | class GitSharedTest(FetcherTest): | ||
3098 | def setUp(self): | ||
3099 | super(GitSharedTest, self).setUp() | ||
3100 | self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https" | ||
3101 | self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') | ||
3102 | self.d.setVar("__BBSRCREV_SEEN", "1") | ||
3103 | |||
3104 | @skipIfNoNetwork() | ||
3105 | def test_shared_unpack(self): | ||
3106 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3107 | |||
3108 | fetcher.download() | ||
3109 | fetcher.unpack(self.unpackdir) | ||
3110 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') | ||
3111 | self.assertTrue(os.path.exists(alt)) | ||
3112 | |||
3113 | @skipIfNoNetwork() | ||
3114 | def test_noshared_unpack(self): | ||
3115 | self.d.setVar('BB_GIT_NOSHARED', '1') | ||
3116 | self.unpackdir += '_noshared' | ||
3117 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3118 | |||
3119 | fetcher.download() | ||
3120 | fetcher.unpack(self.unpackdir) | ||
3121 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') | ||
3122 | self.assertFalse(os.path.exists(alt)) | ||
3123 | |||
3124 | |||
3125 | class FetchPremirroronlyLocalTest(FetcherTest): | ||
3126 | |||
3127 | def setUp(self): | ||
3128 | super(FetchPremirroronlyLocalTest, self).setUp() | ||
3129 | self.mirrordir = os.path.join(self.tempdir, "mirrors") | ||
3130 | os.mkdir(self.mirrordir) | ||
3131 | self.reponame = "bitbake" | ||
3132 | self.gitdir = os.path.join(self.tempdir, "git", self.reponame) | ||
3133 | self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https" | ||
3134 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | ||
3135 | self.d.setVar("BB_NO_NETWORK", "1") | ||
3136 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | ||
3137 | self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz" | ||
3138 | self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname) | ||
3139 | self.testfilename = "bitbake-fetch.test" | ||
3140 | |||
3141 | def make_git_repo(self): | ||
3142 | recipeurl = "git:/git.fake.repo/bitbake" | ||
3143 | os.makedirs(self.gitdir) | ||
3144 | self.git_init(cwd=self.gitdir) | ||
3145 | for i in range(0): | ||
3146 | self.git_new_commit() | ||
3147 | bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) | ||
3148 | |||
3149 | def git_new_commit(self): | ||
3150 | import random | ||
3151 | os.unlink(os.path.join(self.mirrordir, self.mirrorname)) | ||
3152 | branch = self.git("branch --show-current", self.gitdir).split() | ||
3153 | with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile: | ||
3154 | testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random())) | ||
3155 | self.git("add {}".format(self.testfilename), self.gitdir) | ||
3156 | self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir) | ||
3157 | bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) | ||
3158 | return self.git("rev-parse HEAD", self.gitdir).strip() | ||
3159 | |||
3160 | def git_new_branch(self, name): | ||
3161 | self.git_new_commit() | ||
3162 | head = self.git("rev-parse HEAD", self.gitdir).strip() | ||
3163 | self.git("checkout -b {}".format(name), self.gitdir) | ||
3164 | newrev = self.git_new_commit() | ||
3165 | self.git("checkout {}".format(head), self.gitdir) | ||
3166 | return newrev | ||
3167 | |||
3168 | def test_mirror_multiple_fetches(self): | ||
3169 | self.make_git_repo() | ||
3170 | self.d.setVar("SRCREV", self.git_new_commit()) | ||
3171 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3172 | fetcher.download() | ||
3173 | fetcher.unpack(self.unpackdir) | ||
3174 | ## New commit in premirror. it's not in the download_dir | ||
3175 | self.d.setVar("SRCREV", self.git_new_commit()) | ||
3176 | fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3177 | fetcher2.download() | ||
3178 | fetcher2.unpack(self.unpackdir) | ||
3179 | ## New commit in premirror. it's not in the download_dir | ||
3180 | self.d.setVar("SRCREV", self.git_new_commit()) | ||
3181 | fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3182 | fetcher3.download() | ||
3183 | fetcher3.unpack(self.unpackdir) | ||
3184 | |||
3185 | |||
3186 | def test_mirror_commit_nonexistent(self): | ||
3187 | self.make_git_repo() | ||
3188 | self.d.setVar("SRCREV", "0"*40) | ||
3189 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3190 | with self.assertRaises(bb.fetch2.NetworkAccess): | ||
3191 | fetcher.download() | ||
3192 | |||
3193 | def test_mirror_commit_exists(self): | ||
3194 | self.make_git_repo() | ||
3195 | self.d.setVar("SRCREV", self.git_new_commit()) | ||
3196 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3197 | fetcher.download() | ||
3198 | fetcher.unpack(self.unpackdir) | ||
3199 | |||
3200 | def test_mirror_tarball_nonexistent(self): | ||
3201 | self.d.setVar("SRCREV", "0"*40) | ||
3202 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3203 | with self.assertRaises(bb.fetch2.NetworkAccess): | ||
3204 | fetcher.download() | ||
3205 | |||
3206 | def test_mirror_tarball_multiple_branches(self): | ||
3207 | """ | ||
3208 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3209 | both branches have required revisions | ||
3210 | """ | ||
3211 | self.make_git_repo() | ||
3212 | branch1rev = self.git_new_branch("testbranch1") | ||
3213 | branch2rev = self.git_new_branch("testbranch2") | ||
3214 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2" | ||
3215 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3216 | self.d.setVar("SRCREV_branch2", branch2rev) | ||
3217 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3218 | self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist") | ||
3219 | fetcher.download() | ||
3220 | fetcher.unpack(os.path.join(self.tempdir, "unpacked")) | ||
3221 | unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename) | ||
3222 | self.assertTrue(os.path.exists(unpacked), "Repo has not been unpackaged properly!") | ||
3223 | with open(unpacked, 'r') as f: | ||
3224 | content = f.read() | ||
3225 | ## We expect to see testbranch1 in the file, not master, not testbranch2 | ||
3226 | self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!") | ||
3227 | |||
3228 | def test_mirror_tarball_multiple_branches_nobranch(self): | ||
3229 | """ | ||
3230 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3231 | Unbalanced name/branches raises ParameterError | ||
3232 | """ | ||
3233 | self.make_git_repo() | ||
3234 | branch1rev = self.git_new_branch("testbranch1") | ||
3235 | branch2rev = self.git_new_branch("testbranch2") | ||
3236 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2" | ||
3237 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3238 | self.d.setVar("SRCREV_branch2", branch2rev) | ||
3239 | with self.assertRaises(bb.fetch2.ParameterError): | ||
3240 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3241 | |||
3242 | def test_mirror_tarball_multiple_branches_norev(self): | ||
3243 | """ | ||
3244 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3245 | one of the branches specifies non existing SRCREV | ||
3246 | """ | ||
3247 | self.make_git_repo() | ||
3248 | branch1rev = self.git_new_branch("testbranch1") | ||
3249 | branch2rev = self.git_new_branch("testbranch2") | ||
3250 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2" | ||
3251 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3252 | self.d.setVar("SRCREV_branch2", "0"*40) | ||
3253 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3254 | self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist") | ||
3255 | with self.assertRaises(bb.fetch2.NetworkAccess): | ||
3256 | fetcher.download() | ||
3257 | |||
3258 | |||
3259 | class FetchPremirroronlyNetworkTest(FetcherTest): | ||
3260 | |||
3261 | def setUp(self): | ||
3262 | super(FetchPremirroronlyNetworkTest, self).setUp() | ||
3263 | self.mirrordir = os.path.join(self.tempdir, "mirrors") | ||
3264 | os.mkdir(self.mirrordir) | ||
3265 | self.reponame = "fstests" | ||
3266 | self.clonedir = os.path.join(self.tempdir, "git") | ||
3267 | self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame)) | ||
3268 | self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https" | ||
3269 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | ||
3270 | self.d.setVar("BB_NO_NETWORK", "0") | ||
3271 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | ||
3272 | |||
3273 | def make_git_repo(self): | ||
3274 | import shutil | ||
3275 | self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz" | ||
3276 | os.makedirs(self.clonedir) | ||
3277 | self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir) | ||
3278 | bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) | ||
3279 | shutil.rmtree(self.clonedir) | ||
3280 | |||
3281 | @skipIfNoNetwork() | ||
3282 | def test_mirror_tarball_updated(self): | ||
3283 | self.make_git_repo() | ||
3284 | ## Upstream commit is in the mirror | ||
3285 | self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec") | ||
3286 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3287 | fetcher.download() | ||
3288 | |||
3289 | @skipIfNoNetwork() | ||
3290 | def test_mirror_tarball_outdated(self): | ||
3291 | self.make_git_repo() | ||
3292 | ## Upstream commit not in the mirror | ||
3293 | self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f") | ||
3294 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3295 | with self.assertRaises(bb.fetch2.NetworkAccess): | ||
3296 | fetcher.download() | ||
3297 | |||
3298 | class FetchPremirroronlyMercurialTest(FetcherTest): | ||
3299 | """ Test for premirrors with mercurial repos | ||
3300 | the test covers also basic hg:// clone (see fetch_and_create_tarball | ||
3301 | """ | ||
3302 | def skipIfNoHg(): | ||
3303 | import shutil | ||
3304 | if not shutil.which('hg'): | ||
3305 | return unittest.skip('Mercurial not installed') | ||
3306 | return lambda f: f | ||
3307 | |||
3308 | def setUp(self): | ||
3309 | super(FetchPremirroronlyMercurialTest, self).setUp() | ||
3310 | self.mirrordir = os.path.join(self.tempdir, "mirrors") | ||
3311 | os.mkdir(self.mirrordir) | ||
3312 | self.reponame = "libgnt" | ||
3313 | self.clonedir = os.path.join(self.tempdir, "hg") | ||
3314 | self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt" | ||
3315 | self.d.setVar("SRCREV", "53e8b422faaf") | ||
3316 | self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz" | ||
3317 | |||
3318 | def fetch_and_create_tarball(self): | ||
3319 | """ | ||
3320 | Ask bitbake to download repo and prepare mirror tarball for us | ||
3321 | """ | ||
3322 | self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") | ||
3323 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3324 | fetcher.download() | ||
3325 | mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname) | ||
3326 | self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile)) | ||
3327 | ## moving tarball to mirror directory | ||
3328 | os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname)) | ||
3329 | self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0") | ||
3330 | |||
3331 | |||
3332 | @skipIfNoNetwork() | ||
3333 | @skipIfNoHg() | ||
3334 | def test_premirror_mercurial(self): | ||
3335 | self.fetch_and_create_tarball() | ||
3336 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | ||
3337 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | ||
3338 | self.d.setVar("BB_NO_NETWORK", "1") | ||
3339 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3340 | fetcher.download() | ||
3341 | |||
3342 | class FetchPremirroronlyBrokenTarball(FetcherTest): | ||
3343 | |||
3344 | def setUp(self): | ||
3345 | super(FetchPremirroronlyBrokenTarball, self).setUp() | ||
3346 | self.mirrordir = os.path.join(self.tempdir, "mirrors") | ||
3347 | os.mkdir(self.mirrordir) | ||
3348 | self.reponame = "bitbake" | ||
3349 | self.gitdir = os.path.join(self.tempdir, "git", self.reponame) | ||
3350 | self.recipe_url = "git://git.fake.repo/bitbake;protocol=https" | ||
3351 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | ||
3352 | self.d.setVar("BB_NO_NETWORK", "1") | ||
3353 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | ||
3354 | self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz" | ||
3355 | with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz: | ||
3356 | targz.write("This is not tar.gz file!") | ||
3357 | |||
3358 | def test_mirror_broken_download(self): | ||
3359 | import sys | ||
3360 | self.d.setVar("SRCREV", "0"*40) | ||
3361 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3362 | with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs: | ||
3363 | fetcher.download() | ||
3364 | output = "".join(logs.output) | ||
3365 | self.assertFalse(" not a git repository (or any parent up to mount point /)" in output) | ||
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py index 9e21e18425..72d1962e7e 100644 --- a/bitbake/lib/bb/tests/parse.py +++ b/bitbake/lib/bb/tests/parse.py | |||
@@ -98,8 +98,8 @@ exportD = "d" | |||
98 | 98 | ||
99 | 99 | ||
100 | overridetest = """ | 100 | overridetest = """ |
101 | RRECOMMENDS_${PN} = "a" | 101 | RRECOMMENDS:${PN} = "a" |
102 | RRECOMMENDS_${PN}_libc = "b" | 102 | RRECOMMENDS:${PN}:libc = "b" |
103 | OVERRIDES = "libc:${PN}" | 103 | OVERRIDES = "libc:${PN}" |
104 | PN = "gtk+" | 104 | PN = "gtk+" |
105 | """ | 105 | """ |
@@ -110,16 +110,16 @@ PN = "gtk+" | |||
110 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") | 110 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") |
111 | bb.data.expandKeys(d) | 111 | bb.data.expandKeys(d) |
112 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") | 112 | self.assertEqual(d.getVar("RRECOMMENDS"), "b") |
113 | d.setVar("RRECOMMENDS_gtk+", "c") | 113 | d.setVar("RRECOMMENDS:gtk+", "c") |
114 | self.assertEqual(d.getVar("RRECOMMENDS"), "c") | 114 | self.assertEqual(d.getVar("RRECOMMENDS"), "c") |
115 | 115 | ||
116 | overridetest2 = """ | 116 | overridetest2 = """ |
117 | EXTRA_OECONF = "" | 117 | EXTRA_OECONF = "" |
118 | EXTRA_OECONF_class-target = "b" | 118 | EXTRA_OECONF:class-target = "b" |
119 | EXTRA_OECONF_append = " c" | 119 | EXTRA_OECONF:append = " c" |
120 | """ | 120 | """ |
121 | 121 | ||
122 | def test_parse_overrides(self): | 122 | def test_parse_overrides2(self): |
123 | f = self.parsehelper(self.overridetest2) | 123 | f = self.parsehelper(self.overridetest2) |
124 | d = bb.parse.handle(f.name, self.d)[''] | 124 | d = bb.parse.handle(f.name, self.d)[''] |
125 | d.appendVar("EXTRA_OECONF", " d") | 125 | d.appendVar("EXTRA_OECONF", " d") |
@@ -128,7 +128,7 @@ EXTRA_OECONF_append = " c" | |||
128 | 128 | ||
129 | overridetest3 = """ | 129 | overridetest3 = """ |
130 | DESCRIPTION = "A" | 130 | DESCRIPTION = "A" |
131 | DESCRIPTION_${PN}-dev = "${DESCRIPTION} B" | 131 | DESCRIPTION:${PN}-dev = "${DESCRIPTION} B" |
132 | PN = "bc" | 132 | PN = "bc" |
133 | """ | 133 | """ |
134 | 134 | ||
@@ -136,15 +136,15 @@ PN = "bc" | |||
136 | f = self.parsehelper(self.overridetest3) | 136 | f = self.parsehelper(self.overridetest3) |
137 | d = bb.parse.handle(f.name, self.d)[''] | 137 | d = bb.parse.handle(f.name, self.d)[''] |
138 | bb.data.expandKeys(d) | 138 | bb.data.expandKeys(d) |
139 | self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B") | 139 | self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B") |
140 | d.setVar("DESCRIPTION", "E") | 140 | d.setVar("DESCRIPTION", "E") |
141 | d.setVar("DESCRIPTION_bc-dev", "C D") | 141 | d.setVar("DESCRIPTION:bc-dev", "C D") |
142 | d.setVar("OVERRIDES", "bc-dev") | 142 | d.setVar("OVERRIDES", "bc-dev") |
143 | self.assertEqual(d.getVar("DESCRIPTION"), "C D") | 143 | self.assertEqual(d.getVar("DESCRIPTION"), "C D") |
144 | 144 | ||
145 | 145 | ||
146 | classextend = """ | 146 | classextend = """ |
147 | VAR_var_override1 = "B" | 147 | VAR_var:override1 = "B" |
148 | EXTRA = ":override1" | 148 | EXTRA = ":override1" |
149 | OVERRIDES = "nothing${EXTRA}" | 149 | OVERRIDES = "nothing${EXTRA}" |
150 | 150 | ||
@@ -164,6 +164,7 @@ python () { | |||
164 | # become unset/disappear. | 164 | # become unset/disappear. |
165 | # | 165 | # |
166 | def test_parse_classextend_contamination(self): | 166 | def test_parse_classextend_contamination(self): |
167 | self.d.setVar("__bbclasstype", "recipe") | ||
167 | cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass") | 168 | cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass") |
168 | #clsname = os.path.basename(cls.name).replace(".bbclass", "") | 169 | #clsname = os.path.basename(cls.name).replace(".bbclass", "") |
169 | self.classextend = self.classextend.replace("###CLASS###", cls.name) | 170 | self.classextend = self.classextend.replace("###CLASS###", cls.name) |
@@ -185,12 +186,158 @@ deltask ${EMPTYVAR} | |||
185 | """ | 186 | """ |
186 | def test_parse_addtask_deltask(self): | 187 | def test_parse_addtask_deltask(self): |
187 | import sys | 188 | import sys |
188 | f = self.parsehelper(self.addtask_deltask) | 189 | |
190 | with self.assertLogs() as logs: | ||
191 | f = self.parsehelper(self.addtask_deltask) | ||
192 | d = bb.parse.handle(f.name, self.d)[''] | ||
193 | |||
194 | output = "".join(logs.output) | ||
195 | self.assertTrue("addtask contained multiple 'before' keywords" in output) | ||
196 | self.assertTrue("addtask contained multiple 'after' keywords" in output) | ||
197 | self.assertTrue('addtask ignored: " do_patch"' in output) | ||
198 | #self.assertTrue('dependent task do_foo for do_patch does not exist' in output) | ||
199 | |||
200 | broken_multiline_comment = """ | ||
201 | # First line of comment \\ | ||
202 | # Second line of comment \\ | ||
203 | |||
204 | """ | ||
205 | def test_parse_broken_multiline_comment(self): | ||
206 | f = self.parsehelper(self.broken_multiline_comment) | ||
207 | with self.assertRaises(bb.BBHandledException): | ||
208 | d = bb.parse.handle(f.name, self.d)[''] | ||
209 | |||
210 | |||
211 | comment_in_var = """ | ||
212 | VAR = " \\ | ||
213 | SOMEVAL \\ | ||
214 | # some comment \\ | ||
215 | SOMEOTHERVAL \\ | ||
216 | " | ||
217 | """ | ||
218 | def test_parse_comment_in_var(self): | ||
219 | f = self.parsehelper(self.comment_in_var) | ||
220 | with self.assertRaises(bb.BBHandledException): | ||
221 | d = bb.parse.handle(f.name, self.d)[''] | ||
222 | |||
223 | |||
224 | at_sign_in_var_flag = """ | ||
225 | A[flag@.service] = "nonet" | ||
226 | B[flag@.target] = "ntb" | ||
227 | C[f] = "flag" | ||
228 | |||
229 | unset A[flag@.service] | ||
230 | """ | ||
231 | def test_parse_at_sign_in_var_flag(self): | ||
232 | f = self.parsehelper(self.at_sign_in_var_flag) | ||
189 | d = bb.parse.handle(f.name, self.d)[''] | 233 | d = bb.parse.handle(f.name, self.d)[''] |
234 | self.assertEqual(d.getVar("A"), None) | ||
235 | self.assertEqual(d.getVar("B"), None) | ||
236 | self.assertEqual(d.getVarFlag("A","flag@.service"), None) | ||
237 | self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb") | ||
238 | self.assertEqual(d.getVarFlag("C","f"), "flag") | ||
239 | |||
240 | def test_parse_invalid_at_sign_in_var_flag(self): | ||
241 | invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f") | ||
242 | f = self.parsehelper(invalid_at_sign) | ||
243 | with self.assertRaises(bb.parse.ParseError): | ||
244 | d = bb.parse.handle(f.name, self.d)[''] | ||
245 | |||
246 | export_function_recipe = """ | ||
247 | inherit someclass | ||
248 | """ | ||
249 | |||
250 | export_function_recipe2 = """ | ||
251 | inherit someclass | ||
252 | |||
253 | do_compile () { | ||
254 | false | ||
255 | } | ||
256 | |||
257 | python do_compilepython () { | ||
258 | bb.note("Something else") | ||
259 | } | ||
260 | |||
261 | """ | ||
262 | export_function_class = """ | ||
263 | someclass_do_compile() { | ||
264 | true | ||
265 | } | ||
266 | |||
267 | python someclass_do_compilepython () { | ||
268 | bb.note("Something") | ||
269 | } | ||
270 | |||
271 | EXPORT_FUNCTIONS do_compile do_compilepython | ||
272 | """ | ||
273 | |||
274 | export_function_class2 = """ | ||
275 | secondclass_do_compile() { | ||
276 | true | ||
277 | } | ||
278 | |||
279 | python secondclass_do_compilepython () { | ||
280 | bb.note("Something") | ||
281 | } | ||
282 | |||
283 | EXPORT_FUNCTIONS do_compile do_compilepython | ||
284 | """ | ||
190 | 285 | ||
191 | stdout = sys.stdout.getvalue() | 286 | def test_parse_export_functions(self): |
192 | self.assertTrue("addtask contained multiple 'before' keywords" in stdout) | 287 | def check_function_flags(d): |
193 | self.assertTrue("addtask contained multiple 'after' keywords" in stdout) | 288 | self.assertEqual(d.getVarFlag("do_compile", "func"), 1) |
194 | self.assertTrue('addtask ignored: " do_patch"' in stdout) | 289 | self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1) |
195 | #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout) | 290 | self.assertEqual(d.getVarFlag("do_compile", "python"), None) |
291 | self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1") | ||
292 | |||
293 | with tempfile.TemporaryDirectory() as tempdir: | ||
294 | self.d.setVar("__bbclasstype", "recipe") | ||
295 | recipename = tempdir + "/recipe.bb" | ||
296 | os.makedirs(tempdir + "/classes") | ||
297 | with open(tempdir + "/classes/someclass.bbclass", "w") as f: | ||
298 | f.write(self.export_function_class) | ||
299 | f.flush() | ||
300 | with open(tempdir + "/classes/secondclass.bbclass", "w") as f: | ||
301 | f.write(self.export_function_class2) | ||
302 | f.flush() | ||
303 | |||
304 | with open(recipename, "w") as f: | ||
305 | f.write(self.export_function_recipe) | ||
306 | f.flush() | ||
307 | os.chdir(tempdir) | ||
308 | d = bb.parse.handle(recipename, bb.data.createCopy(self.d))[''] | ||
309 | self.assertIn("someclass_do_compile", d.getVar("do_compile")) | ||
310 | self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython")) | ||
311 | check_function_flags(d) | ||
312 | |||
313 | recipename2 = tempdir + "/recipe2.bb" | ||
314 | with open(recipename2, "w") as f: | ||
315 | f.write(self.export_function_recipe2) | ||
316 | f.flush() | ||
317 | |||
318 | d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))[''] | ||
319 | self.assertNotIn("someclass_do_compile", d.getVar("do_compile")) | ||
320 | self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython")) | ||
321 | self.assertIn("false", d.getVar("do_compile")) | ||
322 | self.assertIn("else", d.getVar("do_compilepython")) | ||
323 | check_function_flags(d) | ||
324 | |||
325 | with open(recipename, "a+") as f: | ||
326 | f.write("\ninherit secondclass\n") | ||
327 | f.flush() | ||
328 | with open(recipename2, "a+") as f: | ||
329 | f.write("\ninherit secondclass\n") | ||
330 | f.flush() | ||
331 | |||
332 | d = bb.parse.handle(recipename, bb.data.createCopy(self.d))[''] | ||
333 | self.assertIn("secondclass_do_compile", d.getVar("do_compile")) | ||
334 | self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython")) | ||
335 | check_function_flags(d) | ||
336 | |||
337 | d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))[''] | ||
338 | self.assertNotIn("someclass_do_compile", d.getVar("do_compile")) | ||
339 | self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython")) | ||
340 | self.assertIn("false", d.getVar("do_compile")) | ||
341 | self.assertIn("else", d.getVar("do_compilepython")) | ||
342 | check_function_flags(d) | ||
196 | 343 | ||
diff --git a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf index efebf001a9..05d7fd07dd 100644 --- a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf +++ b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf | |||
@@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}" | |||
12 | T = "${TMPDIR}/workdir/${PN}/temp" | 12 | T = "${TMPDIR}/workdir/${PN}/temp" |
13 | BB_NUMBER_THREADS = "4" | 13 | BB_NUMBER_THREADS = "4" |
14 | 14 | ||
15 | BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE" | 15 | BB_BASEHASH_IGNORE_VARS = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK" |
16 | 16 | ||
17 | include conf/multiconfig/${BB_CURRENT_MC}.conf | 17 | include conf/multiconfig/${BB_CURRENT_MC}.conf |
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py index 3d51779d6c..cc87e8d6a8 100644 --- a/bitbake/lib/bb/tests/runqueue.py +++ b/bitbake/lib/bb/tests/runqueue.py | |||
@@ -29,13 +29,14 @@ class RunQueueTests(unittest.TestCase): | |||
29 | def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): | 29 | def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): |
30 | env = os.environ.copy() | 30 | env = os.environ.copy() |
31 | env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) | 31 | env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) |
32 | env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS" | 32 | env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR" |
33 | env["SSTATEVALID"] = sstatevalid | 33 | env["SSTATEVALID"] = sstatevalid |
34 | env["SLOWTASKS"] = slowtasks | 34 | env["SLOWTASKS"] = slowtasks |
35 | env["TOPDIR"] = builddir | ||
35 | if extraenv: | 36 | if extraenv: |
36 | for k in extraenv: | 37 | for k in extraenv: |
37 | env[k] = extraenv[k] | 38 | env[k] = extraenv[k] |
38 | env["BB_ENV_EXTRAWHITE"] = env["BB_ENV_EXTRAWHITE"] + " " + k | 39 | env["BB_ENV_PASSTHROUGH_ADDITIONS"] = env["BB_ENV_PASSTHROUGH_ADDITIONS"] + " " + k |
39 | try: | 40 | try: |
40 | output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) | 41 | output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) |
41 | print(output) | 42 | print(output) |
@@ -58,6 +59,8 @@ class RunQueueTests(unittest.TestCase): | |||
58 | expected = ['a1:' + x for x in self.alltasks] | 59 | expected = ['a1:' + x for x in self.alltasks] |
59 | self.assertEqual(set(tasks), set(expected)) | 60 | self.assertEqual(set(tasks), set(expected)) |
60 | 61 | ||
62 | self.shutdown(tempdir) | ||
63 | |||
61 | def test_single_setscenevalid(self): | 64 | def test_single_setscenevalid(self): |
62 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 65 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
63 | cmd = ["bitbake", "a1"] | 66 | cmd = ["bitbake", "a1"] |
@@ -68,6 +71,8 @@ class RunQueueTests(unittest.TestCase): | |||
68 | 'a1:populate_sysroot', 'a1:build'] | 71 | 'a1:populate_sysroot', 'a1:build'] |
69 | self.assertEqual(set(tasks), set(expected)) | 72 | self.assertEqual(set(tasks), set(expected)) |
70 | 73 | ||
74 | self.shutdown(tempdir) | ||
75 | |||
71 | def test_intermediate_setscenevalid(self): | 76 | def test_intermediate_setscenevalid(self): |
72 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 77 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
73 | cmd = ["bitbake", "a1"] | 78 | cmd = ["bitbake", "a1"] |
@@ -77,6 +82,8 @@ class RunQueueTests(unittest.TestCase): | |||
77 | 'a1:populate_sysroot_setscene', 'a1:build'] | 82 | 'a1:populate_sysroot_setscene', 'a1:build'] |
78 | self.assertEqual(set(tasks), set(expected)) | 83 | self.assertEqual(set(tasks), set(expected)) |
79 | 84 | ||
85 | self.shutdown(tempdir) | ||
86 | |||
80 | def test_intermediate_notcovered(self): | 87 | def test_intermediate_notcovered(self): |
81 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 88 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
82 | cmd = ["bitbake", "a1"] | 89 | cmd = ["bitbake", "a1"] |
@@ -86,6 +93,8 @@ class RunQueueTests(unittest.TestCase): | |||
86 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] | 93 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] |
87 | self.assertEqual(set(tasks), set(expected)) | 94 | self.assertEqual(set(tasks), set(expected)) |
88 | 95 | ||
96 | self.shutdown(tempdir) | ||
97 | |||
89 | def test_all_setscenevalid(self): | 98 | def test_all_setscenevalid(self): |
90 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 99 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
91 | cmd = ["bitbake", "a1"] | 100 | cmd = ["bitbake", "a1"] |
@@ -95,6 +104,8 @@ class RunQueueTests(unittest.TestCase): | |||
95 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] | 104 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] |
96 | self.assertEqual(set(tasks), set(expected)) | 105 | self.assertEqual(set(tasks), set(expected)) |
97 | 106 | ||
107 | self.shutdown(tempdir) | ||
108 | |||
98 | def test_no_settasks(self): | 109 | def test_no_settasks(self): |
99 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 110 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
100 | cmd = ["bitbake", "a1", "-c", "patch"] | 111 | cmd = ["bitbake", "a1", "-c", "patch"] |
@@ -103,6 +114,8 @@ class RunQueueTests(unittest.TestCase): | |||
103 | expected = ['a1:fetch', 'a1:unpack', 'a1:patch'] | 114 | expected = ['a1:fetch', 'a1:unpack', 'a1:patch'] |
104 | self.assertEqual(set(tasks), set(expected)) | 115 | self.assertEqual(set(tasks), set(expected)) |
105 | 116 | ||
117 | self.shutdown(tempdir) | ||
118 | |||
106 | def test_mix_covered_notcovered(self): | 119 | def test_mix_covered_notcovered(self): |
107 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 120 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
108 | cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"] | 121 | cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"] |
@@ -111,6 +124,7 @@ class RunQueueTests(unittest.TestCase): | |||
111 | expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene'] | 124 | expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene'] |
112 | self.assertEqual(set(tasks), set(expected)) | 125 | self.assertEqual(set(tasks), set(expected)) |
113 | 126 | ||
127 | self.shutdown(tempdir) | ||
114 | 128 | ||
115 | # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks | 129 | # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks |
116 | def test_mixed_direct_tasks_setscene_tasks(self): | 130 | def test_mixed_direct_tasks_setscene_tasks(self): |
@@ -122,6 +136,8 @@ class RunQueueTests(unittest.TestCase): | |||
122 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] | 136 | 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] |
123 | self.assertEqual(set(tasks), set(expected)) | 137 | self.assertEqual(set(tasks), set(expected)) |
124 | 138 | ||
139 | self.shutdown(tempdir) | ||
140 | |||
125 | # This test slows down the execution of do_package_setscene until after other real tasks have | 141 | # This test slows down the execution of do_package_setscene until after other real tasks have |
126 | # started running which tests for a bug where tasks were being lost from the buildable list of real | 142 | # started running which tests for a bug where tasks were being lost from the buildable list of real |
127 | # tasks if they weren't in tasks_covered or tasks_notcovered | 143 | # tasks if they weren't in tasks_covered or tasks_notcovered |
@@ -136,12 +152,14 @@ class RunQueueTests(unittest.TestCase): | |||
136 | 'a1:populate_sysroot', 'a1:build'] | 152 | 'a1:populate_sysroot', 'a1:build'] |
137 | self.assertEqual(set(tasks), set(expected)) | 153 | self.assertEqual(set(tasks), set(expected)) |
138 | 154 | ||
139 | def test_setscenewhitelist(self): | 155 | self.shutdown(tempdir) |
156 | |||
157 | def test_setscene_ignore_tasks(self): | ||
140 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 158 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
141 | cmd = ["bitbake", "a1"] | 159 | cmd = ["bitbake", "a1"] |
142 | extraenv = { | 160 | extraenv = { |
143 | "BB_SETSCENE_ENFORCE" : "1", | 161 | "BB_SETSCENE_ENFORCE" : "1", |
144 | "BB_SETSCENE_ENFORCE_WHITELIST" : "a1:do_package_write_rpm a1:do_build" | 162 | "BB_SETSCENE_ENFORCE_IGNORE_TASKS" : "a1:do_package_write_rpm a1:do_build" |
145 | } | 163 | } |
146 | sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot" | 164 | sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot" |
147 | tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv) | 165 | tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv) |
@@ -149,6 +167,8 @@ class RunQueueTests(unittest.TestCase): | |||
149 | 'a1:populate_sysroot_setscene', 'a1:package_setscene'] | 167 | 'a1:populate_sysroot_setscene', 'a1:package_setscene'] |
150 | self.assertEqual(set(tasks), set(expected)) | 168 | self.assertEqual(set(tasks), set(expected)) |
151 | 169 | ||
170 | self.shutdown(tempdir) | ||
171 | |||
152 | # Tests for problems with dependencies between setscene tasks | 172 | # Tests for problems with dependencies between setscene tasks |
153 | def test_no_setscenevalid_harddeps(self): | 173 | def test_no_setscenevalid_harddeps(self): |
154 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 174 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
@@ -162,6 +182,8 @@ class RunQueueTests(unittest.TestCase): | |||
162 | 'd1:populate_sysroot', 'd1:build'] | 182 | 'd1:populate_sysroot', 'd1:build'] |
163 | self.assertEqual(set(tasks), set(expected)) | 183 | self.assertEqual(set(tasks), set(expected)) |
164 | 184 | ||
185 | self.shutdown(tempdir) | ||
186 | |||
165 | def test_no_setscenevalid_withdeps(self): | 187 | def test_no_setscenevalid_withdeps(self): |
166 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 188 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
167 | cmd = ["bitbake", "b1"] | 189 | cmd = ["bitbake", "b1"] |
@@ -172,6 +194,8 @@ class RunQueueTests(unittest.TestCase): | |||
172 | expected.remove('a1:package_qa') | 194 | expected.remove('a1:package_qa') |
173 | self.assertEqual(set(tasks), set(expected)) | 195 | self.assertEqual(set(tasks), set(expected)) |
174 | 196 | ||
197 | self.shutdown(tempdir) | ||
198 | |||
175 | def test_single_a1_setscenevalid_withdeps(self): | 199 | def test_single_a1_setscenevalid_withdeps(self): |
176 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 200 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
177 | cmd = ["bitbake", "b1"] | 201 | cmd = ["bitbake", "b1"] |
@@ -182,6 +206,8 @@ class RunQueueTests(unittest.TestCase): | |||
182 | 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks] | 206 | 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks] |
183 | self.assertEqual(set(tasks), set(expected)) | 207 | self.assertEqual(set(tasks), set(expected)) |
184 | 208 | ||
209 | self.shutdown(tempdir) | ||
210 | |||
185 | def test_single_b1_setscenevalid_withdeps(self): | 211 | def test_single_b1_setscenevalid_withdeps(self): |
186 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 212 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
187 | cmd = ["bitbake", "b1"] | 213 | cmd = ["bitbake", "b1"] |
@@ -193,6 +219,8 @@ class RunQueueTests(unittest.TestCase): | |||
193 | expected.remove('b1:package') | 219 | expected.remove('b1:package') |
194 | self.assertEqual(set(tasks), set(expected)) | 220 | self.assertEqual(set(tasks), set(expected)) |
195 | 221 | ||
222 | self.shutdown(tempdir) | ||
223 | |||
196 | def test_intermediate_setscenevalid_withdeps(self): | 224 | def test_intermediate_setscenevalid_withdeps(self): |
197 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 225 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
198 | cmd = ["bitbake", "b1"] | 226 | cmd = ["bitbake", "b1"] |
@@ -203,6 +231,8 @@ class RunQueueTests(unittest.TestCase): | |||
203 | expected.remove('b1:package') | 231 | expected.remove('b1:package') |
204 | self.assertEqual(set(tasks), set(expected)) | 232 | self.assertEqual(set(tasks), set(expected)) |
205 | 233 | ||
234 | self.shutdown(tempdir) | ||
235 | |||
206 | def test_all_setscenevalid_withdeps(self): | 236 | def test_all_setscenevalid_withdeps(self): |
207 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 237 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
208 | cmd = ["bitbake", "b1"] | 238 | cmd = ["bitbake", "b1"] |
@@ -213,6 +243,8 @@ class RunQueueTests(unittest.TestCase): | |||
213 | 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene'] | 243 | 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene'] |
214 | self.assertEqual(set(tasks), set(expected)) | 244 | self.assertEqual(set(tasks), set(expected)) |
215 | 245 | ||
246 | self.shutdown(tempdir) | ||
247 | |||
216 | def test_multiconfig_setscene_optimise(self): | 248 | def test_multiconfig_setscene_optimise(self): |
217 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 249 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
218 | extraenv = { | 250 | extraenv = { |
@@ -232,6 +264,8 @@ class RunQueueTests(unittest.TestCase): | |||
232 | expected.remove(x) | 264 | expected.remove(x) |
233 | self.assertEqual(set(tasks), set(expected)) | 265 | self.assertEqual(set(tasks), set(expected)) |
234 | 266 | ||
267 | self.shutdown(tempdir) | ||
268 | |||
235 | def test_multiconfig_bbmask(self): | 269 | def test_multiconfig_bbmask(self): |
236 | # This test validates that multiconfigs can independently mask off | 270 | # This test validates that multiconfigs can independently mask off |
237 | # recipes they do not want with BBMASK. It works by having recipes | 271 | # recipes they do not want with BBMASK. It works by having recipes |
@@ -248,11 +282,13 @@ class RunQueueTests(unittest.TestCase): | |||
248 | cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"] | 282 | cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"] |
249 | self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv) | 283 | self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv) |
250 | 284 | ||
285 | self.shutdown(tempdir) | ||
286 | |||
251 | def test_multiconfig_mcdepends(self): | 287 | def test_multiconfig_mcdepends(self): |
252 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 288 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
253 | extraenv = { | 289 | extraenv = { |
254 | "BBMULTICONFIG" : "mc-1 mc_2", | 290 | "BBMULTICONFIG" : "mc-1 mc_2", |
255 | "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends", | 291 | "BB_SIGNATURE_HANDLER" : "basichash", |
256 | "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb", | 292 | "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb", |
257 | } | 293 | } |
258 | tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True) | 294 | tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True) |
@@ -278,7 +314,8 @@ class RunQueueTests(unittest.TestCase): | |||
278 | ["mc_2:a1:%s" % t for t in rerun_tasks] | 314 | ["mc_2:a1:%s" % t for t in rerun_tasks] |
279 | self.assertEqual(set(tasks), set(expected)) | 315 | self.assertEqual(set(tasks), set(expected)) |
280 | 316 | ||
281 | @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') | 317 | self.shutdown(tempdir) |
318 | |||
282 | def test_hashserv_single(self): | 319 | def test_hashserv_single(self): |
283 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 320 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
284 | extraenv = { | 321 | extraenv = { |
@@ -304,7 +341,6 @@ class RunQueueTests(unittest.TestCase): | |||
304 | 341 | ||
305 | self.shutdown(tempdir) | 342 | self.shutdown(tempdir) |
306 | 343 | ||
307 | @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') | ||
308 | def test_hashserv_double(self): | 344 | def test_hashserv_double(self): |
309 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 345 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
310 | extraenv = { | 346 | extraenv = { |
@@ -329,7 +365,6 @@ class RunQueueTests(unittest.TestCase): | |||
329 | 365 | ||
330 | self.shutdown(tempdir) | 366 | self.shutdown(tempdir) |
331 | 367 | ||
332 | @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') | ||
333 | def test_hashserv_multiple_setscene(self): | 368 | def test_hashserv_multiple_setscene(self): |
334 | # Runs e1:do_package_setscene twice | 369 | # Runs e1:do_package_setscene twice |
335 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: | 370 | with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: |
@@ -361,7 +396,6 @@ class RunQueueTests(unittest.TestCase): | |||
361 | 396 | ||
362 | def shutdown(self, tempdir): | 397 | def shutdown(self, tempdir): |
363 | # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup | 398 | # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup |
364 | while os.path.exists(tempdir + "/hashserve.sock"): | 399 | while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")): |
365 | time.sleep(0.5) | 400 | time.sleep(0.5) |
366 | 401 | ||
367 | |||
diff --git a/bitbake/lib/bb/tests/siggen.py b/bitbake/lib/bb/tests/siggen.py index c21ab4e4fb..0dc67e6cc2 100644 --- a/bitbake/lib/bb/tests/siggen.py +++ b/bitbake/lib/bb/tests/siggen.py | |||
@@ -17,75 +17,12 @@ import bb.siggen | |||
17 | 17 | ||
18 | class SiggenTest(unittest.TestCase): | 18 | class SiggenTest(unittest.TestCase): |
19 | 19 | ||
20 | def test_clean_basepath_simple_target_basepath(self): | 20 | def test_build_pnid(self): |
21 | basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | 21 | tests = { |
22 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask' | 22 | ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask', |
23 | ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask', | ||
24 | } | ||
23 | 25 | ||
24 | actual_cleaned = bb.siggen.clean_basepath(basepath) | 26 | for t in tests: |
27 | self.assertEqual(bb.siggen.build_pnid(*t), tests[t]) | ||
25 | 28 | ||
26 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
27 | |||
28 | def test_clean_basepath_basic_virtual_basepath(self): | ||
29 | basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | ||
30 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something' | ||
31 | |||
32 | actual_cleaned = bb.siggen.clean_basepath(basepath) | ||
33 | |||
34 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
35 | |||
36 | def test_clean_basepath_mc_basepath(self): | ||
37 | basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | ||
38 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine' | ||
39 | |||
40 | actual_cleaned = bb.siggen.clean_basepath(basepath) | ||
41 | |||
42 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
43 | |||
44 | def test_clean_basepath_virtual_long_prefix_basepath(self): | ||
45 | basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | ||
46 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C' | ||
47 | |||
48 | actual_cleaned = bb.siggen.clean_basepath(basepath) | ||
49 | |||
50 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
51 | |||
52 | def test_clean_basepath_mc_virtual_basepath(self): | ||
53 | basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | ||
54 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine' | ||
55 | |||
56 | actual_cleaned = bb.siggen.clean_basepath(basepath) | ||
57 | |||
58 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
59 | |||
60 | def test_clean_basepath_mc_virtual_long_prefix_basepath(self): | ||
61 | basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' | ||
62 | expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X' | ||
63 | |||
64 | actual_cleaned = bb.siggen.clean_basepath(basepath) | ||
65 | |||
66 | self.assertEqual(actual_cleaned, expected_cleaned) | ||
67 | |||
68 | |||
69 | # def test_clean_basepath_performance(self): | ||
70 | # input_basepaths = [ | ||
71 | # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
72 | # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
73 | # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
74 | # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
75 | # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
76 | # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
77 | # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask', | ||
78 | # ] | ||
79 | |||
80 | # time_start = time.time() | ||
81 | |||
82 | # i = 2000000 | ||
83 | # while i >= 0: | ||
84 | # for basepath in input_basepaths: | ||
85 | # bb.siggen.clean_basepath(basepath) | ||
86 | # i -= 1 | ||
87 | |||
88 | # elapsed = time.time() - time_start | ||
89 | # print('{} ({}s)'.format(self.id(), round(elapsed, 3))) | ||
90 | |||
91 | # self.assertTrue(False) | ||
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py index a7ff33db52..c363f62d7d 100644 --- a/bitbake/lib/bb/tests/utils.py +++ b/bitbake/lib/bb/tests/utils.py | |||
@@ -418,7 +418,7 @@ MULTILINE = " stuff \\ | |||
418 | ['MULTILINE'], | 418 | ['MULTILINE'], |
419 | handle_var) | 419 | handle_var) |
420 | 420 | ||
421 | testvalue = re.sub('\s+', ' ', value_in_callback.strip()) | 421 | testvalue = re.sub(r'\s+', ' ', value_in_callback.strip()) |
422 | self.assertEqual(expected_value, testvalue) | 422 | self.assertEqual(expected_value, testvalue) |
423 | 423 | ||
424 | class EditBbLayersConf(unittest.TestCase): | 424 | class EditBbLayersConf(unittest.TestCase): |
@@ -666,3 +666,21 @@ class GetReferencedVars(unittest.TestCase): | |||
666 | 666 | ||
667 | layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}] | 667 | layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}] |
668 | self.check_referenced("${SRC_URI}", layers) | 668 | self.check_referenced("${SRC_URI}", layers) |
669 | |||
670 | |||
671 | class EnvironmentTests(unittest.TestCase): | ||
672 | def test_environment(self): | ||
673 | os.environ["A"] = "this is A" | ||
674 | self.assertIn("A", os.environ) | ||
675 | self.assertEqual(os.environ["A"], "this is A") | ||
676 | self.assertNotIn("B", os.environ) | ||
677 | |||
678 | with bb.utils.environment(B="this is B"): | ||
679 | self.assertIn("A", os.environ) | ||
680 | self.assertEqual(os.environ["A"], "this is A") | ||
681 | self.assertIn("B", os.environ) | ||
682 | self.assertEqual(os.environ["B"], "this is B") | ||
683 | |||
684 | self.assertIn("A", os.environ) | ||
685 | self.assertEqual(os.environ["A"], "this is A") | ||
686 | self.assertNotIn("B", os.environ) | ||
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py index 763c329810..dcd3910cc4 100644 --- a/bitbake/lib/bb/tinfoil.py +++ b/bitbake/lib/bb/tinfoil.py | |||
@@ -10,6 +10,7 @@ | |||
10 | import logging | 10 | import logging |
11 | import os | 11 | import os |
12 | import sys | 12 | import sys |
13 | import time | ||
13 | import atexit | 14 | import atexit |
14 | import re | 15 | import re |
15 | from collections import OrderedDict, defaultdict | 16 | from collections import OrderedDict, defaultdict |
@@ -52,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory: | |||
52 | def remoteCommand(self, cmd, *args, **kwargs): | 53 | def remoteCommand(self, cmd, *args, **kwargs): |
53 | return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) | 54 | return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) |
54 | 55 | ||
56 | def emit(self, var, oval, val, o, d): | ||
57 | ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex) | ||
58 | o.write(ret) | ||
59 | |||
55 | def __getattr__(self, name): | 60 | def __getattr__(self, name): |
56 | if not hasattr(bb.data_smart.VariableHistory, name): | 61 | if not hasattr(bb.data_smart.VariableHistory, name): |
57 | raise AttributeError("VariableHistory has no such method %s" % name) | 62 | raise AttributeError("VariableHistory has no such method %s" % name) |
@@ -320,11 +325,11 @@ class Tinfoil: | |||
320 | self.recipes_parsed = False | 325 | self.recipes_parsed = False |
321 | self.quiet = 0 | 326 | self.quiet = 0 |
322 | self.oldhandlers = self.logger.handlers[:] | 327 | self.oldhandlers = self.logger.handlers[:] |
328 | self.localhandlers = [] | ||
323 | if setup_logging: | 329 | if setup_logging: |
324 | # This is the *client-side* logger, nothing to do with | 330 | # This is the *client-side* logger, nothing to do with |
325 | # logging messages from the server | 331 | # logging messages from the server |
326 | bb.msg.logger_create('BitBake', output) | 332 | bb.msg.logger_create('BitBake', output) |
327 | self.localhandlers = [] | ||
328 | for handler in self.logger.handlers: | 333 | for handler in self.logger.handlers: |
329 | if handler not in self.oldhandlers: | 334 | if handler not in self.oldhandlers: |
330 | self.localhandlers.append(handler) | 335 | self.localhandlers.append(handler) |
@@ -440,11 +445,17 @@ class Tinfoil: | |||
440 | to initialise Tinfoil and use it with config_only=True first and | 445 | to initialise Tinfoil and use it with config_only=True first and |
441 | then conditionally call this function to parse recipes later. | 446 | then conditionally call this function to parse recipes later. |
442 | """ | 447 | """ |
443 | config_params = TinfoilConfigParameters(config_only=False) | 448 | config_params = TinfoilConfigParameters(config_only=False, quiet=self.quiet) |
444 | self.run_actions(config_params) | 449 | self.run_actions(config_params) |
445 | self.recipes_parsed = True | 450 | self.recipes_parsed = True |
446 | 451 | ||
447 | def run_command(self, command, *params): | 452 | def modified_files(self): |
453 | """ | ||
454 | Notify the server it needs to revalidate it's caches since the client has modified files | ||
455 | """ | ||
456 | self.run_command("revalidateCaches") | ||
457 | |||
458 | def run_command(self, command, *params, handle_events=True): | ||
448 | """ | 459 | """ |
449 | Run a command on the server (as implemented in bb.command). | 460 | Run a command on the server (as implemented in bb.command). |
450 | Note that there are two types of command - synchronous and | 461 | Note that there are two types of command - synchronous and |
@@ -464,7 +475,7 @@ class Tinfoil: | |||
464 | try: | 475 | try: |
465 | result = self.server_connection.connection.runCommand(commandline) | 476 | result = self.server_connection.connection.runCommand(commandline) |
466 | finally: | 477 | finally: |
467 | while True: | 478 | while handle_events: |
468 | event = self.wait_event() | 479 | event = self.wait_event() |
469 | if not event: | 480 | if not event: |
470 | break | 481 | break |
@@ -489,7 +500,7 @@ class Tinfoil: | |||
489 | Wait for an event from the server for the specified time. | 500 | Wait for an event from the server for the specified time. |
490 | A timeout of 0 means don't wait if there are no events in the queue. | 501 | A timeout of 0 means don't wait if there are no events in the queue. |
491 | Returns the next event in the queue or None if the timeout was | 502 | Returns the next event in the queue or None if the timeout was |
492 | reached. Note that in order to recieve any events you will | 503 | reached. Note that in order to receive any events you will |
493 | first need to set the internal event mask using set_event_mask() | 504 | first need to set the internal event mask using set_event_mask() |
494 | (otherwise whatever event mask the UI set up will be in effect). | 505 | (otherwise whatever event mask the UI set up will be in effect). |
495 | """ | 506 | """ |
@@ -725,6 +736,7 @@ class Tinfoil: | |||
725 | 736 | ||
726 | ret = self.run_command('buildTargets', targets, task) | 737 | ret = self.run_command('buildTargets', targets, task) |
727 | if handle_events: | 738 | if handle_events: |
739 | lastevent = time.time() | ||
728 | result = False | 740 | result = False |
729 | # Borrowed from knotty, instead somewhat hackily we use the helper | 741 | # Borrowed from knotty, instead somewhat hackily we use the helper |
730 | # as the object to store "shutdown" on | 742 | # as the object to store "shutdown" on |
@@ -737,6 +749,7 @@ class Tinfoil: | |||
737 | try: | 749 | try: |
738 | event = self.wait_event(0.25) | 750 | event = self.wait_event(0.25) |
739 | if event: | 751 | if event: |
752 | lastevent = time.time() | ||
740 | if event_callback and event_callback(event): | 753 | if event_callback and event_callback(event): |
741 | continue | 754 | continue |
742 | if helper.eventHandler(event): | 755 | if helper.eventHandler(event): |
@@ -757,7 +770,7 @@ class Tinfoil: | |||
757 | if parseprogress: | 770 | if parseprogress: |
758 | parseprogress.update(event.progress) | 771 | parseprogress.update(event.progress) |
759 | else: | 772 | else: |
760 | bb.warn("Got ProcessProgress event for someting that never started?") | 773 | bb.warn("Got ProcessProgress event for something that never started?") |
761 | continue | 774 | continue |
762 | if isinstance(event, bb.event.ProcessFinished): | 775 | if isinstance(event, bb.event.ProcessFinished): |
763 | if self.quiet > 1: | 776 | if self.quiet > 1: |
@@ -769,7 +782,7 @@ class Tinfoil: | |||
769 | if isinstance(event, bb.command.CommandCompleted): | 782 | if isinstance(event, bb.command.CommandCompleted): |
770 | result = True | 783 | result = True |
771 | break | 784 | break |
772 | if isinstance(event, bb.command.CommandFailed): | 785 | if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)): |
773 | self.logger.error(str(event)) | 786 | self.logger.error(str(event)) |
774 | result = False | 787 | result = False |
775 | break | 788 | break |
@@ -781,10 +794,13 @@ class Tinfoil: | |||
781 | self.logger.error(str(event)) | 794 | self.logger.error(str(event)) |
782 | result = False | 795 | result = False |
783 | break | 796 | break |
784 | |||
785 | elif helper.shutdown > 1: | 797 | elif helper.shutdown > 1: |
786 | break | 798 | break |
787 | termfilter.updateFooter() | 799 | termfilter.updateFooter() |
800 | if time.time() > (lastevent + (3*60)): | ||
801 | if not self.run_command('ping', handle_events=False): | ||
802 | print("\nUnable to ping server and no events, closing down...\n") | ||
803 | return False | ||
788 | except KeyboardInterrupt: | 804 | except KeyboardInterrupt: |
789 | termfilter.clearFooter() | 805 | termfilter.clearFooter() |
790 | if helper.shutdown == 1: | 806 | if helper.shutdown == 1: |
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py index 43aa592842..8b212b7803 100644 --- a/bitbake/lib/bb/ui/buildinfohelper.py +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -45,7 +45,7 @@ from pprint import pformat | |||
45 | import logging | 45 | import logging |
46 | from datetime import datetime, timedelta | 46 | from datetime import datetime, timedelta |
47 | 47 | ||
48 | from django.db import transaction, connection | 48 | from django.db import transaction |
49 | 49 | ||
50 | 50 | ||
51 | # pylint: disable=invalid-name | 51 | # pylint: disable=invalid-name |
@@ -227,6 +227,12 @@ class ORMWrapper(object): | |||
227 | build.completed_on = timezone.now() | 227 | build.completed_on = timezone.now() |
228 | build.outcome = outcome | 228 | build.outcome = outcome |
229 | build.save() | 229 | build.save() |
230 | |||
231 | # We force a sync point here to force the outcome status commit, | ||
232 | # which resolves a race condition with the build completion takedown | ||
233 | transaction.set_autocommit(True) | ||
234 | transaction.set_autocommit(False) | ||
235 | |||
230 | signal_runbuilds() | 236 | signal_runbuilds() |
231 | 237 | ||
232 | def update_target_set_license_manifest(self, target, license_manifest_path): | 238 | def update_target_set_license_manifest(self, target, license_manifest_path): |
@@ -483,14 +489,14 @@ class ORMWrapper(object): | |||
483 | 489 | ||
484 | # we already created the root directory, so ignore any | 490 | # we already created the root directory, so ignore any |
485 | # entry for it | 491 | # entry for it |
486 | if len(path) == 0: | 492 | if not path: |
487 | continue | 493 | continue |
488 | 494 | ||
489 | parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) | 495 | parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) |
490 | if len(parent_path) == 0: | 496 | if not parent_path: |
491 | parent_path = "/" | 497 | parent_path = "/" |
492 | parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 498 | parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) |
493 | tf_obj = Target_File.objects.create( | 499 | Target_File.objects.create( |
494 | target = target_obj, | 500 | target = target_obj, |
495 | path = path, | 501 | path = path, |
496 | size = size, | 502 | size = size, |
@@ -555,7 +561,7 @@ class ORMWrapper(object): | |||
555 | 561 | ||
556 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 562 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) |
557 | 563 | ||
558 | tf_obj = Target_File.objects.create( | 564 | Target_File.objects.create( |
559 | target = target_obj, | 565 | target = target_obj, |
560 | path = path, | 566 | path = path, |
561 | size = size, | 567 | size = size, |
@@ -571,7 +577,7 @@ class ORMWrapper(object): | |||
571 | assert isinstance(build_obj, Build) | 577 | assert isinstance(build_obj, Build) |
572 | assert isinstance(target_obj, Target) | 578 | assert isinstance(target_obj, Target) |
573 | 579 | ||
574 | errormsg = "" | 580 | errormsg = [] |
575 | for p in packagedict: | 581 | for p in packagedict: |
576 | # Search name swtiches round the installed name vs package name | 582 | # Search name swtiches round the installed name vs package name |
577 | # by default installed name == package name | 583 | # by default installed name == package name |
@@ -633,10 +639,10 @@ class ORMWrapper(object): | |||
633 | packagefile_objects.append(Package_File( package = packagedict[p]['object'], | 639 | packagefile_objects.append(Package_File( package = packagedict[p]['object'], |
634 | path = targetpath, | 640 | path = targetpath, |
635 | size = targetfilesize)) | 641 | size = targetfilesize)) |
636 | if len(packagefile_objects): | 642 | if packagefile_objects: |
637 | Package_File.objects.bulk_create(packagefile_objects) | 643 | Package_File.objects.bulk_create(packagefile_objects) |
638 | except KeyError as e: | 644 | except KeyError as e: |
639 | errormsg += " stpi: Key error, package %s key %s \n" % ( p, e ) | 645 | errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e)) |
640 | 646 | ||
641 | # save disk installed size | 647 | # save disk installed size |
642 | packagedict[p]['object'].installed_size = packagedict[p]['size'] | 648 | packagedict[p]['object'].installed_size = packagedict[p]['size'] |
@@ -673,13 +679,13 @@ class ORMWrapper(object): | |||
673 | logger.warning("Could not add dependency to the package %s " | 679 | logger.warning("Could not add dependency to the package %s " |
674 | "because %s is an unknown package", p, px) | 680 | "because %s is an unknown package", p, px) |
675 | 681 | ||
676 | if len(packagedeps_objs) > 0: | 682 | if packagedeps_objs: |
677 | Package_Dependency.objects.bulk_create(packagedeps_objs) | 683 | Package_Dependency.objects.bulk_create(packagedeps_objs) |
678 | else: | 684 | else: |
679 | logger.info("No package dependencies created") | 685 | logger.info("No package dependencies created") |
680 | 686 | ||
681 | if len(errormsg) > 0: | 687 | if errormsg: |
682 | logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg) | 688 | logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg)) |
683 | 689 | ||
684 | def save_target_image_file_information(self, target_obj, file_name, file_size): | 690 | def save_target_image_file_information(self, target_obj, file_name, file_size): |
685 | Target_Image_File.objects.create(target=target_obj, | 691 | Target_Image_File.objects.create(target=target_obj, |
@@ -767,7 +773,7 @@ class ORMWrapper(object): | |||
767 | packagefile_objects.append(Package_File( package = bp_object, | 773 | packagefile_objects.append(Package_File( package = bp_object, |
768 | path = path, | 774 | path = path, |
769 | size = package_info['FILES_INFO'][path] )) | 775 | size = package_info['FILES_INFO'][path] )) |
770 | if len(packagefile_objects): | 776 | if packagefile_objects: |
771 | Package_File.objects.bulk_create(packagefile_objects) | 777 | Package_File.objects.bulk_create(packagefile_objects) |
772 | 778 | ||
773 | def _po_byname(p): | 779 | def _po_byname(p): |
@@ -809,7 +815,7 @@ class ORMWrapper(object): | |||
809 | packagedeps_objs.append(Package_Dependency( package = bp_object, | 815 | packagedeps_objs.append(Package_Dependency( package = bp_object, |
810 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) | 816 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) |
811 | 817 | ||
812 | if len(packagedeps_objs) > 0: | 818 | if packagedeps_objs: |
813 | Package_Dependency.objects.bulk_create(packagedeps_objs) | 819 | Package_Dependency.objects.bulk_create(packagedeps_objs) |
814 | 820 | ||
815 | return bp_object | 821 | return bp_object |
@@ -826,7 +832,7 @@ class ORMWrapper(object): | |||
826 | desc = vardump[root_var]['doc'] | 832 | desc = vardump[root_var]['doc'] |
827 | if desc is None: | 833 | if desc is None: |
828 | desc = '' | 834 | desc = '' |
829 | if len(desc): | 835 | if desc: |
830 | HelpText.objects.get_or_create(build=build_obj, | 836 | HelpText.objects.get_or_create(build=build_obj, |
831 | area=HelpText.VARIABLE, | 837 | area=HelpText.VARIABLE, |
832 | key=k, text=desc) | 838 | key=k, text=desc) |
@@ -846,7 +852,7 @@ class ORMWrapper(object): | |||
846 | file_name = vh['file'], | 852 | file_name = vh['file'], |
847 | line_number = vh['line'], | 853 | line_number = vh['line'], |
848 | operation = vh['op'])) | 854 | operation = vh['op'])) |
849 | if len(varhist_objects): | 855 | if varhist_objects: |
850 | VariableHistory.objects.bulk_create(varhist_objects) | 856 | VariableHistory.objects.bulk_create(varhist_objects) |
851 | 857 | ||
852 | 858 | ||
@@ -893,9 +899,6 @@ class BuildInfoHelper(object): | |||
893 | self.task_order = 0 | 899 | self.task_order = 0 |
894 | self.autocommit_step = 1 | 900 | self.autocommit_step = 1 |
895 | self.server = server | 901 | self.server = server |
896 | # we use manual transactions if the database doesn't autocommit on us | ||
897 | if not connection.features.autocommits_when_autocommit_is_off: | ||
898 | transaction.set_autocommit(False) | ||
899 | self.orm_wrapper = ORMWrapper() | 902 | self.orm_wrapper = ORMWrapper() |
900 | self.has_build_history = has_build_history | 903 | self.has_build_history = has_build_history |
901 | self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] | 904 | self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] |
@@ -1059,27 +1062,6 @@ class BuildInfoHelper(object): | |||
1059 | 1062 | ||
1060 | return recipe_info | 1063 | return recipe_info |
1061 | 1064 | ||
1062 | def _get_path_information(self, task_object): | ||
1063 | self._ensure_build() | ||
1064 | |||
1065 | assert isinstance(task_object, Task) | ||
1066 | build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/" | ||
1067 | build_stats_path = [] | ||
1068 | |||
1069 | for t in self.internal_state['targets']: | ||
1070 | buildname = self.internal_state['build'].build_name | ||
1071 | pe, pv = task_object.recipe.version.split(":",1) | ||
1072 | if len(pe) > 0: | ||
1073 | package = task_object.recipe.name + "-" + pe + "_" + pv | ||
1074 | else: | ||
1075 | package = task_object.recipe.name + "-" + pv | ||
1076 | |||
1077 | build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, | ||
1078 | buildname=buildname, | ||
1079 | package=package)) | ||
1080 | |||
1081 | return build_stats_path | ||
1082 | |||
1083 | 1065 | ||
1084 | ################################ | 1066 | ################################ |
1085 | ## external available methods to store information | 1067 | ## external available methods to store information |
@@ -1313,12 +1295,11 @@ class BuildInfoHelper(object): | |||
1313 | task_information['outcome'] = Task.OUTCOME_FAILED | 1295 | task_information['outcome'] = Task.OUTCOME_FAILED |
1314 | del self.internal_state['taskdata'][identifier] | 1296 | del self.internal_state['taskdata'][identifier] |
1315 | 1297 | ||
1316 | if not connection.features.autocommits_when_autocommit_is_off: | 1298 | # we force a sync point here, to get the progress bar to show |
1317 | # we force a sync point here, to get the progress bar to show | 1299 | if self.autocommit_step % 3 == 0: |
1318 | if self.autocommit_step % 3 == 0: | 1300 | transaction.set_autocommit(True) |
1319 | transaction.set_autocommit(True) | 1301 | transaction.set_autocommit(False) |
1320 | transaction.set_autocommit(False) | 1302 | self.autocommit_step += 1 |
1321 | self.autocommit_step += 1 | ||
1322 | 1303 | ||
1323 | self.orm_wrapper.get_update_task_object(task_information, True) # must exist | 1304 | self.orm_wrapper.get_update_task_object(task_information, True) # must exist |
1324 | 1305 | ||
@@ -1404,7 +1385,7 @@ class BuildInfoHelper(object): | |||
1404 | assert 'pn' in event._depgraph | 1385 | assert 'pn' in event._depgraph |
1405 | assert 'tdepends' in event._depgraph | 1386 | assert 'tdepends' in event._depgraph |
1406 | 1387 | ||
1407 | errormsg = "" | 1388 | errormsg = [] |
1408 | 1389 | ||
1409 | # save layer version priorities | 1390 | # save layer version priorities |
1410 | if 'layer-priorities' in event._depgraph.keys(): | 1391 | if 'layer-priorities' in event._depgraph.keys(): |
@@ -1496,7 +1477,7 @@ class BuildInfoHelper(object): | |||
1496 | elif dep in self.internal_state['recipes']: | 1477 | elif dep in self.internal_state['recipes']: |
1497 | dependency = self.internal_state['recipes'][dep] | 1478 | dependency = self.internal_state['recipes'][dep] |
1498 | else: | 1479 | else: |
1499 | errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep) | 1480 | errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)) |
1500 | continue | 1481 | continue |
1501 | recipe_dep = Recipe_Dependency(recipe=target, | 1482 | recipe_dep = Recipe_Dependency(recipe=target, |
1502 | depends_on=dependency, | 1483 | depends_on=dependency, |
@@ -1537,8 +1518,8 @@ class BuildInfoHelper(object): | |||
1537 | taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) | 1518 | taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) |
1538 | Task_Dependency.objects.bulk_create(taskdeps_objects) | 1519 | Task_Dependency.objects.bulk_create(taskdeps_objects) |
1539 | 1520 | ||
1540 | if len(errormsg) > 0: | 1521 | if errormsg: |
1541 | logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg) | 1522 | logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg)) |
1542 | 1523 | ||
1543 | 1524 | ||
1544 | def store_build_package_information(self, event): | 1525 | def store_build_package_information(self, event): |
@@ -1618,7 +1599,7 @@ class BuildInfoHelper(object): | |||
1618 | 1599 | ||
1619 | if 'backlog' in self.internal_state: | 1600 | if 'backlog' in self.internal_state: |
1620 | # if we have a backlog of events, do our best to save them here | 1601 | # if we have a backlog of events, do our best to save them here |
1621 | if len(self.internal_state['backlog']): | 1602 | if self.internal_state['backlog']: |
1622 | tempevent = self.internal_state['backlog'].pop() | 1603 | tempevent = self.internal_state['backlog'].pop() |
1623 | logger.debug("buildinfohelper: Saving stored event %s " | 1604 | logger.debug("buildinfohelper: Saving stored event %s " |
1624 | % tempevent) | 1605 | % tempevent) |
@@ -1765,7 +1746,6 @@ class BuildInfoHelper(object): | |||
1765 | 1746 | ||
1766 | buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] | 1747 | buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] |
1767 | machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] | 1748 | machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] |
1768 | image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0] | ||
1769 | 1749 | ||
1770 | # location of the manifest files for this build; | 1750 | # location of the manifest files for this build; |
1771 | # note that this file is only produced if an image is produced | 1751 | # note that this file is only produced if an image is produced |
@@ -1786,6 +1766,18 @@ class BuildInfoHelper(object): | |||
1786 | # filter out anything which isn't an image target | 1766 | # filter out anything which isn't an image target |
1787 | image_targets = [target for target in targets if target.is_image] | 1767 | image_targets = [target for target in targets if target.is_image] |
1788 | 1768 | ||
1769 | if len(image_targets) > 0: | ||
1770 | #if there are image targets retrieve image_name | ||
1771 | image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0] | ||
1772 | if not image_name: | ||
1773 | #When build target is an image and image_name is not found as an environment variable | ||
1774 | logger.info("IMAGE_NAME not found, extracting from bitbake command") | ||
1775 | cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0] | ||
1776 | #filter out tokens that are command line options | ||
1777 | cmd = [token for token in cmd if not token.startswith('-')] | ||
1778 | image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name | ||
1779 | logger.info("IMAGE_NAME found as : %s " % image_name) | ||
1780 | |||
1789 | for image_target in image_targets: | 1781 | for image_target in image_targets: |
1790 | # this is set to True if we find at least one file relating to | 1782 | # this is set to True if we find at least one file relating to |
1791 | # this target; if this remains False after the scan, we copy the | 1783 | # this target; if this remains False after the scan, we copy the |
@@ -1990,8 +1982,6 @@ class BuildInfoHelper(object): | |||
1990 | # Do not skip command line build events | 1982 | # Do not skip command line build events |
1991 | self.store_log_event(tempevent,False) | 1983 | self.store_log_event(tempevent,False) |
1992 | 1984 | ||
1993 | if not connection.features.autocommits_when_autocommit_is_off: | ||
1994 | transaction.set_autocommit(True) | ||
1995 | 1985 | ||
1996 | # unset the brbe; this is to prevent subsequent command-line builds | 1986 | # unset the brbe; this is to prevent subsequent command-line builds |
1997 | # being incorrectly attached to the previous Toaster-triggered build; | 1987 | # being incorrectly attached to the previous Toaster-triggered build; |
diff --git a/bitbake/lib/bb/ui/eventreplay.py b/bitbake/lib/bb/ui/eventreplay.py new file mode 100644 index 0000000000..d62ecbfa56 --- /dev/null +++ b/bitbake/lib/bb/ui/eventreplay.py | |||
@@ -0,0 +1,86 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | ||
4 | # | ||
5 | # This file re-uses code spread throughout other Bitbake source files. | ||
6 | # As such, all other copyrights belong to their own right holders. | ||
7 | # | ||
8 | |||
9 | |||
10 | import os | ||
11 | import sys | ||
12 | import json | ||
13 | import pickle | ||
14 | import codecs | ||
15 | |||
16 | |||
17 | class EventPlayer: | ||
18 | """Emulate a connection to a bitbake server.""" | ||
19 | |||
20 | def __init__(self, eventfile, variables): | ||
21 | self.eventfile = eventfile | ||
22 | self.variables = variables | ||
23 | self.eventmask = [] | ||
24 | |||
25 | def waitEvent(self, _timeout): | ||
26 | """Read event from the file.""" | ||
27 | line = self.eventfile.readline().strip() | ||
28 | if not line: | ||
29 | return | ||
30 | try: | ||
31 | decodedline = json.loads(line) | ||
32 | if 'allvariables' in decodedline: | ||
33 | self.variables = decodedline['allvariables'] | ||
34 | return | ||
35 | if not 'vars' in decodedline: | ||
36 | raise ValueError | ||
37 | event_str = decodedline['vars'].encode('utf-8') | ||
38 | event = pickle.loads(codecs.decode(event_str, 'base64')) | ||
39 | event_name = "%s.%s" % (event.__module__, event.__class__.__name__) | ||
40 | if event_name not in self.eventmask: | ||
41 | return | ||
42 | return event | ||
43 | except ValueError as err: | ||
44 | print("Failed loading ", line) | ||
45 | raise err | ||
46 | |||
47 | def runCommand(self, command_line): | ||
48 | """Emulate running a command on the server.""" | ||
49 | name = command_line[0] | ||
50 | |||
51 | if name == "getVariable": | ||
52 | var_name = command_line[1] | ||
53 | variable = self.variables.get(var_name) | ||
54 | if variable: | ||
55 | return variable['v'], None | ||
56 | return None, "Missing variable %s" % var_name | ||
57 | |||
58 | elif name == "getAllKeysWithFlags": | ||
59 | dump = {} | ||
60 | flaglist = command_line[1] | ||
61 | for key, val in self.variables.items(): | ||
62 | try: | ||
63 | if not key.startswith("__"): | ||
64 | dump[key] = { | ||
65 | 'v': val['v'], | ||
66 | 'history' : val['history'], | ||
67 | } | ||
68 | for flag in flaglist: | ||
69 | dump[key][flag] = val[flag] | ||
70 | except Exception as err: | ||
71 | print(err) | ||
72 | return (dump, None) | ||
73 | |||
74 | elif name == 'setEventMask': | ||
75 | self.eventmask = command_line[-1] | ||
76 | return True, None | ||
77 | |||
78 | else: | ||
79 | raise Exception("Command %s not implemented" % command_line[0]) | ||
80 | |||
81 | def getEventHandle(self): | ||
82 | """ | ||
83 | This method is called by toasterui. | ||
84 | The return value is passed to self.runCommand but not used there. | ||
85 | """ | ||
86 | pass | ||
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py index 0efa614dfc..f86999bb09 100644 --- a/bitbake/lib/bb/ui/knotty.py +++ b/bitbake/lib/bb/ui/knotty.py | |||
@@ -21,10 +21,11 @@ import fcntl | |||
21 | import struct | 21 | import struct |
22 | import copy | 22 | import copy |
23 | import atexit | 23 | import atexit |
24 | from itertools import groupby | ||
24 | 25 | ||
25 | from bb.ui import uihelper | 26 | from bb.ui import uihelper |
26 | 27 | ||
27 | featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS] | 28 | featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] |
28 | 29 | ||
29 | logger = logging.getLogger("BitBake") | 30 | logger = logging.getLogger("BitBake") |
30 | interactive = sys.stdout.isatty() | 31 | interactive = sys.stdout.isatty() |
@@ -178,7 +179,7 @@ class TerminalFilter(object): | |||
178 | new[3] = new[3] & ~termios.ECHO | 179 | new[3] = new[3] & ~termios.ECHO |
179 | termios.tcsetattr(fd, termios.TCSADRAIN, new) | 180 | termios.tcsetattr(fd, termios.TCSADRAIN, new) |
180 | curses.setupterm() | 181 | curses.setupterm() |
181 | if curses.tigetnum("colors") > 2: | 182 | if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '': |
182 | for h in handlers: | 183 | for h in handlers: |
183 | try: | 184 | try: |
184 | h.formatter.enable_color() | 185 | h.formatter.enable_color() |
@@ -227,7 +228,9 @@ class TerminalFilter(object): | |||
227 | 228 | ||
228 | def keepAlive(self, t): | 229 | def keepAlive(self, t): |
229 | if not self.cuu: | 230 | if not self.cuu: |
230 | print("Bitbake still alive (%ds)" % t) | 231 | print("Bitbake still alive (no events for %ds). Active tasks:" % t) |
232 | for t in self.helper.running_tasks: | ||
233 | print(t) | ||
231 | sys.stdout.flush() | 234 | sys.stdout.flush() |
232 | 235 | ||
233 | def updateFooter(self): | 236 | def updateFooter(self): |
@@ -249,58 +252,68 @@ class TerminalFilter(object): | |||
249 | return | 252 | return |
250 | tasks = [] | 253 | tasks = [] |
251 | for t in runningpids: | 254 | for t in runningpids: |
255 | start_time = activetasks[t].get("starttime", None) | ||
256 | if start_time: | ||
257 | msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]) | ||
258 | else: | ||
259 | msg = "%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]) | ||
252 | progress = activetasks[t].get("progress", None) | 260 | progress = activetasks[t].get("progress", None) |
253 | if progress is not None: | 261 | if progress is not None: |
254 | pbar = activetasks[t].get("progressbar", None) | 262 | pbar = activetasks[t].get("progressbar", None) |
255 | rate = activetasks[t].get("rate", None) | 263 | rate = activetasks[t].get("rate", None) |
256 | start_time = activetasks[t].get("starttime", None) | ||
257 | if not pbar or pbar.bouncing != (progress < 0): | 264 | if not pbar or pbar.bouncing != (progress < 0): |
258 | if progress < 0: | 265 | if progress < 0: |
259 | pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle) | 266 | pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle) |
260 | pbar.bouncing = True | 267 | pbar.bouncing = True |
261 | else: | 268 | else: |
262 | pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) | 269 | pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) |
263 | pbar.bouncing = False | 270 | pbar.bouncing = False |
264 | activetasks[t]["progressbar"] = pbar | 271 | activetasks[t]["progressbar"] = pbar |
265 | tasks.append((pbar, progress, rate, start_time)) | 272 | tasks.append((pbar, msg, progress, rate, start_time)) |
266 | else: | 273 | else: |
267 | start_time = activetasks[t].get("starttime", None) | 274 | tasks.append(msg) |
268 | if start_time: | ||
269 | tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])) | ||
270 | else: | ||
271 | tasks.append("%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"])) | ||
272 | 275 | ||
273 | if self.main.shutdown: | 276 | if self.main.shutdown: |
274 | content = "Waiting for %s running tasks to finish:" % len(activetasks) | 277 | content = pluralise("Waiting for %s running task to finish", |
278 | "Waiting for %s running tasks to finish", len(activetasks)) | ||
279 | if not self.quiet: | ||
280 | content += ':' | ||
275 | print(content) | 281 | print(content) |
276 | else: | 282 | else: |
283 | scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total) | ||
284 | cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total) | ||
285 | |||
286 | content = '' | ||
287 | if not self.quiet: | ||
288 | msg = "Setscene tasks: %s" % scene_tasks | ||
289 | content += msg + "\n" | ||
290 | print(msg) | ||
291 | |||
277 | if self.quiet: | 292 | if self.quiet: |
278 | content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) | 293 | msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks) |
279 | elif not len(activetasks): | 294 | elif not len(activetasks): |
280 | content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) | 295 | msg = "No currently running tasks (%s)" % cur_tasks |
281 | else: | 296 | else: |
282 | content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total) | 297 | msg = "Currently %2s running tasks (%s)" % (len(activetasks), cur_tasks) |
283 | maxtask = self.helper.tasknumber_total | 298 | maxtask = self.helper.tasknumber_total |
284 | if not self.main_progress or self.main_progress.maxval != maxtask: | 299 | if not self.main_progress or self.main_progress.maxval != maxtask: |
285 | widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] | 300 | widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] |
286 | self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) | 301 | self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) |
287 | self.main_progress.start(False) | 302 | self.main_progress.start(False) |
288 | self.main_progress.setmessage(content) | 303 | self.main_progress.setmessage(msg) |
289 | progress = self.helper.tasknumber_current - 1 | 304 | progress = max(0, self.helper.tasknumber_current - 1) |
290 | if progress < 0: | 305 | content += self.main_progress.update(progress) |
291 | progress = 0 | ||
292 | content = self.main_progress.update(progress) | ||
293 | print('') | 306 | print('') |
294 | lines = 1 + int(len(content) / (self.columns + 1)) | 307 | lines = self.getlines(content) |
295 | if self.quiet == 0: | 308 | if not self.quiet: |
296 | for tasknum, task in enumerate(tasks[:(self.rows - 2)]): | 309 | for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]): |
297 | if isinstance(task, tuple): | 310 | if isinstance(task, tuple): |
298 | pbar, progress, rate, start_time = task | 311 | pbar, msg, progress, rate, start_time = task |
299 | if not pbar.start_time: | 312 | if not pbar.start_time: |
300 | pbar.start(False) | 313 | pbar.start(False) |
301 | if start_time: | 314 | if start_time: |
302 | pbar.start_time = start_time | 315 | pbar.start_time = start_time |
303 | pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1])) | 316 | pbar.setmessage('%s: %s' % (tasknum, msg)) |
304 | pbar.setextra(rate) | 317 | pbar.setextra(rate) |
305 | if progress > -1: | 318 | if progress > -1: |
306 | content = pbar.update(progress) | 319 | content = pbar.update(progress) |
@@ -310,11 +323,17 @@ class TerminalFilter(object): | |||
310 | else: | 323 | else: |
311 | content = "%s: %s" % (tasknum, task) | 324 | content = "%s: %s" % (tasknum, task) |
312 | print(content) | 325 | print(content) |
313 | lines = lines + 1 + int(len(content) / (self.columns + 1)) | 326 | lines = lines + self.getlines(content) |
314 | self.footer_present = lines | 327 | self.footer_present = lines |
315 | self.lastpids = runningpids[:] | 328 | self.lastpids = runningpids[:] |
316 | self.lastcount = self.helper.tasknumber_current | 329 | self.lastcount = self.helper.tasknumber_current |
317 | 330 | ||
331 | def getlines(self, content): | ||
332 | lines = 0 | ||
333 | for line in content.split("\n"): | ||
334 | lines = lines + 1 + int(len(line) / (self.columns + 1)) | ||
335 | return lines | ||
336 | |||
318 | def finish(self): | 337 | def finish(self): |
319 | if self.stdinbackup: | 338 | if self.stdinbackup: |
320 | fd = sys.stdin.fileno() | 339 | fd = sys.stdin.fileno() |
@@ -401,6 +420,11 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
401 | except bb.BBHandledException: | 420 | except bb.BBHandledException: |
402 | drain_events_errorhandling(eventHandler) | 421 | drain_events_errorhandling(eventHandler) |
403 | return 1 | 422 | return 1 |
423 | except Exception as e: | ||
424 | # bitbake-server comms failure | ||
425 | early_logger = bb.msg.logger_create('bitbake', sys.stdout) | ||
426 | early_logger.fatal("Attempting to set server environment: %s", e) | ||
427 | return 1 | ||
404 | 428 | ||
405 | if params.options.quiet == 0: | 429 | if params.options.quiet == 0: |
406 | console_loglevel = loglevel | 430 | console_loglevel = loglevel |
@@ -539,6 +563,13 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
539 | except OSError: | 563 | except OSError: |
540 | pass | 564 | pass |
541 | 565 | ||
566 | # Add the logging domains specified by the user on the command line | ||
567 | for (domainarg, iterator) in groupby(params.debug_domains): | ||
568 | dlevel = len(tuple(iterator)) | ||
569 | l = logconfig["loggers"].setdefault("BitBake.%s" % domainarg, {}) | ||
570 | l["level"] = logging.DEBUG - dlevel + 1 | ||
571 | l.setdefault("handlers", []).extend(["BitBake.verbconsole"]) | ||
572 | |||
542 | conf = bb.msg.setLoggingConfig(logconfig, logconfigfile) | 573 | conf = bb.msg.setLoggingConfig(logconfig, logconfigfile) |
543 | 574 | ||
544 | if sys.stdin.isatty() and sys.stdout.isatty(): | 575 | if sys.stdin.isatty() and sys.stdout.isatty(): |
@@ -559,7 +590,12 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
559 | return | 590 | return |
560 | 591 | ||
561 | llevel, debug_domains = bb.msg.constructLogOptions() | 592 | llevel, debug_domains = bb.msg.constructLogOptions() |
562 | server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list]) | 593 | try: |
594 | server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list]) | ||
595 | except (BrokenPipeError, EOFError) as e: | ||
596 | # bitbake-server comms failure | ||
597 | logger.fatal("Attempting to set event mask: %s", e) | ||
598 | return 1 | ||
563 | 599 | ||
564 | # The logging_tree module is *extremely* helpful in debugging logging | 600 | # The logging_tree module is *extremely* helpful in debugging logging |
565 | # domains. Uncomment here to dump the logging tree when bitbake starts | 601 | # domains. Uncomment here to dump the logging tree when bitbake starts |
@@ -568,7 +604,11 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
568 | 604 | ||
569 | universe = False | 605 | universe = False |
570 | if not params.observe_only: | 606 | if not params.observe_only: |
571 | params.updateFromServer(server) | 607 | try: |
608 | params.updateFromServer(server) | ||
609 | except Exception as e: | ||
610 | logger.fatal("Fetching command line: %s", e) | ||
611 | return 1 | ||
572 | cmdline = params.parseActions() | 612 | cmdline = params.parseActions() |
573 | if not cmdline: | 613 | if not cmdline: |
574 | print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") | 614 | print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") |
@@ -579,7 +619,12 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
579 | if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]: | 619 | if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]: |
580 | universe = True | 620 | universe = True |
581 | 621 | ||
582 | ret, error = server.runCommand(cmdline['action']) | 622 | try: |
623 | ret, error = server.runCommand(cmdline['action']) | ||
624 | except (BrokenPipeError, EOFError) as e: | ||
625 | # bitbake-server comms failure | ||
626 | logger.fatal("Command '{}' failed: %s".format(cmdline), e) | ||
627 | return 1 | ||
583 | if error: | 628 | if error: |
584 | logger.error("Command '%s' failed: %s" % (cmdline, error)) | 629 | logger.error("Command '%s' failed: %s" % (cmdline, error)) |
585 | return 1 | 630 | return 1 |
@@ -597,26 +642,40 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
597 | warnings = 0 | 642 | warnings = 0 |
598 | taskfailures = [] | 643 | taskfailures = [] |
599 | 644 | ||
600 | printinterval = 5000 | 645 | printintervaldelta = 10 * 60 # 10 minutes |
601 | lastprint = time.time() | 646 | printinterval = printintervaldelta |
647 | pinginterval = 1 * 60 # 1 minute | ||
648 | lastevent = lastprint = time.time() | ||
602 | 649 | ||
603 | termfilter = tf(main, helper, console_handlers, params.options.quiet) | 650 | termfilter = tf(main, helper, console_handlers, params.options.quiet) |
604 | atexit.register(termfilter.finish) | 651 | atexit.register(termfilter.finish) |
605 | 652 | ||
606 | while True: | 653 | # shutdown levels |
654 | # 0 - normal operation | ||
655 | # 1 - no new task execution, let current running tasks finish | ||
656 | # 2 - interrupting currently executing tasks | ||
657 | # 3 - we're done, exit | ||
658 | while main.shutdown < 3: | ||
607 | try: | 659 | try: |
608 | if (lastprint + printinterval) <= time.time(): | 660 | if (lastprint + printinterval) <= time.time(): |
609 | termfilter.keepAlive(printinterval) | 661 | termfilter.keepAlive(printinterval) |
610 | printinterval += 5000 | 662 | printinterval += printintervaldelta |
611 | event = eventHandler.waitEvent(0) | 663 | event = eventHandler.waitEvent(0) |
612 | if event is None: | 664 | if event is None: |
613 | if main.shutdown > 1: | 665 | if (lastevent + pinginterval) <= time.time(): |
614 | break | 666 | ret, error = server.runCommand(["ping"]) |
667 | if error or not ret: | ||
668 | termfilter.clearFooter() | ||
669 | print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret))) | ||
670 | return_value = 3 | ||
671 | main.shutdown = 3 | ||
672 | lastevent = time.time() | ||
615 | if not parseprogress: | 673 | if not parseprogress: |
616 | termfilter.updateFooter() | 674 | termfilter.updateFooter() |
617 | event = eventHandler.waitEvent(0.25) | 675 | event = eventHandler.waitEvent(0.25) |
618 | if event is None: | 676 | if event is None: |
619 | continue | 677 | continue |
678 | lastevent = time.time() | ||
620 | helper.eventHandler(event) | 679 | helper.eventHandler(event) |
621 | if isinstance(event, bb.runqueue.runQueueExitWait): | 680 | if isinstance(event, bb.runqueue.runQueueExitWait): |
622 | if not main.shutdown: | 681 | if not main.shutdown: |
@@ -638,8 +697,8 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
638 | 697 | ||
639 | if isinstance(event, logging.LogRecord): | 698 | if isinstance(event, logging.LogRecord): |
640 | lastprint = time.time() | 699 | lastprint = time.time() |
641 | printinterval = 5000 | 700 | printinterval = printintervaldelta |
642 | if event.levelno >= bb.msg.BBLogFormatter.ERROR: | 701 | if event.levelno >= bb.msg.BBLogFormatter.ERRORONCE: |
643 | errors = errors + 1 | 702 | errors = errors + 1 |
644 | return_value = 1 | 703 | return_value = 1 |
645 | elif event.levelno == bb.msg.BBLogFormatter.WARNING: | 704 | elif event.levelno == bb.msg.BBLogFormatter.WARNING: |
@@ -653,10 +712,10 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
653 | continue | 712 | continue |
654 | 713 | ||
655 | # Prefix task messages with recipe/task | 714 | # Prefix task messages with recipe/task |
656 | if event.taskpid in helper.pidmap and event.levelno != bb.msg.BBLogFormatter.PLAIN: | 715 | if event.taskpid in helper.pidmap and event.levelno not in [bb.msg.BBLogFormatter.PLAIN, bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]: |
657 | taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]] | 716 | taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]] |
658 | event.msg = taskinfo['title'] + ': ' + event.msg | 717 | event.msg = taskinfo['title'] + ': ' + event.msg |
659 | if hasattr(event, 'fn'): | 718 | if hasattr(event, 'fn') and event.levelno not in [bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]: |
660 | event.msg = event.fn + ': ' + event.msg | 719 | event.msg = event.fn + ': ' + event.msg |
661 | logging.getLogger(event.name).handle(event) | 720 | logging.getLogger(event.name).handle(event) |
662 | continue | 721 | continue |
@@ -721,15 +780,15 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
721 | if event.error: | 780 | if event.error: |
722 | errors = errors + 1 | 781 | errors = errors + 1 |
723 | logger.error(str(event)) | 782 | logger.error(str(event)) |
724 | main.shutdown = 2 | 783 | main.shutdown = 3 |
725 | continue | 784 | continue |
726 | if isinstance(event, bb.command.CommandExit): | 785 | if isinstance(event, bb.command.CommandExit): |
727 | if not return_value: | 786 | if not return_value: |
728 | return_value = event.exitcode | 787 | return_value = event.exitcode |
729 | main.shutdown = 2 | 788 | main.shutdown = 3 |
730 | continue | 789 | continue |
731 | if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)): | 790 | if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)): |
732 | main.shutdown = 2 | 791 | main.shutdown = 3 |
733 | continue | 792 | continue |
734 | if isinstance(event, bb.event.MultipleProviders): | 793 | if isinstance(event, bb.event.MultipleProviders): |
735 | logger.info(str(event)) | 794 | logger.info(str(event)) |
@@ -745,7 +804,7 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
745 | continue | 804 | continue |
746 | 805 | ||
747 | if isinstance(event, bb.runqueue.sceneQueueTaskStarted): | 806 | if isinstance(event, bb.runqueue.sceneQueueTaskStarted): |
748 | logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring)) | 807 | logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring)) |
749 | continue | 808 | continue |
750 | 809 | ||
751 | if isinstance(event, bb.runqueue.runQueueTaskStarted): | 810 | if isinstance(event, bb.runqueue.runQueueTaskStarted): |
@@ -814,15 +873,26 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
814 | 873 | ||
815 | logger.error("Unknown event: %s", event) | 874 | logger.error("Unknown event: %s", event) |
816 | 875 | ||
876 | except (BrokenPipeError, EOFError) as e: | ||
877 | # bitbake-server comms failure, don't attempt further comms and exit | ||
878 | logger.fatal("Executing event: %s", e) | ||
879 | return_value = 1 | ||
880 | errors = errors + 1 | ||
881 | main.shutdown = 3 | ||
817 | except EnvironmentError as ioerror: | 882 | except EnvironmentError as ioerror: |
818 | termfilter.clearFooter() | 883 | termfilter.clearFooter() |
819 | # ignore interrupted io | 884 | # ignore interrupted io |
820 | if ioerror.args[0] == 4: | 885 | if ioerror.args[0] == 4: |
821 | continue | 886 | continue |
822 | sys.stderr.write(str(ioerror)) | 887 | sys.stderr.write(str(ioerror)) |
823 | if not params.observe_only: | ||
824 | _, error = server.runCommand(["stateForceShutdown"]) | ||
825 | main.shutdown = 2 | 888 | main.shutdown = 2 |
889 | if not params.observe_only: | ||
890 | try: | ||
891 | _, error = server.runCommand(["stateForceShutdown"]) | ||
892 | except (BrokenPipeError, EOFError) as e: | ||
893 | # bitbake-server comms failure, don't attempt further comms and exit | ||
894 | logger.fatal("Unable to force shutdown: %s", e) | ||
895 | main.shutdown = 3 | ||
826 | except KeyboardInterrupt: | 896 | except KeyboardInterrupt: |
827 | termfilter.clearFooter() | 897 | termfilter.clearFooter() |
828 | if params.observe_only: | 898 | if params.observe_only: |
@@ -831,9 +901,13 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
831 | 901 | ||
832 | def state_force_shutdown(): | 902 | def state_force_shutdown(): |
833 | print("\nSecond Keyboard Interrupt, stopping...\n") | 903 | print("\nSecond Keyboard Interrupt, stopping...\n") |
834 | _, error = server.runCommand(["stateForceShutdown"]) | 904 | try: |
835 | if error: | 905 | _, error = server.runCommand(["stateForceShutdown"]) |
836 | logger.error("Unable to cleanly stop: %s" % error) | 906 | if error: |
907 | logger.error("Unable to cleanly stop: %s" % error) | ||
908 | except (BrokenPipeError, EOFError) as e: | ||
909 | # bitbake-server comms failure | ||
910 | logger.fatal("Unable to cleanly stop: %s", e) | ||
837 | 911 | ||
838 | if not params.observe_only and main.shutdown == 1: | 912 | if not params.observe_only and main.shutdown == 1: |
839 | state_force_shutdown() | 913 | state_force_shutdown() |
@@ -846,17 +920,24 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
846 | _, error = server.runCommand(["stateShutdown"]) | 920 | _, error = server.runCommand(["stateShutdown"]) |
847 | if error: | 921 | if error: |
848 | logger.error("Unable to cleanly shutdown: %s" % error) | 922 | logger.error("Unable to cleanly shutdown: %s" % error) |
923 | except (BrokenPipeError, EOFError) as e: | ||
924 | # bitbake-server comms failure | ||
925 | logger.fatal("Unable to cleanly shutdown: %s", e) | ||
849 | except KeyboardInterrupt: | 926 | except KeyboardInterrupt: |
850 | state_force_shutdown() | 927 | state_force_shutdown() |
851 | 928 | ||
852 | main.shutdown = main.shutdown + 1 | 929 | main.shutdown = main.shutdown + 1 |
853 | pass | ||
854 | except Exception as e: | 930 | except Exception as e: |
855 | import traceback | 931 | import traceback |
856 | sys.stderr.write(traceback.format_exc()) | 932 | sys.stderr.write(traceback.format_exc()) |
857 | if not params.observe_only: | ||
858 | _, error = server.runCommand(["stateForceShutdown"]) | ||
859 | main.shutdown = 2 | 933 | main.shutdown = 2 |
934 | if not params.observe_only: | ||
935 | try: | ||
936 | _, error = server.runCommand(["stateForceShutdown"]) | ||
937 | except (BrokenPipeError, EOFError) as e: | ||
938 | # bitbake-server comms failure, don't attempt further comms and exit | ||
939 | logger.fatal("Unable to force shutdown: %s", e) | ||
940 | main.shudown = 3 | ||
860 | return_value = 1 | 941 | return_value = 1 |
861 | try: | 942 | try: |
862 | termfilter.clearFooter() | 943 | termfilter.clearFooter() |
@@ -867,11 +948,11 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
867 | for failure in taskfailures: | 948 | for failure in taskfailures: |
868 | summary += "\n %s" % failure | 949 | summary += "\n %s" % failure |
869 | if warnings: | 950 | if warnings: |
870 | summary += pluralise("\nSummary: There was %s WARNING message shown.", | 951 | summary += pluralise("\nSummary: There was %s WARNING message.", |
871 | "\nSummary: There were %s WARNING messages shown.", warnings) | 952 | "\nSummary: There were %s WARNING messages.", warnings) |
872 | if return_value and errors: | 953 | if return_value and errors: |
873 | summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.", | 954 | summary += pluralise("\nSummary: There was %s ERROR message, returning a non-zero exit code.", |
874 | "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors) | 955 | "\nSummary: There were %s ERROR messages, returning a non-zero exit code.", errors) |
875 | if summary and params.options.quiet == 0: | 956 | if summary and params.options.quiet == 0: |
876 | print(summary) | 957 | print(summary) |
877 | 958 | ||
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py index cf1c876a51..18a706547a 100644 --- a/bitbake/lib/bb/ui/ncurses.py +++ b/bitbake/lib/bb/ui/ncurses.py | |||
@@ -227,6 +227,9 @@ class NCursesUI: | |||
227 | shutdown = 0 | 227 | shutdown = 0 |
228 | 228 | ||
229 | try: | 229 | try: |
230 | if not params.observe_only: | ||
231 | params.updateToServer(server, os.environ.copy()) | ||
232 | |||
230 | params.updateFromServer(server) | 233 | params.updateFromServer(server) |
231 | cmdline = params.parseActions() | 234 | cmdline = params.parseActions() |
232 | if not cmdline: | 235 | if not cmdline: |
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py index 2b246710ca..bedfd69b09 100644 --- a/bitbake/lib/bb/ui/taskexp.py +++ b/bitbake/lib/bb/ui/taskexp.py | |||
@@ -8,6 +8,7 @@ | |||
8 | # | 8 | # |
9 | 9 | ||
10 | import sys | 10 | import sys |
11 | import traceback | ||
11 | 12 | ||
12 | try: | 13 | try: |
13 | import gi | 14 | import gi |
@@ -176,7 +177,7 @@ class gtkthread(threading.Thread): | |||
176 | quit = threading.Event() | 177 | quit = threading.Event() |
177 | def __init__(self, shutdown): | 178 | def __init__(self, shutdown): |
178 | threading.Thread.__init__(self) | 179 | threading.Thread.__init__(self) |
179 | self.setDaemon(True) | 180 | self.daemon = True |
180 | self.shutdown = shutdown | 181 | self.shutdown = shutdown |
181 | if not Gtk.init_check()[0]: | 182 | if not Gtk.init_check()[0]: |
182 | sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") | 183 | sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") |
@@ -196,6 +197,7 @@ def main(server, eventHandler, params): | |||
196 | gtkgui.start() | 197 | gtkgui.start() |
197 | 198 | ||
198 | try: | 199 | try: |
200 | params.updateToServer(server, os.environ.copy()) | ||
199 | params.updateFromServer(server) | 201 | params.updateFromServer(server) |
200 | cmdline = params.parseActions() | 202 | cmdline = params.parseActions() |
201 | if not cmdline: | 203 | if not cmdline: |
@@ -218,6 +220,9 @@ def main(server, eventHandler, params): | |||
218 | except client.Fault as x: | 220 | except client.Fault as x: |
219 | print("XMLRPC Fault getting commandline:\n %s" % x) | 221 | print("XMLRPC Fault getting commandline:\n %s" % x) |
220 | return | 222 | return |
223 | except Exception as e: | ||
224 | print("Exception in startup:\n %s" % traceback.format_exc()) | ||
225 | return | ||
221 | 226 | ||
222 | if gtkthread.quit.isSet(): | 227 | if gtkthread.quit.isSet(): |
223 | return | 228 | return |
diff --git a/bitbake/lib/bb/ui/taskexp_ncurses.py b/bitbake/lib/bb/ui/taskexp_ncurses.py new file mode 100755 index 0000000000..ea94a4987f --- /dev/null +++ b/bitbake/lib/bb/ui/taskexp_ncurses.py | |||
@@ -0,0 +1,1511 @@ | |||
1 | # | ||
2 | # BitBake Graphical ncurses-based Dependency Explorer | ||
3 | # * Based on the GTK implementation | ||
4 | # * Intended to run on any Linux host | ||
5 | # | ||
6 | # Copyright (C) 2007 Ross Burton | ||
7 | # Copyright (C) 2007 - 2008 Richard Purdie | ||
8 | # Copyright (C) 2022 - 2024 David Reyna | ||
9 | # | ||
10 | # SPDX-License-Identifier: GPL-2.0-only | ||
11 | # | ||
12 | |||
13 | # | ||
14 | # Execution example: | ||
15 | # $ bitbake -g -u taskexp_ncurses zlib acl | ||
16 | # | ||
17 | # Self-test example (executes a script of GUI actions): | ||
18 | # $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl | ||
19 | # ... | ||
20 | # $ echo $? | ||
21 | # 0 | ||
22 | # $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo | ||
23 | # ERROR: Nothing PROVIDES 'foo'. Close matches: | ||
24 | # ofono | ||
25 | # $ echo $? | ||
26 | # 1 | ||
27 | # | ||
28 | # Self-test with no terminal example (only tests dependency fetch from bitbake): | ||
29 | # $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt | ||
30 | # $ echo $? | ||
31 | # 0 | ||
32 | # | ||
33 | # Features: | ||
34 | # * Ncurses is used for the presentation layer. Only the 'curses' | ||
35 | # library is used (none of the extension libraries), plus only | ||
36 | # one main screen is used (no sub-windows) | ||
37 | # * Uses the 'generateDepTreeEvent' bitbake event to fetch the | ||
38 | # dynamic dependency data based on passed recipes | ||
39 | # * Computes and provides reverse dependencies | ||
40 | # * Supports task sorting on: | ||
41 | # (a) Task dependency order within each recipe | ||
42 | # (b) Pure alphabetical order | ||
43 | # (c) Provisions for third sort order (bitbake order?) | ||
44 | # * The 'Filter' does a "*string*" wildcard filter on tasks in the | ||
45 | # main window, dynamically re-ordering and re-centering the content | ||
46 | # * A 'Print' function exports the selected task or its whole recipe | ||
47 | # task set to the default file "taskdep.txt" | ||
48 | # * Supports a progress bar for bitbake loads and file printing | ||
49 | # * Line art for box drawing supported, ASCII art an alternative | ||
50 | # * No horizontal scrolling support. Selected task's full name | ||
51 | # shown in bottom bar | ||
52 | # * Dynamically catches terminals that are (or become) too small | ||
53 | # * Exception to ensure return to normal terminal on errors | ||
54 | # * Debugging support, self test option | ||
55 | # | ||
56 | |||
57 | import sys | ||
58 | import traceback | ||
59 | import curses | ||
60 | import re | ||
61 | import time | ||
62 | |||
63 | # Bitbake server support | ||
64 | import threading | ||
65 | from xmlrpc import client | ||
66 | import bb | ||
67 | import bb.event | ||
68 | |||
69 | # Dependency indexes (depends_model) | ||
70 | (TYPE_DEP, TYPE_RDEP) = (0, 1) | ||
71 | DEPENDS_TYPE = 0 | ||
72 | DEPENDS_TASK = 1 | ||
73 | DEPENDS_DEPS = 2 | ||
74 | # Task indexes (task_list) | ||
75 | TASK_NAME = 0 | ||
76 | TASK_PRIMARY = 1 | ||
77 | TASK_SORT_ALPHA = 2 | ||
78 | TASK_SORT_DEPS = 3 | ||
79 | TASK_SORT_BITBAKE = 4 | ||
80 | # Sort options (default is SORT_DEPS) | ||
81 | SORT_ALPHA = 0 | ||
82 | SORT_DEPS = 1 | ||
83 | SORT_BITBAKE_ENABLE = False # NOTE: future sort | ||
84 | SORT_BITBAKE = 2 | ||
85 | sort_model = SORT_DEPS | ||
86 | # Print options | ||
87 | PRINT_MODEL_1 = 0 | ||
88 | PRINT_MODEL_2 = 1 | ||
89 | print_model = PRINT_MODEL_2 | ||
90 | print_file_name = "taskdep_print.log" | ||
91 | print_file_backup_name = "taskdep_print_backup.log" | ||
92 | is_printed = False | ||
93 | is_filter = False | ||
94 | |||
95 | # Standard (and backup) key mappings | ||
96 | CHAR_NUL = 0 # Used as self-test nop char | ||
97 | CHAR_BS_H = 8 # Alternate backspace key | ||
98 | CHAR_TAB = 9 | ||
99 | CHAR_RETURN = 10 | ||
100 | CHAR_ESCAPE = 27 | ||
101 | CHAR_UP = ord('{') # Used as self-test ASCII char | ||
102 | CHAR_DOWN = ord('}') # Used as self-test ASCII char | ||
103 | |||
104 | # Color_pair IDs | ||
105 | CURSES_NORMAL = 0 | ||
106 | CURSES_HIGHLIGHT = 1 | ||
107 | CURSES_WARNING = 2 | ||
108 | |||
109 | |||
110 | ################################################# | ||
111 | ### Debugging support | ||
112 | ### | ||
113 | |||
114 | verbose = False | ||
115 | |||
116 | # Debug: message display slow-step through display update issues | ||
117 | def alert(msg,screen): | ||
118 | if msg: | ||
119 | screen.addstr(0, 10, '[%-4s]' % msg) | ||
120 | screen.refresh(); | ||
121 | curses.napms(2000) | ||
122 | else: | ||
123 | if do_line_art: | ||
124 | for i in range(10, 24): | ||
125 | screen.addch(0, i, curses.ACS_HLINE) | ||
126 | else: | ||
127 | screen.addstr(0, 10, '-' * 14) | ||
128 | screen.refresh(); | ||
129 | |||
130 | # Debug: display edge conditions on frame movements | ||
131 | def debug_frame(nbox_ojb): | ||
132 | if verbose: | ||
133 | nbox_ojb.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % ( | ||
134 | nbox_ojb.cursor_index, | ||
135 | nbox_ojb.cursor_offset, | ||
136 | nbox_ojb.scroll_offset, | ||
137 | nbox_ojb.inside_height, | ||
138 | len(nbox_ojb.task_list), | ||
139 | )) | ||
140 | nbox_ojb.screen.refresh(); | ||
141 | |||
142 | # | ||
143 | # Unit test (assumes that 'quilt-native' is always present) | ||
144 | # | ||
145 | |||
146 | unit_test = os.environ.get('TASK_EXP_UNIT_TEST') | ||
147 | unit_test_cmnds=[ | ||
148 | '# Default selected task in primary box', | ||
149 | 'tst_selected=<TASK>.do_recipe_qa', | ||
150 | '# Default selected task in deps', | ||
151 | 'tst_entry=<TAB>', | ||
152 | 'tst_selected=', | ||
153 | '# Default selected task in rdeps', | ||
154 | 'tst_entry=<TAB>', | ||
155 | 'tst_selected=<TASK>.do_fetch', | ||
156 | "# Test 'select' back to primary box", | ||
157 | 'tst_entry=<CR>', | ||
158 | '#tst_entry=<DOWN>', # optional injected error | ||
159 | 'tst_selected=<TASK>.do_fetch', | ||
160 | '# Check filter', | ||
161 | 'tst_entry=/uilt-nativ/', | ||
162 | 'tst_selected=quilt-native.do_recipe_qa', | ||
163 | '# Check print', | ||
164 | 'tst_entry=p', | ||
165 | 'tst_printed=quilt-native.do_fetch', | ||
166 | '#tst_printed=quilt-foo.do_nothing', # optional injected error | ||
167 | '# Done!', | ||
168 | 'tst_entry=q', | ||
169 | ] | ||
170 | unit_test_idx=0 | ||
171 | unit_test_command_chars='' | ||
172 | unit_test_results=[] | ||
173 | def unit_test_action(active_package): | ||
174 | global unit_test_idx | ||
175 | global unit_test_command_chars | ||
176 | global unit_test_results | ||
177 | ret = CHAR_NUL | ||
178 | if unit_test_command_chars: | ||
179 | ch = unit_test_command_chars[0] | ||
180 | unit_test_command_chars = unit_test_command_chars[1:] | ||
181 | time.sleep(0.5) | ||
182 | ret = ord(ch) | ||
183 | else: | ||
184 | line = unit_test_cmnds[unit_test_idx] | ||
185 | unit_test_idx += 1 | ||
186 | line = re.sub('#.*', '', line).strip() | ||
187 | line = line.replace('<TASK>',active_package.primary[0]) | ||
188 | line = line.replace('<TAB>','\t').replace('<CR>','\n') | ||
189 | line = line.replace('<UP>','{').replace('<DOWN>','}') | ||
190 | if not line: line = 'nop=nop' | ||
191 | cmnd,value = line.split('=') | ||
192 | if cmnd == 'tst_entry': | ||
193 | unit_test_command_chars = value | ||
194 | elif cmnd == 'tst_selected': | ||
195 | active_selected = active_package.get_selected() | ||
196 | if active_selected != value: | ||
197 | unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected)) | ||
198 | ret = ord('Q') | ||
199 | else: | ||
200 | unit_test_results.append("Pass:SELFTEST:found '%s'" % (value)) | ||
201 | elif cmnd == 'tst_printed': | ||
202 | result = os.system('grep %s %s' % (value,print_file_name)) | ||
203 | if result: | ||
204 | unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name)) | ||
205 | ret = ord('Q') | ||
206 | else: | ||
207 | unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value)) | ||
208 | # Return the action (CHAR_NUL for no action til next round) | ||
209 | return(ret) | ||
210 | |||
211 | # Unit test without an interactive terminal (e.g. ptest) | ||
212 | unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM') | ||
213 | |||
214 | |||
215 | ################################################# | ||
216 | ### Window frame rendering | ||
217 | ### | ||
218 | ### By default, use the normal line art. Since | ||
219 | ### these extended characters are not ASCII, one | ||
220 | ### must use the ncurses API to render them | ||
221 | ### The alternate ASCII line art set is optionally | ||
222 | ### available via the 'do_line_art' flag | ||
223 | |||
224 | # By default, render frames using line art | ||
225 | do_line_art = True | ||
226 | |||
227 | # ASCII render set option | ||
228 | CHAR_HBAR = '-' | ||
229 | CHAR_VBAR = '|' | ||
230 | CHAR_UL_CORNER = '/' | ||
231 | CHAR_UR_CORNER = '\\' | ||
232 | CHAR_LL_CORNER = '\\' | ||
233 | CHAR_LR_CORNER = '/' | ||
234 | |||
235 | # Box frame drawing with line-art | ||
236 | def line_art_frame(box): | ||
237 | x = box.base_x | ||
238 | y = box.base_y | ||
239 | w = box.width | ||
240 | h = box.height + 1 | ||
241 | |||
242 | if do_line_art: | ||
243 | for i in range(1, w - 1): | ||
244 | box.screen.addch(y, x + i, curses.ACS_HLINE, box.color) | ||
245 | box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color) | ||
246 | body_line = "%s" % (' ' * (w - 2)) | ||
247 | for i in range(1, h - 1): | ||
248 | box.screen.addch(y + i, x, curses.ACS_VLINE, box.color) | ||
249 | box.screen.addstr(y + i, x + 1, body_line, box.color) | ||
250 | box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color) | ||
251 | box.screen.addch(y, x, curses.ACS_ULCORNER, box.color) | ||
252 | box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color) | ||
253 | box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color) | ||
254 | box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color) | ||
255 | else: | ||
256 | top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER) | ||
257 | body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR) | ||
258 | bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER) | ||
259 | tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']') | ||
260 | # Top bar | ||
261 | box.screen.addstr(y, x, top_line) | ||
262 | # Middle frame | ||
263 | for i in range(1, (h - 1)): | ||
264 | box.screen.addstr(y+i, x, body_line) | ||
265 | # Bottom bar | ||
266 | box.screen.addstr(y + (h - 1), x, bot_line) | ||
267 | |||
268 | # Connect the separate boxes | ||
269 | def line_art_fixup(box): | ||
270 | if do_line_art: | ||
271 | box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color) | ||
272 | box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color) | ||
273 | |||
274 | |||
275 | ################################################# | ||
276 | ### Ncurses box object : box frame object to display | ||
277 | ### and manage a sub-window's display elements | ||
278 | ### using basic ncurses | ||
279 | ### | ||
280 | ### Supports: | ||
281 | ### * Frame drawing, content (re)drawing | ||
282 | ### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN, | ||
283 | ### * Highlighting for active selected item | ||
284 | ### * Content sorting based on selected sort model | ||
285 | ### | ||
286 | |||
287 | class NBox(): | ||
288 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
289 | # Box description | ||
290 | self.screen = screen | ||
291 | self.label = label | ||
292 | self.primary = primary | ||
293 | self.color = curses.color_pair(CURSES_NORMAL) if screen else None | ||
294 | # Box boundaries | ||
295 | self.base_x = base_x | ||
296 | self.base_y = base_y | ||
297 | self.width = width | ||
298 | self.height = height | ||
299 | # Cursor/scroll management | ||
300 | self.cursor_enable = False | ||
301 | self.cursor_index = 0 # Absolute offset | ||
302 | self.cursor_offset = 0 # Frame centric offset | ||
303 | self.scroll_offset = 0 # Frame centric offset | ||
304 | # Box specific content | ||
305 | # Format of each entry is [package_name,is_primary_recipe,alpha_sort_key,deps_sort_key] | ||
306 | self.task_list = [] | ||
307 | |||
308 | @property | ||
309 | def inside_width(self): | ||
310 | return(self.width-2) | ||
311 | |||
312 | @property | ||
313 | def inside_height(self): | ||
314 | return(self.height-2) | ||
315 | |||
316 | # Populate the box's content, include the sort mappings and is_primary flag | ||
317 | def task_list_append(self,task_name,dep): | ||
318 | task_sort_alpha = task_name | ||
319 | task_sort_deps = dep.get_dep_sort(task_name) | ||
320 | is_primary = False | ||
321 | for primary in self.primary: | ||
322 | if task_name.startswith(primary+'.'): | ||
323 | is_primary = True | ||
324 | if SORT_BITBAKE_ENABLE: | ||
325 | task_sort_bitbake = dep.get_bb_sort(task_name) | ||
326 | self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake]) | ||
327 | else: | ||
328 | self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps]) | ||
329 | |||
330 | def reset(self): | ||
331 | self.task_list = [] | ||
332 | self.cursor_index = 0 # Absolute offset | ||
333 | self.cursor_offset = 0 # Frame centric offset | ||
334 | self.scroll_offset = 0 # Frame centric offset | ||
335 | |||
336 | # Sort the box's content based on the current sort model | ||
337 | def sort(self): | ||
338 | if SORT_ALPHA == sort_model: | ||
339 | self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA]) | ||
340 | elif SORT_DEPS == sort_model: | ||
341 | self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS]) | ||
342 | elif SORT_BITBAKE == sort_model: | ||
343 | self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE]) | ||
344 | |||
345 | # The target package list (to highlight), from the command line | ||
346 | def set_primary(self,primary): | ||
347 | self.primary = primary | ||
348 | |||
349 | # Draw the box's outside frame | ||
350 | def draw_frame(self): | ||
351 | line_art_frame(self) | ||
352 | # Title | ||
353 | self.screen.addstr(self.base_y, | ||
354 | (self.base_x + (self.width//2))-((len(self.label)+2)//2), | ||
355 | '['+self.label+']') | ||
356 | self.screen.refresh() | ||
357 | |||
358 | # Draw the box's inside text content | ||
359 | def redraw(self): | ||
360 | task_list_len = len(self.task_list) | ||
361 | # Middle frame | ||
362 | body_line = "%s" % (' ' * (self.inside_width-1) ) | ||
363 | for i in range(0,self.inside_height+1): | ||
364 | if i < (task_list_len + self.scroll_offset): | ||
365 | str_ctl = "%%-%ss" % (self.width-3) | ||
366 | # Safety assert | ||
367 | if (i + self.scroll_offset) >= task_list_len: | ||
368 | alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen) | ||
369 | break | ||
370 | |||
371 | task_obj = self.task_list[i + self.scroll_offset] | ||
372 | task = task_obj[TASK_NAME][:self.inside_width-1] | ||
373 | task_primary = task_obj[TASK_PRIMARY] | ||
374 | |||
375 | if task_primary: | ||
376 | line = str_ctl % task[:self.inside_width-1] | ||
377 | self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD) | ||
378 | else: | ||
379 | line = str_ctl % task[:self.inside_width-1] | ||
380 | self.screen.addstr(self.base_y+1+i, self.base_x+2, line) | ||
381 | else: | ||
382 | line = "%s" % (' ' * (self.inside_width-1) ) | ||
383 | self.screen.addstr(self.base_y+1+i, self.base_x+2, line) | ||
384 | self.screen.refresh() | ||
385 | |||
386 | # Show the current selected task over the bottom of the frame | ||
387 | def show_selected(self,selected_task): | ||
388 | if not selected_task: | ||
389 | selected_task = self.get_selected() | ||
390 | tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']') | ||
391 | self.screen.addstr(self.base_y + self.height, self.base_x, tag_line) | ||
392 | self.screen.addstr(self.base_y + self.height, | ||
393 | (self.base_x + (self.width//2))-((len(selected_task)+2)//2), | ||
394 | '['+selected_task+']') | ||
395 | self.screen.refresh() | ||
396 | |||
397 | # Load box with new table of content | ||
398 | def update_content(self,task_list): | ||
399 | self.task_list = task_list | ||
400 | if self.cursor_enable: | ||
401 | cursor_update(turn_on=False) | ||
402 | self.cursor_index = 0 | ||
403 | self.cursor_offset = 0 | ||
404 | self.scroll_offset = 0 | ||
405 | self.redraw() | ||
406 | if self.cursor_enable: | ||
407 | cursor_update(turn_on=True) | ||
408 | |||
409 | # Manage the box's highlighted task and blinking cursor character | ||
410 | def cursor_on(self,is_on): | ||
411 | self.cursor_enable = is_on | ||
412 | self.cursor_update(is_on) | ||
413 | |||
414 | # High-light the current pointed package, normal for released packages | ||
415 | def cursor_update(self,turn_on=True): | ||
416 | str_ctl = "%%-%ss" % (self.inside_width-1) | ||
417 | try: | ||
418 | if len(self.task_list): | ||
419 | task_obj = self.task_list[self.cursor_index] | ||
420 | task = task_obj[TASK_NAME][:self.inside_width-1] | ||
421 | task_primary = task_obj[TASK_PRIMARY] | ||
422 | task_font = curses.A_BOLD if task_primary else 0 | ||
423 | else: | ||
424 | task = '' | ||
425 | task_font = 0 | ||
426 | except Exception as e: | ||
427 | alert("CURSOR_UPDATE:%s" % (e),self.screen) | ||
428 | return | ||
429 | if turn_on: | ||
430 | self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK) | ||
431 | self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font) | ||
432 | else: | ||
433 | self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ") | ||
434 | self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font) | ||
435 | |||
436 | # Down arrow | ||
437 | def line_down(self): | ||
438 | if len(self.task_list) <= (self.cursor_index+1): | ||
439 | return | ||
440 | self.cursor_update(turn_on=False) | ||
441 | self.cursor_index += 1 | ||
442 | self.cursor_offset += 1 | ||
443 | if self.cursor_offset > (self.inside_height): | ||
444 | self.cursor_offset -= 1 | ||
445 | self.scroll_offset += 1 | ||
446 | self.redraw() | ||
447 | self.cursor_update(turn_on=True) | ||
448 | debug_frame(self) | ||
449 | |||
450 | # Up arrow | ||
451 | def line_up(self): | ||
452 | if 0 > (self.cursor_index-1): | ||
453 | return | ||
454 | self.cursor_update(turn_on=False) | ||
455 | self.cursor_index -= 1 | ||
456 | self.cursor_offset -= 1 | ||
457 | if self.cursor_offset < 0: | ||
458 | self.cursor_offset += 1 | ||
459 | self.scroll_offset -= 1 | ||
460 | self.redraw() | ||
461 | self.cursor_update(turn_on=True) | ||
462 | debug_frame(self) | ||
463 | |||
464 | # Page down | ||
465 | def page_down(self): | ||
466 | max_task = len(self.task_list)-1 | ||
467 | if max_task < self.inside_height: | ||
468 | return | ||
469 | self.cursor_update(turn_on=False) | ||
470 | self.cursor_index += 10 | ||
471 | self.cursor_index = min(self.cursor_index,max_task) | ||
472 | self.cursor_offset = min(self.inside_height,self.cursor_index) | ||
473 | self.scroll_offset = self.cursor_index - self.cursor_offset | ||
474 | self.redraw() | ||
475 | self.cursor_update(turn_on=True) | ||
476 | debug_frame(self) | ||
477 | |||
478 | # Page up | ||
479 | def page_up(self): | ||
480 | max_task = len(self.task_list)-1 | ||
481 | if max_task < self.inside_height: | ||
482 | return | ||
483 | self.cursor_update(turn_on=False) | ||
484 | self.cursor_index -= 10 | ||
485 | self.cursor_index = max(self.cursor_index,0) | ||
486 | self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index)) | ||
487 | self.scroll_offset = self.cursor_index - self.cursor_offset | ||
488 | self.redraw() | ||
489 | self.cursor_update(turn_on=True) | ||
490 | debug_frame(self) | ||
491 | |||
492 | # Return the currently selected task name for this box | ||
493 | def get_selected(self): | ||
494 | if self.task_list: | ||
495 | return(self.task_list[self.cursor_index][TASK_NAME]) | ||
496 | else: | ||
497 | return('') | ||
498 | |||
499 | ################################################# | ||
500 | ### The helper sub-windows | ||
501 | ### | ||
502 | |||
503 | # Show persistent help at the top of the screen | ||
504 | class HelpBarView(NBox): | ||
505 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
506 | super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
507 | |||
508 | def show_help(self,show): | ||
509 | self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width)) | ||
510 | if show: | ||
511 | help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'" | ||
512 | bar_size = self.inside_width - 5 - len(help) | ||
513 | self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help) | ||
514 | self.screen.refresh() | ||
515 | |||
516 | # Pop up a detailed Help box | ||
517 | class HelpBoxView(NBox): | ||
518 | def __init__(self, screen, label, primary, base_x, base_y, width, height, dep): | ||
519 | super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
520 | self.x_pos = 0 | ||
521 | self.y_pos = 0 | ||
522 | self.dep = dep | ||
523 | |||
524 | # Instantiate the pop-up help box | ||
525 | def show_help(self,show): | ||
526 | self.x_pos = self.base_x + 4 | ||
527 | self.y_pos = self.base_y + 2 | ||
528 | |||
529 | def add_line(line): | ||
530 | if line: | ||
531 | self.screen.addstr(self.y_pos,self.x_pos,line) | ||
532 | self.y_pos += 1 | ||
533 | |||
534 | # Gather some statistics | ||
535 | dep_count = 0 | ||
536 | rdep_count = 0 | ||
537 | for task_obj in self.dep.depends_model: | ||
538 | if TYPE_DEP == task_obj[DEPENDS_TYPE]: | ||
539 | dep_count += 1 | ||
540 | elif TYPE_RDEP == task_obj[DEPENDS_TYPE]: | ||
541 | rdep_count += 1 | ||
542 | |||
543 | self.draw_frame() | ||
544 | line_art_fixup(self.dep) | ||
545 | add_line("Quit : 'q' ") | ||
546 | add_line("Filter task names : '/'") | ||
547 | add_line("Tab to next box : <Tab>") | ||
548 | add_line("Select a task : <Enter>") | ||
549 | add_line("Print task's deps : 'p'") | ||
550 | add_line("Print recipe's deps : 'P'") | ||
551 | add_line(" -> '%s'" % print_file_name) | ||
552 | add_line("Sort toggle : 's'") | ||
553 | add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- ')) | ||
554 | add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- ')) | ||
555 | if SORT_BITBAKE_ENABLE: | ||
556 | add_line(" %s Bitbake order" % ('->' if (TASK_SORT_BITBAKE == sort_model) else '- ')) | ||
557 | add_line("Alternate backspace : <CTRL-H>") | ||
558 | add_line("") | ||
559 | add_line("Primary recipes = %s" % ','.join(self.primary)) | ||
560 | add_line("Task count = %4d" % len(self.dep.pkg_model)) | ||
561 | add_line("Deps count = %4d" % dep_count) | ||
562 | add_line("RDeps count = %4d" % rdep_count) | ||
563 | add_line("") | ||
564 | self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT)) | ||
565 | self.screen.refresh() | ||
566 | c = self.screen.getch() | ||
567 | |||
568 | # Show a progress bar | ||
569 | class ProgressView(NBox): | ||
570 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
571 | super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
572 | |||
573 | def progress(self,title,current,max): | ||
574 | if title: | ||
575 | self.label = title | ||
576 | else: | ||
577 | title = self.label | ||
578 | if max <=0: max = 10 | ||
579 | bar_size = self.width - 7 - len(title) | ||
580 | bar_done = int( (float(current)/float(max)) * float(bar_size) ) | ||
581 | self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done))) | ||
582 | self.screen.refresh() | ||
583 | return(current+1) | ||
584 | |||
585 | def clear(self): | ||
586 | self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width)) | ||
587 | self.screen.refresh() | ||
588 | |||
589 | # Implement a task filter bar | ||
590 | class FilterView(NBox): | ||
591 | SEARCH_NOP = 0 | ||
592 | SEARCH_GO = 1 | ||
593 | SEARCH_CANCEL = 2 | ||
594 | |||
595 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
596 | super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
597 | self.do_show = False | ||
598 | self.filter_str = "" | ||
599 | |||
600 | def clear(self,enable_show=True): | ||
601 | self.filter_str = "" | ||
602 | |||
603 | def show(self,enable_show=True): | ||
604 | self.do_show = enable_show | ||
605 | if self.do_show: | ||
606 | self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25]) | ||
607 | else: | ||
608 | self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width)) | ||
609 | self.screen.refresh() | ||
610 | |||
611 | def show_prompt(self): | ||
612 | self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ") | ||
613 | self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "") | ||
614 | |||
615 | # Keys specific to the filter box (start/stop filter keys are in the main loop) | ||
616 | def input(self,c,ch): | ||
617 | ret = self.SEARCH_GO | ||
618 | if c in (curses.KEY_BACKSPACE,CHAR_BS_H): | ||
619 | # Backspace | ||
620 | if self.filter_str: | ||
621 | self.filter_str = self.filter_str[0:-1] | ||
622 | self.show() | ||
623 | elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')): | ||
624 | # The isalnum() acts strangely with keypad(True), so explicit bounds | ||
625 | self.filter_str += ch | ||
626 | self.show() | ||
627 | else: | ||
628 | ret = self.SEARCH_NOP | ||
629 | return(ret) | ||
630 | |||
631 | |||
632 | ################################################# | ||
633 | ### The primary dependency windows | ||
634 | ### | ||
635 | |||
636 | # The main list of package tasks | ||
637 | class PackageView(NBox): | ||
638 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
639 | super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
640 | |||
641 | # Find and vertically center a selected task (from filter or from dependent box) | ||
642 | # The 'task_filter_str' can be a full or a partial (filter) task name | ||
643 | def find(self,task_filter_str): | ||
644 | found = False | ||
645 | max = self.height-2 | ||
646 | if not task_filter_str: | ||
647 | return(found) | ||
648 | for i,task_obj in enumerate(self.task_list): | ||
649 | task = task_obj[TASK_NAME] | ||
650 | if task.startswith(task_filter_str): | ||
651 | self.cursor_on(False) | ||
652 | self.cursor_index = i | ||
653 | |||
654 | # Position selected at vertical center | ||
655 | vcenter = self.inside_height // 2 | ||
656 | if self.cursor_index <= vcenter: | ||
657 | self.scroll_offset = 0 | ||
658 | self.cursor_offset = self.cursor_index | ||
659 | elif self.cursor_index >= (len(self.task_list) - vcenter - 1): | ||
660 | self.cursor_offset = self.inside_height-1 | ||
661 | self.scroll_offset = self.cursor_index - self.cursor_offset | ||
662 | else: | ||
663 | self.cursor_offset = vcenter | ||
664 | self.scroll_offset = self.cursor_index - self.cursor_offset | ||
665 | |||
666 | self.redraw() | ||
667 | self.cursor_on(True) | ||
668 | found = True | ||
669 | break | ||
670 | return(found) | ||
671 | |||
672 | # The view of dependent packages | ||
673 | class PackageDepView(NBox): | ||
674 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
675 | super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
676 | |||
677 | # The view of reverse-dependent packages | ||
678 | class PackageReverseDepView(NBox): | ||
679 | def __init__(self, screen, label, primary, base_x, base_y, width, height): | ||
680 | super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height) | ||
681 | |||
682 | |||
683 | ################################################# | ||
684 | ### DepExplorer : The parent frame and object | ||
685 | ### | ||
686 | |||
class DepExplorer(NBox):
    # Top-level application frame. Owns the three task panes (package,
    # dependencies, reverse dependencies), the help/progress/filter bars,
    # and the dependency data parsed from bitbake's depgraph event.
    def __init__(self,screen):
        # NOTE(review): 'title' is unused; the title string is passed
        # literally to the superclass below.
        title = "Task Dependency Explorer"
        super(DepExplorer, self).__init__(screen, 'Task Dependency Explorer','',0,0,80,23)

        self.screen = screen
        self.pkg_model = []        # flat list of all task names
        self.depends_model = []    # tuples of (TYPE_DEP|TYPE_RDEP, task, other_task)
        self.dep_sort_map = {}     # task -> dependency-order sort key
        self.bb_sort_map = {}      # task -> original bitbake-order sort key
        self.filter_str = ''
        self.filter_prev = 'deadbeef'  # sentinel: forces first select() to build the list

        # screen is None in the no-terminal unit-test mode; skip all views
        if self.screen:
            self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
            self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
            self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
            self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
            self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
            self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
            self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
            self.draw_frames()

    # Draw this main window's frame and all sub-windows
    def draw_frames(self):
        self.draw_frame()
        self.package_view.draw_frame()
        self.dep_view.draw_frame()
        self.reverse_view.draw_frame()
        # 'is_filter' is a module-level flag: show the filter prompt or
        # the normal help bar, depending on the current input mode
        if is_filter:
            self.filter_view.show(True)
            self.filter_view.show_prompt()
        else:
            self.help_bar_view.show_help(True)
        self.package_view.redraw()
        self.dep_view.redraw()
        self.reverse_view.redraw()
        self.show_selected(self.package_view.get_selected())
        line_art_fixup(self)

    # Parse the bitbake dependency event object ("tdepends" section) into
    # pkg_model (task names) and depends_model (forward + reverse edges)
    def parse(self, depgraph):
        for task in depgraph["tdepends"]:
            self.pkg_model.insert(0, task)
            for depend in depgraph["tdepends"][task]:
                # record each edge twice: forward and reverse
                self.depends_model.insert (0, (TYPE_DEP, task, depend))
                self.depends_model.insert (0, (TYPE_RDEP, depend, task))
        if self.screen:
            self.dep_sort_prep()

    # Prepare the dependency sort order keys
    # This method creates sort keys per recipe's tasks in
    # the order of each recipe's internal dependencies
    # Method:
    #   Filter the tasks in dep order in dep_sort_map = {}
    #   (a) Find a task that has no dependencies
    #       Ignore non-recipe specific tasks
    #   (b) Add it to the sort mapping dict with
    #       key of "<task_group>_<order>"
    #   (c) Remove it as a dependency from the other tasks
    #   (d) Repeat till all tasks are mapped
    # Use placeholders to ensure each sub-dict is instantiated
    def dep_sort_prep(self):
        self.progress_view.progress('DepSort',0,4)
        # Init the task base entries
        self.progress_view.progress('DepSort',1,4)
        dep_table = {}
        bb_index = 0
        for task in self.pkg_model:
            # First define the incoming bitbake sort order
            self.bb_sort_map[task] = "%04d" % (bb_index)
            bb_index += 1
            # task_group = recipe name, i.e. everything before the first '.'
            task_group = task[0:task.find('.')]
            if task_group not in dep_table:
                dep_table[task_group] = {}
                dep_table[task_group]['-'] = {} # Placeholder
            if task not in dep_table[task_group]:
                dep_table[task_group][task] = {}
                dep_table[task_group][task]['-'] = {} # Placeholder
        # Add the task dependency entries
        self.progress_view.progress('DepSort',2,4)
        for task_obj in self.depends_model:
            if task_obj[DEPENDS_TYPE] != TYPE_DEP:
                continue
            task = task_obj[DEPENDS_TASK]
            task_dep = task_obj[DEPENDS_DEPS]
            task_group = task[0:task.find('.')]
            # Only track depends within same group
            if task_dep.startswith(task_group+'.'):
                dep_table[task_group][task][task_dep] = 1
        self.progress_view.progress('DepSort',3,4)
        for task_group in dep_table:
            dep_index = 0
            # Whittle down the tasks of each group
            this_pass = 1
            do_loop = True
            # each sub-dict keeps its '-' placeholder, hence "> 1"
            while (len(dep_table[task_group]) > 1) and do_loop:
                this_pass += 1
                is_change = False
                delete_list = []
                for task in dep_table[task_group]:
                    if '-' == task:
                        continue
                    # only the '-' placeholder left => no unresolved deps
                    if 1 == len(dep_table[task_group][task]):
                        is_change = True
                        # No more deps, so collect this task...
                        self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
                        dep_index += 1
                        # ... remove it from other lists as resolved ...
                        for dep_task in dep_table[task_group]:
                            if task in dep_table[task_group][dep_task]:
                                del dep_table[task_group][dep_task][task]
                        # ... and remove it from the task group
                        delete_list.append(task)
                for task in delete_list:
                    del dep_table[task_group][task]
                # no task was resolvable this pass => dependency cycle;
                # bail out of this group rather than loop forever
                if not is_change:
                    alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
                    do_loop = False
                    continue
        self.progress_view.progress('',4,4)
        self.progress_view.clear()
        self.help_bar_view.show_help(True)
        if len(self.dep_sort_map) != len(self.pkg_model):
            alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)

    # Look up a dep sort order key (falls back to the task name itself)
    def get_dep_sort(self,key):
        if key in self.dep_sort_map:
            return(self.dep_sort_map[key])
        else:
            return(key)

    # Look up a bitbake sort order key (falls back to the task name itself)
    def get_bb_sort(self,key):
        if key in self.bb_sort_map:
            return(self.bb_sort_map[key])
        else:
            return(key)

    # Find the selected package in the main frame, update the dependency
    # frames content accordingly. An empty package_name means "keep the
    # current selection". only_update_dependents skips repositioning the
    # cursor in the main pane (used for simple cursor movement).
    def select(self, package_name, only_update_dependents=False):
        if not package_name:
            package_name = self.package_view.get_selected()
        # alert("SELECT:%s:" % package_name,self.screen)

        # Rebuild the main task list only when the filter actually changed
        if self.filter_str != self.filter_prev:
            self.package_view.cursor_on(False)
            # Fill of the main package task list using new filter
            self.package_view.task_list = []
            for package in self.pkg_model:
                if self.filter_str:
                    if self.filter_str in package:
                        self.package_view.task_list_append(package,self)
                else:
                    self.package_view.task_list_append(package,self)
            self.package_view.sort()
            self.filter_prev = self.filter_str

            # Old position is lost, assert new position of previous task (if still filtered in)
            self.package_view.cursor_index = 0
            self.package_view.cursor_offset = 0
            self.package_view.scroll_offset = 0
            self.package_view.redraw()
            self.package_view.cursor_on(True)

        # Make sure the selected package is in view, with implicit redraw()
        if (not only_update_dependents):
            self.package_view.find(package_name)
            # In case selected name change (i.e. filter removed previous)
            package_name = self.package_view.get_selected()

        # Filter the package's dependent list to the dependent view
        self.dep_view.reset()
        for package_def in self.depends_model:
            if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
                self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
        self.dep_view.sort()
        self.dep_view.redraw()
        # Filter the package's dependent list to the reverse dependent view
        self.reverse_view.reset()
        for package_def in self.depends_model:
            if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
                self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
        self.reverse_view.sort()
        self.reverse_view.redraw()
        self.show_selected(package_name)
        self.screen.refresh()

    # The print-to-file method: append the selected task's dep tree (or the
    # whole recipe group's, with whole_group=True) to print_file_name
    def print_deps(self,whole_group=False):
        global is_printed
        # Print the selected deptree(s) to a file
        if not is_printed:
            try:
                # Move to backup any existing file before first write
                # NOTE(review): shells out via os.system with unquoted
                # paths; fine for the fixed default names — confirm if the
                # file names ever become user-supplied
                if os.path.isfile(print_file_name):
                    os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
            except Exception as e:
                alert(e,self.screen)
                alert('',self.screen)
        print_list = []
        selected_task = self.package_view.get_selected()
        if not selected_task:
            return
        if not whole_group:
            print_list.append(selected_task)
        else:
            # Use the presorted task_group order from 'package_view'
            task_group = selected_task[0:selected_task.find('.')+1]
            for task_obj in self.package_view.task_list:
                task = task_obj[TASK_NAME]
                if task.startswith(task_group):
                    print_list.append(task)
        with open(print_file_name, "a") as fd:
            print_max = len(print_list)
            print_count = 1
            self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
            for task in print_list:
                print_count = self.progress_view.progress('',print_count,print_max)
                # select() refreshes dep_view/reverse_view for this task,
                # whose task_lists are then dumped below
                self.select(task)
                self.screen.refresh();
                # Utilize the current print output model
                if print_model == PRINT_MODEL_1:
                    print("=== Dependendency Snapshot ===",file=fd)
                    print(" = Package =",file=fd)
                    print(' '+task,file=fd)
                    # Fill in the matching dependencies
                    print(" = Dependencies =",file=fd)
                    for task_obj in self.dep_view.task_list:
                        print(' '+ task_obj[TASK_NAME],file=fd)
                    print(" = Dependent Tasks =",file=fd)
                    for task_obj in self.reverse_view.task_list:
                        print(' '+ task_obj[TASK_NAME],file=fd)
                if print_model == PRINT_MODEL_2:
                    print("=== Dependendency Snapshot ===",file=fd)
                    dep_count = len(self.dep_view.task_list) - 1
                    for i,task_obj in enumerate(self.dep_view.task_list):
                        print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
                    if not self.dep_view.task_list:
                        print('Dep =',file=fd)
                    print("Package=%s" % task,file=fd)
                    for i,task_obj in enumerate(self.reverse_view.task_list):
                        print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
                    if not self.reverse_view.task_list:
                        print('RDep =',file=fd)
            curses.napms(2000)
            self.progress_view.clear()
            self.help_bar_view.show_help(True)
            print('',file=fd)
        # Restore display to original selected task
        self.select(selected_task)
        is_printed = True
940 | |||
941 | ################################################# | ||
942 | ### Load bitbake data | ||
943 | ### | ||
944 | |||
def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
    """Drive bitbake and collect the task dependency data.

    Sends the 'generateDepTreeEvent' command (requires the -g option on
    the command line), then consumes the bitbake event stream: progress
    events update the screen (or stdout when screen is None), and the
    DepTreeGenerated event's depgraph is handed to dep.parse().

    Returns a (status, cmdline) tuple: status 0 on success, non-zero on
    any error; cmdline is the parsed action list (the caller uses it for
    the primary package names).
    """
    global bar_len_old
    bar_len_old = 0

    # Count of successive Ctrl-C presses during the event loop.
    # Fix: this was previously never initialized, so the first
    # KeyboardInterrupt raised a NameError instead of requesting a clean
    # bitbake shutdown.
    shutdown = 0

    # Support no screen
    def progress(msg,count,max):
        global bar_len_old
        if screen:
            dep.progress_view.progress(msg,count,max)
        else:
            # Text fallback: label line plus a growing '*' bar
            if msg:
                if bar_len_old:
                    bar_len_old = 0
                    print("\n")
                print(f"{msg}: ({count} of {max})")
            else:
                bar_len = int((count*40)/max)
                if bar_len_old != bar_len:
                    print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
                    bar_len_old = bar_len
    def clear():
        if screen:
            dep.progress_view.clear()
    def clear_curses(screen):
        # Leave curses mode before printing plain-text errors
        if screen:
            curses_off(screen)

    #
    # Trigger bitbake "generateDepTreeEvent"
    #

    cmdline = ''
    try:
        params.updateToServer(server, os.environ.copy())
        params.updateFromServer(server)
        cmdline = params.parseActions()
        if not cmdline:
            clear_curses(screen)
            print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
            return 1,cmdline
        if 'msg' in cmdline and cmdline['msg']:
            clear_curses(screen)
            print('ERROR: ' + cmdline['msg'])
            return 1,cmdline
        cmdline = cmdline['action']
        if not cmdline or cmdline[0] != "generateDotGraph":
            clear_curses(screen)
            print("ERROR: This UI requires the -g option")
            return 1,cmdline
        ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
        if error:
            clear_curses(screen)
            print("ERROR: running command '%s': %s" % (cmdline, error))
            return 1,cmdline
        elif not ret:
            clear_curses(screen)
            print("ERROR: running command '%s': returned %s" % (cmdline, ret))
            return 1,cmdline
    except client.Fault as x:
        clear_curses(screen)
        print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
        return 1,cmdline
    except Exception as e:
        clear_curses(screen)
        print("ERROR: in startup:\n %s" % traceback.format_exc())
        return 1,cmdline

    #
    # Receive data from bitbake
    #

    progress_total = 0
    load_bitbake = True
    quit = False
    try:
        while load_bitbake:
            try:
                event = eventHandler.waitEvent(0.25)
                if quit:
                    _, error = server.runCommand(["stateForceShutdown"])
                    clear_curses(screen)
                    if error:
                        print('Unable to cleanly stop: %s' % error)
                    break

                if event is None:
                    continue

                if isinstance(event, bb.event.CacheLoadStarted):
                    progress_total = event.total
                    progress('Loading Cache',0,progress_total)
                    continue

                if isinstance(event, bb.event.CacheLoadProgress):
                    x = event.current
                    progress('',x,progress_total)
                    continue

                if isinstance(event, bb.event.CacheLoadCompleted):
                    clear()
                    progress('Bitbake... ',1,2)
                    continue

                if isinstance(event, bb.event.ParseStarted):
                    progress_total = event.total
                    progress('Processing recipes',0,progress_total)
                    if progress_total == 0:
                        continue

                if isinstance(event, bb.event.ParseProgress):
                    x = event.current
                    progress('',x,progress_total)
                    continue

                if isinstance(event, bb.event.ParseCompleted):
                    progress('Generating dependency tree',0,3)
                    continue

                if isinstance(event, bb.event.DepTreeGenerated):
                    # The payload this whole function exists to collect
                    progress('Generating dependency tree',1,3)
                    dep.parse(event._depgraph)
                    progress('Generating dependency tree',2,3)

                if isinstance(event, bb.command.CommandCompleted):
                    load_bitbake = False
                    progress('Generating dependency tree',3,3)
                    clear()
                    if screen:
                        dep.help_bar_view.show_help(True)
                    continue

                if isinstance(event, bb.event.NoProvider):
                    clear_curses(screen)
                    print('ERROR: %s' % event)

                    _, error = server.runCommand(["stateShutdown"])
                    if error:
                        print('ERROR: Unable to cleanly shutdown: %s' % error)
                    return 1,cmdline

                if isinstance(event, bb.command.CommandFailed):
                    clear_curses(screen)
                    print('ERROR: ' + str(event))
                    return event.exitcode,cmdline

                if isinstance(event, bb.command.CommandExit):
                    clear_curses(screen)
                    return event.exitcode,cmdline

                if isinstance(event, bb.cooker.CookerExit):
                    break

                continue
            except EnvironmentError as ioerror:
                # ignore interrupted io (EINTR)
                if ioerror.args[0] == 4:
                    pass
            except KeyboardInterrupt:
                # Escalate per successive Ctrl-C: shutdown -> force -> exit
                if shutdown == 2:
                    clear_curses(screen)
                    print("\nThird Keyboard Interrupt, exit.\n")
                    break
                if shutdown == 1:
                    clear_curses(screen)
                    print("\nSecond Keyboard Interrupt, stopping...\n")
                    _, error = server.runCommand(["stateForceShutdown"])
                    if error:
                        print('Unable to cleanly stop: %s' % error)
                if shutdown == 0:
                    clear_curses(screen)
                    print("\nKeyboard Interrupt, closing down...\n")
                    _, error = server.runCommand(["stateShutdown"])
                    if error:
                        print('Unable to cleanly shutdown: %s' % error)
                shutdown = shutdown + 1
                pass
    except Exception as e:
        # Safe exit on error
        clear_curses(screen)
        print("Exception : %s" % e)
        print("Exception in startup:\n %s" % traceback.format_exc())

    return 0,cmdline
1128 | |||
1129 | ################################################# | ||
1130 | ### main | ||
1131 | ### | ||
1132 | |||
# Minimum terminal geometry needed to lay out the 80x23 main frame plus
# its border and the help/progress bars; checked by check_screen_size()
SCREEN_COL_MIN = 83
SCREEN_ROW_MIN = 26
1135 | |||
def main(server, eventHandler, params):
    """UI entry point: set up curses, load bitbake data, run the key loop.

    In unit_test_noterm mode no terminal is used: bitbake data is loaded,
    sanity-checked against the expected quilt tasks, and the pass/fail
    status is returned. Otherwise runs the interactive curses explorer
    until 'q' (or an error) and returns the exit status.
    """
    global verbose
    global sort_model
    global print_model
    global is_printed
    global is_filter
    global screen_too_small

    # NOTE(review): 'shutdown' appears unused here; Ctrl-C counting is
    # handled inside bitbake_load — confirm
    shutdown = 0
    screen_too_small = False
    quit = False

    # Unit test with no terminal?
    if unit_test_noterm:
        # Load bitbake, test that there is valid dependency data, then exit
        screen = None
        print("* UNIT TEST:START")
        dep = DepExplorer(screen)
        print("* UNIT TEST:BITBAKE FETCH")
        ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
        if ret:
            print("* UNIT TEST: BITBAKE FAILED")
            return ret
        # Test the acquired dependency data: count forward and reverse
        # edges for the 'quilt' and 'quilt-native' recipes
        quilt_native_deps = 0
        quilt_native_rdeps = 0
        quilt_deps = 0
        quilt_rdeps = 0
        for i,task_obj in enumerate(dep.depends_model):
            if TYPE_DEP == task_obj[0]:
                task = task_obj[1]
                if task.startswith('quilt-native'):
                    quilt_native_deps += 1
                elif task.startswith('quilt'):
                    quilt_deps += 1
            elif TYPE_RDEP == task_obj[0]:
                task = task_obj[1]
                if task.startswith('quilt-native'):
                    quilt_native_rdeps += 1
                elif task.startswith('quilt'):
                    quilt_rdeps += 1
        # Print results
        failed = False
        if 0 < len(dep.depends_model):
            print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
        else:
            failed = True
            print(f"FAIL:Bitbake dependency count = 0")
        if quilt_native_deps:
            print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
        else:
            failed = True
            print(f"FAIL:Quilt-native depends count = 0")
        if quilt_native_rdeps:
            print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
        else:
            failed = True
            print(f"FAIL:Quilt-native rdepends count = 0")
        if quilt_deps:
            print(f"Pass:Quilt depends count = {quilt_deps}")
        else:
            failed = True
            print(f"FAIL:Quilt depends count = 0")
        if quilt_rdeps:
            print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
        else:
            failed = True
            print(f"FAIL:Quilt rdepends count = 0")
        print("* UNIT TEST:STOP")
        return failed

    # Help method to dynamically test parent window too small
    def check_screen_size(dep, active_package):
        global screen_too_small
        rows, cols = screen.getmaxyx()
        if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
            if screen_too_small:
                # Now big enough, remove error message and redraw screen
                dep.draw_frames()
                active_package.cursor_on(True)
                screen_too_small = False
            return True
        # Test on App init
        if not dep:
            # Do not start this app if screen not big enough
            curses.endwin()
            print("")
            print("ERROR(Taskexp_cli): Mininal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
            print("Current screen is Cols=%s,Rows=%d" % (cols,rows))
            return False
        # First time window too small
        if not screen_too_small:
            active_package.cursor_on(False)
            dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
            screen_too_small = True
        return False

    # Helper method to turn off curses mode
    def curses_off(screen):
        if not screen: return
        # Safe error exit
        screen.keypad(False)
        curses.echo()
        curses.curs_set(1)
        curses.endwin()

        # Dump any accumulated unit-test results now that stdout is usable
        if unit_test_results:
            print('\nUnit Test Results:')
            for line in unit_test_results:
                print(" %s" % line)

    #
    # Initialize the ncurse environment
    #

    screen = curses.initscr()
    try:
        if not check_screen_size(None, None):
            exit(1)
        try:
            curses.start_color()
            curses.use_default_colors();
            curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE);
            curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
            curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
            curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
        except:
            # Color setup failed: bail out with a usable terminal
            curses.endwin()
            print("")
            print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
            print(" $ export TERM='xterm-256color'")
            exit(1)

        screen.keypad(True)
        curses.noecho()
        curses.curs_set(0)
        screen.refresh();
    except Exception as e:
        # Safe error exit
        curses_off(screen)
        print("Exception : %s" % e)
        print("Exception in startup:\n %s" % traceback.format_exc())
        exit(1)

    try:
        #
        # Instantiate the presentation layers
        #

        dep = DepExplorer(screen)

        #
        # Prepare bitbake
        #

        # Fetch bitbake dependency data
        ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
        if ret: return ret

        #
        # Preset the views
        #

        # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
        primary_packages = cmdline[1]
        dep.package_view.set_primary(primary_packages)
        dep.dep_view.set_primary(primary_packages)
        dep.reverse_view.set_primary(primary_packages)
        dep.help_box_view.set_primary(primary_packages)
        dep.help_bar_view.show_help(True)
        active_package = dep.package_view
        active_package.cursor_on(True)
        dep.select(primary_packages[0]+'.')
        if unit_test:
            alert('UNIT_TEST',screen)

        # Help method to start/stop the filter feature
        def filter_mode(new_filter_status):
            global is_filter
            if is_filter == new_filter_status:
                # Ignore no changes
                return
            if not new_filter_status:
                # Turn off
                curses.curs_set(0)
                #active_package.cursor_on(False)
                # NOTE(review): this assigns a *local* 'active_package';
                # without 'nonlocal' the outer loop's variable is not
                # updated. Appears benign since filter mode is only
                # entered from the package view — confirm
                active_package = dep.package_view
                active_package.cursor_on(True)
                is_filter = False
                dep.help_bar_view.show_help(True)
                dep.filter_str = ''
                dep.select('')
            else:
                # Turn on
                curses.curs_set(1)
                dep.help_bar_view.show_help(False)
                dep.filter_view.clear()
                dep.filter_view.show(True)
                dep.filter_view.show_prompt()
                is_filter = True

        #
        # Main user loop
        #

        while not quit:
            if is_filter:
                dep.filter_view.show_prompt()
            if unit_test:
                c = unit_test_action(active_package)
            else:
                c = screen.getch()
            ch = chr(c)

            # Do not draw if window now too small
            if not check_screen_size(dep,active_package):
                continue

            if verbose:
                # Echo the raw keycode in the top border for debugging
                if c == CHAR_RETURN:
                    screen.addstr(0, 4, "|%3d,CR |" % (c))
                else:
                    screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))

            # pre-map alternate filter close keys
            if is_filter and (c == CHAR_ESCAPE):
                # Alternate exit from filter
                ch = '/'
                c = ord(ch)

            # Filter and non-filter mode command keys
            # https://docs.python.org/3/library/curses.html
            if c in (curses.KEY_UP,CHAR_UP):
                active_package.line_up()
                if active_package == dep.package_view:
                    dep.select('',only_update_dependents=True)
            elif c in (curses.KEY_DOWN,CHAR_DOWN):
                active_package.line_down()
                if active_package == dep.package_view:
                    dep.select('',only_update_dependents=True)
            elif curses.KEY_PPAGE == c:
                active_package.page_up()
                if active_package == dep.package_view:
                    dep.select('',only_update_dependents=True)
            elif curses.KEY_NPAGE == c:
                active_package.page_down()
                if active_package == dep.package_view:
                    dep.select('',only_update_dependents=True)
            elif CHAR_TAB == c:
                # Tab between boxes
                active_package.cursor_on(False)
                if active_package == dep.package_view:
                    active_package = dep.dep_view
                elif active_package == dep.dep_view:
                    active_package = dep.reverse_view
                else:
                    active_package = dep.package_view
                active_package.cursor_on(True)
            elif curses.KEY_BTAB == c:
                # Shift-Tab reverse between boxes
                active_package.cursor_on(False)
                if active_package == dep.package_view:
                    active_package = dep.reverse_view
                elif active_package == dep.reverse_view:
                    active_package = dep.dep_view
                else:
                    active_package = dep.package_view
                active_package.cursor_on(True)
            elif (CHAR_RETURN == c):
                # CR to select
                selected = active_package.get_selected()
                if selected:
                    active_package.cursor_on(False)
                    active_package = dep.package_view
                    filter_mode(False)
                    dep.select(selected)
                else:
                    filter_mode(False)
                    dep.select(primary_packages[0]+'.')

            elif '/' == ch: # Enter/exit dep.filter_view
                if is_filter:
                    filter_mode(False)
                else:
                    filter_mode(True)
            elif is_filter:
                # If in filter mode, re-direct all these other keys to the filter box
                result = dep.filter_view.input(c,ch)
                dep.filter_str = dep.filter_view.filter_str
                dep.select('')

            # Non-filter mode command keys
            elif 'p' == ch:
                dep.print_deps(whole_group=False)
            elif 'P' == ch:
                dep.print_deps(whole_group=True)
            elif 'w' == ch:
                # Toggle the print model
                if print_model == PRINT_MODEL_1:
                    print_model = PRINT_MODEL_2
                else:
                    print_model = PRINT_MODEL_1
            elif 's' == ch:
                # Toggle the sort model: DEPS -> ALPHA [-> BITBAKE] -> DEPS
                if sort_model == SORT_DEPS:
                    sort_model = SORT_ALPHA
                elif sort_model == SORT_ALPHA:
                    if SORT_BITBAKE_ENABLE:
                        sort_model = TASK_SORT_BITBAKE
                    else:
                        sort_model = SORT_DEPS
                else:
                    sort_model = SORT_DEPS
                active_package.cursor_on(False)
                current_task = active_package.get_selected()
                dep.package_view.sort()
                dep.dep_view.sort()
                dep.reverse_view.sort()
                active_package = dep.package_view
                active_package.cursor_on(True)
                dep.select(current_task)
                # Announce the new sort model
                alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
                alert('',screen)

            elif 'q' == ch:
                quit = True
            elif ch in ('h','?'):
                dep.help_box_view.show_help(True)
                dep.select(active_package.get_selected())

            #
            # Debugging commands
            #

            elif 'V' == ch:
                verbose = not verbose
                alert('Verbose=%s' % str(verbose),screen)
                alert('',screen)
            elif 'R' == ch:
                screen.refresh()
            elif 'B' == ch:
                # Progress bar unit test
                dep.progress_view.progress('Test',0,40)
                curses.napms(1000)
                dep.progress_view.progress('',10,40)
                curses.napms(1000)
                dep.progress_view.progress('',20,40)
                curses.napms(1000)
                dep.progress_view.progress('',30,40)
                curses.napms(1000)
                dep.progress_view.progress('',40,40)
                curses.napms(1000)
                dep.progress_view.clear()
                dep.help_bar_view.show_help(True)
            elif 'Q' == ch:
                # Simulated error
                curses_off(screen)
                print('ERROR: simulated error exit')
                return 1

        # Safe exit
        curses_off(screen)
    except Exception as e:
        # Safe exit on error
        curses_off(screen)
        print("Exception : %s" % e)
        print("Exception in startup:\n %s" % traceback.format_exc())

    # Reminder to pick up your printed results
    if is_printed:
        print("")
        print("You have output ready!")
        print(" * Your printed dependency file is: %s" % print_file_name)
        print(" * Your previous results saved in: %s" % print_file_backup_name)
        print("")
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py index ec5bd4f105..6bd21f1844 100644 --- a/bitbake/lib/bb/ui/toasterui.py +++ b/bitbake/lib/bb/ui/toasterui.py | |||
@@ -385,7 +385,7 @@ def main(server, eventHandler, params): | |||
385 | main.shutdown = 1 | 385 | main.shutdown = 1 |
386 | 386 | ||
387 | logger.info("ToasterUI build done, brbe: %s", brbe) | 387 | logger.info("ToasterUI build done, brbe: %s", brbe) |
388 | continue | 388 | break |
389 | 389 | ||
390 | if isinstance(event, (bb.command.CommandCompleted, | 390 | if isinstance(event, (bb.command.CommandCompleted, |
391 | bb.command.CommandFailed, | 391 | bb.command.CommandFailed, |
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py index 8607d0523b..c2f830d530 100644 --- a/bitbake/lib/bb/ui/uievent.py +++ b/bitbake/lib/bb/ui/uievent.py | |||
@@ -44,7 +44,7 @@ class BBUIEventQueue: | |||
44 | for count_tries in range(5): | 44 | for count_tries in range(5): |
45 | ret = self.BBServer.registerEventHandler(self.host, self.port) | 45 | ret = self.BBServer.registerEventHandler(self.host, self.port) |
46 | 46 | ||
47 | if isinstance(ret, collections.Iterable): | 47 | if isinstance(ret, collections.abc.Iterable): |
48 | self.EventHandle, error = ret | 48 | self.EventHandle, error = ret |
49 | else: | 49 | else: |
50 | self.EventHandle = ret | 50 | self.EventHandle = ret |
@@ -65,35 +65,27 @@ class BBUIEventQueue: | |||
65 | self.server = server | 65 | self.server = server |
66 | 66 | ||
67 | self.t = threading.Thread() | 67 | self.t = threading.Thread() |
68 | self.t.setDaemon(True) | 68 | self.t.daemon = True |
69 | self.t.run = self.startCallbackHandler | 69 | self.t.run = self.startCallbackHandler |
70 | self.t.start() | 70 | self.t.start() |
71 | 71 | ||
72 | def getEvent(self): | 72 | def getEvent(self): |
73 | 73 | with bb.utils.lock_timeout(self.eventQueueLock): | |
74 | self.eventQueueLock.acquire() | 74 | if not self.eventQueue: |
75 | 75 | return None | |
76 | if len(self.eventQueue) == 0: | 76 | item = self.eventQueue.pop(0) |
77 | self.eventQueueLock.release() | 77 | if not self.eventQueue: |
78 | return None | 78 | self.eventQueueNotify.clear() |
79 | 79 | return item | |
80 | item = self.eventQueue.pop(0) | ||
81 | |||
82 | if len(self.eventQueue) == 0: | ||
83 | self.eventQueueNotify.clear() | ||
84 | |||
85 | self.eventQueueLock.release() | ||
86 | return item | ||
87 | 80 | ||
    def waitEvent(self, delay):
        """Wait up to *delay* seconds for an event, then dequeue one.

        Blocks on the notify flag (set whenever the queue is non-empty) and
        delegates to getEvent(); returns None if the wait timed out with no
        event available.
        """
        self.eventQueueNotify.wait(delay)
        return self.getEvent()
91 | 84 | ||
    def queue_event(self, event):
        """Append *event* to the queue and set the notify flag to wake waiters."""
        with bb.utils.lock_timeout(self.eventQueueLock):
            self.eventQueue.append(event)
            self.eventQueueNotify.set()
96 | self.eventQueueLock.release() | ||
97 | 89 | ||
    def send_event(self, event):
        """Deserialize a pickled *event* payload and queue it.

        NOTE(review): pickle.loads executes arbitrary code if the payload is
        attacker-controlled — presumably events only come from the local
        bitbake server; confirm no untrusted peer can reach this endpoint.
        """
        self.queue_event(pickle.loads(event))
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py index 48d808ae28..82913e0da8 100644 --- a/bitbake/lib/bb/ui/uihelper.py +++ b/bitbake/lib/bb/ui/uihelper.py | |||
@@ -49,9 +49,11 @@ class BBUIHelper: | |||
49 | tid = event._fn + ":" + event._task | 49 | tid = event._fn + ":" + event._task |
50 | removetid(event.pid, tid) | 50 | removetid(event.pid, tid) |
51 | self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)}) | 51 | self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)}) |
52 | elif isinstance(event, bb.runqueue.runQueueTaskStarted): | 52 | elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted): |
53 | self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1 | 53 | self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed |
54 | self.tasknumber_total = event.stats.total | 54 | self.tasknumber_total = event.stats.total |
55 | self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered | ||
56 | self.setscene_total = event.stats.setscene_total | ||
55 | self.needUpdate = True | 57 | self.needUpdate = True |
56 | elif isinstance(event, bb.build.TaskProgress): | 58 | elif isinstance(event, bb.build.TaskProgress): |
57 | if event.pid > 0 and event.pid in self.pidmap: | 59 | if event.pid > 0 and event.pid in self.pidmap: |
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index b282d09abf..ebee65d3dd 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
@@ -13,10 +13,12 @@ import errno | |||
13 | import logging | 13 | import logging |
14 | import bb | 14 | import bb |
15 | import bb.msg | 15 | import bb.msg |
16 | import locale | ||
16 | import multiprocessing | 17 | import multiprocessing |
17 | import fcntl | 18 | import fcntl |
18 | import importlib | 19 | import importlib |
19 | from importlib import machinery | 20 | import importlib.machinery |
21 | import importlib.util | ||
20 | import itertools | 22 | import itertools |
21 | import subprocess | 23 | import subprocess |
22 | import glob | 24 | import glob |
@@ -26,6 +28,11 @@ import errno | |||
26 | import signal | 28 | import signal |
27 | import collections | 29 | import collections |
28 | import copy | 30 | import copy |
31 | import ctypes | ||
32 | import random | ||
33 | import socket | ||
34 | import struct | ||
35 | import tempfile | ||
29 | from subprocess import getstatusoutput | 36 | from subprocess import getstatusoutput |
30 | from contextlib import contextmanager | 37 | from contextlib import contextmanager |
31 | from ctypes import cdll | 38 | from ctypes import cdll |
@@ -43,7 +50,7 @@ def clean_context(): | |||
43 | 50 | ||
44 | def get_context(): | 51 | def get_context(): |
45 | return _context | 52 | return _context |
46 | 53 | ||
47 | 54 | ||
48 | def set_context(ctx): | 55 | def set_context(ctx): |
49 | _context = ctx | 56 | _context = ctx |
@@ -205,8 +212,8 @@ def explode_dep_versions2(s, *, sort=True): | |||
205 | inversion = True | 212 | inversion = True |
206 | # This list is based on behavior and supported comparisons from deb, opkg and rpm. | 213 | # This list is based on behavior and supported comparisons from deb, opkg and rpm. |
207 | # | 214 | # |
208 | # Even though =<, <<, ==, !=, =>, and >> may not be supported, | 215 | # Even though =<, <<, ==, !=, =>, and >> may not be supported, |
209 | # we list each possibly valid item. | 216 | # we list each possibly valid item. |
210 | # The build system is responsible for validation of what it supports. | 217 | # The build system is responsible for validation of what it supports. |
211 | if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): | 218 | if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): |
212 | lastcmp = i[0:2] | 219 | lastcmp = i[0:2] |
@@ -251,7 +258,7 @@ def explode_dep_versions(s): | |||
251 | """ | 258 | """ |
252 | Take an RDEPENDS style string of format: | 259 | Take an RDEPENDS style string of format: |
253 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 260 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." |
254 | skip null value and items appeared in dependancy string multiple times | 261 | skip null value and items appeared in dependency string multiple times |
255 | and return a dictionary of dependencies and versions. | 262 | and return a dictionary of dependencies and versions. |
256 | """ | 263 | """ |
257 | r = explode_dep_versions2(s) | 264 | r = explode_dep_versions2(s) |
@@ -340,7 +347,7 @@ def _print_exception(t, value, tb, realfile, text, context): | |||
340 | exception = traceback.format_exception_only(t, value) | 347 | exception = traceback.format_exception_only(t, value) |
341 | error.append('Error executing a python function in %s:\n' % realfile) | 348 | error.append('Error executing a python function in %s:\n' % realfile) |
342 | 349 | ||
343 | # Strip 'us' from the stack (better_exec call) unless that was where the | 350 | # Strip 'us' from the stack (better_exec call) unless that was where the |
344 | # error came from | 351 | # error came from |
345 | if tb.tb_next is not None: | 352 | if tb.tb_next is not None: |
346 | tb = tb.tb_next | 353 | tb = tb.tb_next |
@@ -379,7 +386,7 @@ def _print_exception(t, value, tb, realfile, text, context): | |||
379 | 386 | ||
380 | error.append("Exception: %s" % ''.join(exception)) | 387 | error.append("Exception: %s" % ''.join(exception)) |
381 | 388 | ||
382 | # If the exception is from spwaning a task, let's be helpful and display | 389 | # If the exception is from spawning a task, let's be helpful and display |
383 | # the output (which hopefully includes stderr). | 390 | # the output (which hopefully includes stderr). |
384 | if isinstance(value, subprocess.CalledProcessError) and value.output: | 391 | if isinstance(value, subprocess.CalledProcessError) and value.output: |
385 | error.append("Subprocess output:") | 392 | error.append("Subprocess output:") |
@@ -400,7 +407,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception | |||
400 | code = better_compile(code, realfile, realfile) | 407 | code = better_compile(code, realfile, realfile) |
401 | try: | 408 | try: |
402 | exec(code, get_context(), context) | 409 | exec(code, get_context(), context) |
403 | except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError): | 410 | except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError): |
404 | # Error already shown so passthrough, no need for traceback | 411 | # Error already shown so passthrough, no need for traceback |
405 | raise | 412 | raise |
406 | except Exception as e: | 413 | except Exception as e: |
@@ -427,12 +434,14 @@ def better_eval(source, locals, extraglobals = None): | |||
427 | return eval(source, ctx, locals) | 434 | return eval(source, ctx, locals) |
428 | 435 | ||
429 | @contextmanager | 436 | @contextmanager |
430 | def fileslocked(files): | 437 | def fileslocked(files, *args, **kwargs): |
431 | """Context manager for locking and unlocking file locks.""" | 438 | """Context manager for locking and unlocking file locks.""" |
432 | locks = [] | 439 | locks = [] |
433 | if files: | 440 | if files: |
434 | for lockfile in files: | 441 | for lockfile in files: |
435 | locks.append(bb.utils.lockfile(lockfile)) | 442 | l = bb.utils.lockfile(lockfile, *args, **kwargs) |
443 | if l is not None: | ||
444 | locks.append(l) | ||
436 | 445 | ||
437 | try: | 446 | try: |
438 | yield | 447 | yield |
@@ -451,9 +460,16 @@ def lockfile(name, shared=False, retry=True, block=False): | |||
451 | consider the possibility of sending a signal to the process to break | 460 | consider the possibility of sending a signal to the process to break |
452 | out - at which point you want block=True rather than retry=True. | 461 | out - at which point you want block=True rather than retry=True. |
453 | """ | 462 | """ |
463 | basename = os.path.basename(name) | ||
464 | if len(basename) > 255: | ||
465 | root, ext = os.path.splitext(basename) | ||
466 | basename = root[:255 - len(ext)] + ext | ||
467 | |||
454 | dirname = os.path.dirname(name) | 468 | dirname = os.path.dirname(name) |
455 | mkdirhier(dirname) | 469 | mkdirhier(dirname) |
456 | 470 | ||
471 | name = os.path.join(dirname, basename) | ||
472 | |||
457 | if not os.access(dirname, os.W_OK): | 473 | if not os.access(dirname, os.W_OK): |
458 | logger.error("Unable to acquire lock '%s', directory is not writable", | 474 | logger.error("Unable to acquire lock '%s', directory is not writable", |
459 | name) | 475 | name) |
@@ -487,7 +503,7 @@ def lockfile(name, shared=False, retry=True, block=False): | |||
487 | return lf | 503 | return lf |
488 | lf.close() | 504 | lf.close() |
489 | except OSError as e: | 505 | except OSError as e: |
490 | if e.errno == errno.EACCES: | 506 | if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG: |
491 | logger.error("Unable to acquire lock '%s', %s", | 507 | logger.error("Unable to acquire lock '%s', %s", |
492 | e.strerror, name) | 508 | e.strerror, name) |
493 | sys.exit(1) | 509 | sys.exit(1) |
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    try:
        # usedforsecurity=False lets MD5 be used on security-restricted
        # (e.g. FIPS) configurations, since this is only a checksum.
        hasher = hashlib.new('MD5', usedforsecurity=False)
    except TypeError:
        # Some configurations don't appear to support two arguments
        hasher = hashlib.new('MD5')
    return _hasher(hasher, filename)
536 | 557 | ||
537 | def sha256_file(filename): | 558 | def sha256_file(filename): |
538 | """ | 559 | """ |
@@ -583,11 +604,25 @@ def preserved_envvars(): | |||
583 | v = [ | 604 | v = [ |
584 | 'BBPATH', | 605 | 'BBPATH', |
585 | 'BB_PRESERVE_ENV', | 606 | 'BB_PRESERVE_ENV', |
586 | 'BB_ENV_WHITELIST', | 607 | 'BB_ENV_PASSTHROUGH_ADDITIONS', |
587 | 'BB_ENV_EXTRAWHITE', | ||
588 | ] | 608 | ] |
589 | return v + preserved_envvars_exported() | 609 | return v + preserved_envvars_exported() |
590 | 610 | ||
def check_system_locale():
    """Make sure the required system locale are available and configured"""
    # Remember the current locale so the probe below leaves it untouched.
    default_locale = locale.getlocale(locale.LC_CTYPE)

    try:
        locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
    except locale.Error:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # not swallowed; setlocale() signals failure via locale.Error.
        sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
    else:
        locale.setlocale(locale.LC_CTYPE, default_locale)

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
                 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
625 | |||
591 | def filter_environment(good_vars): | 626 | def filter_environment(good_vars): |
592 | """ | 627 | """ |
593 | Create a pristine environment for bitbake. This will remove variables that | 628 | Create a pristine environment for bitbake. This will remove variables that |
@@ -615,21 +650,21 @@ def filter_environment(good_vars): | |||
615 | 650 | ||
def approved_variables():
    """
    Determine and return the list of variables which are approved
    to remain in the environment.
    """
    # BB_PRESERVE_ENV overrides everything: keep the whole environment.
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()

    # An explicit passthrough list replaces the built-in defaults entirely.
    if 'BB_ENV_PASSTHROUGH' in os.environ:
        allowed = os.environ['BB_ENV_PASSTHROUGH'].split()
        allowed.extend(['BB_ENV_PASSTHROUGH'])
        return allowed

    # Otherwise start from the built-in set plus any requested additions.
    allowed = preserved_envvars()
    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
        allowed.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
        if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in allowed:
            allowed.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
    return allowed
634 | 669 | ||
635 | def clean_environment(): | 670 | def clean_environment(): |
@@ -683,8 +718,8 @@ def remove(path, recurse=False, ionice=False): | |||
683 | return | 718 | return |
684 | if recurse: | 719 | if recurse: |
685 | for name in glob.glob(path): | 720 | for name in glob.glob(path): |
686 | if _check_unsafe_delete_path(path): | 721 | if _check_unsafe_delete_path(name): |
687 | raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) | 722 | raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name) |
688 | # shutil.rmtree(name) would be ideal but its too slow | 723 | # shutil.rmtree(name) would be ideal but its too slow |
689 | cmd = [] | 724 | cmd = [] |
690 | if ionice: | 725 | if ionice: |
@@ -710,9 +745,9 @@ def prunedir(topdir, ionice=False): | |||
710 | # but thats possibly insane and suffixes is probably going to be small | 745 | # but thats possibly insane and suffixes is probably going to be small |
711 | # | 746 | # |
712 | def prune_suffix(var, suffixes, d): | 747 | def prune_suffix(var, suffixes, d): |
713 | """ | 748 | """ |
714 | See if var ends with any of the suffixes listed and | 749 | See if var ends with any of the suffixes listed and |
715 | remove it if found | 750 | remove it if found |
716 | """ | 751 | """ |
717 | for suffix in suffixes: | 752 | for suffix in suffixes: |
718 | if suffix and var.endswith(suffix): | 753 | if suffix and var.endswith(suffix): |
@@ -723,7 +758,8 @@ def mkdirhier(directory): | |||
723 | """Create a directory like 'mkdir -p', but does not complain if | 758 | """Create a directory like 'mkdir -p', but does not complain if |
724 | directory already exists like os.makedirs | 759 | directory already exists like os.makedirs |
725 | """ | 760 | """ |
726 | 761 | if '${' in str(directory): | |
762 | bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory)) | ||
727 | try: | 763 | try: |
728 | os.makedirs(directory) | 764 | os.makedirs(directory) |
729 | except OSError as e: | 765 | except OSError as e: |
@@ -742,7 +778,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
742 | if not sstat: | 778 | if not sstat: |
743 | sstat = os.lstat(src) | 779 | sstat = os.lstat(src) |
744 | except Exception as e: | 780 | except Exception as e: |
745 | print("movefile: Stating source file failed...", e) | 781 | logger.warning("movefile: Stating source file failed...", e) |
746 | return None | 782 | return None |
747 | 783 | ||
748 | destexists = 1 | 784 | destexists = 1 |
@@ -770,7 +806,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
770 | os.unlink(src) | 806 | os.unlink(src) |
771 | return os.lstat(dest) | 807 | return os.lstat(dest) |
772 | except Exception as e: | 808 | except Exception as e: |
773 | print("movefile: failed to properly create symlink:", dest, "->", target, e) | 809 | logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e) |
774 | return None | 810 | return None |
775 | 811 | ||
776 | renamefailed = 1 | 812 | renamefailed = 1 |
@@ -782,12 +818,12 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
782 | 818 | ||
783 | if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: | 819 | if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: |
784 | try: | 820 | try: |
785 | os.rename(src, destpath) | 821 | bb.utils.rename(src, destpath) |
786 | renamefailed = 0 | 822 | renamefailed = 0 |
787 | except Exception as e: | 823 | except Exception as e: |
788 | if e.errno != errno.EXDEV: | 824 | if e.errno != errno.EXDEV: |
789 | # Some random error. | 825 | # Some random error. |
790 | print("movefile: Failed to move", src, "to", dest, e) | 826 | logger.warning("movefile: Failed to move", src, "to", dest, e) |
791 | return None | 827 | return None |
792 | # Invalid cross-device-link 'bind' mounted or actually Cross-Device | 828 | # Invalid cross-device-link 'bind' mounted or actually Cross-Device |
793 | 829 | ||
@@ -796,16 +832,16 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
796 | if stat.S_ISREG(sstat[stat.ST_MODE]): | 832 | if stat.S_ISREG(sstat[stat.ST_MODE]): |
797 | try: # For safety copy then move it over. | 833 | try: # For safety copy then move it over. |
798 | shutil.copyfile(src, destpath + "#new") | 834 | shutil.copyfile(src, destpath + "#new") |
799 | os.rename(destpath + "#new", destpath) | 835 | bb.utils.rename(destpath + "#new", destpath) |
800 | didcopy = 1 | 836 | didcopy = 1 |
801 | except Exception as e: | 837 | except Exception as e: |
802 | print('movefile: copy', src, '->', dest, 'failed.', e) | 838 | logger.warning('movefile: copy', src, '->', dest, 'failed.', e) |
803 | return None | 839 | return None |
804 | else: | 840 | else: |
805 | #we don't yet handle special, so we need to fall back to /bin/mv | 841 | #we don't yet handle special, so we need to fall back to /bin/mv |
806 | a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") | 842 | a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") |
807 | if a[0] != 0: | 843 | if a[0] != 0: |
808 | print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) | 844 | logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) |
809 | return None # failure | 845 | return None # failure |
810 | try: | 846 | try: |
811 | if didcopy: | 847 | if didcopy: |
@@ -813,7 +849,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
813 | os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown | 849 | os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown |
814 | os.unlink(src) | 850 | os.unlink(src) |
815 | except Exception as e: | 851 | except Exception as e: |
816 | print("movefile: Failed to chown/chmod/unlink", dest, e) | 852 | logger.warning("movefile: Failed to chown/chmod/unlink", dest, e) |
817 | return None | 853 | return None |
818 | 854 | ||
819 | if newmtime: | 855 | if newmtime: |
@@ -874,7 +910,7 @@ def copyfile(src, dest, newmtime = None, sstat = None): | |||
874 | 910 | ||
875 | # For safety copy then move it over. | 911 | # For safety copy then move it over. |
876 | shutil.copyfile(src, dest + "#new") | 912 | shutil.copyfile(src, dest + "#new") |
877 | os.rename(dest + "#new", dest) | 913 | bb.utils.rename(dest + "#new", dest) |
878 | except Exception as e: | 914 | except Exception as e: |
879 | logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) | 915 | logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) |
880 | return False | 916 | return False |
@@ -965,13 +1001,16 @@ def umask(new_mask): | |||
965 | os.umask(current_mask) | 1001 | os.umask(current_mask) |
966 | 1002 | ||
967 | def to_boolean(string, default=None): | 1003 | def to_boolean(string, default=None): |
968 | """ | 1004 | """ |
969 | Check input string and return boolean value True/False/None | 1005 | Check input string and return boolean value True/False/None |
970 | depending upon the checks | 1006 | depending upon the checks |
971 | """ | 1007 | """ |
972 | if not string: | 1008 | if not string: |
973 | return default | 1009 | return default |
974 | 1010 | ||
1011 | if isinstance(string, int): | ||
1012 | return string != 0 | ||
1013 | |||
975 | normalized = string.lower() | 1014 | normalized = string.lower() |
976 | if normalized in ("y", "yes", "1", "true"): | 1015 | if normalized in ("y", "yes", "1", "true"): |
977 | return True | 1016 | return True |
@@ -1103,7 +1142,10 @@ def get_referenced_vars(start_expr, d): | |||
1103 | 1142 | ||
1104 | 1143 | ||
def cpu_count():
    """Return the number of CPUs this process may actually run on.

    Prefers the scheduler affinity mask (reflects taskset/cpuset limits);
    falls back to the raw CPU count where affinity is unavailable.
    """
    try:
        usable_cpus = os.sched_getaffinity(0)
    except OSError:
        return multiprocessing.cpu_count()
    return len(usable_cpus)
1107 | 1149 | ||
1108 | def nonblockingfd(fd): | 1150 | def nonblockingfd(fd): |
1109 | fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) | 1151 | fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) |
@@ -1178,7 +1220,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1178 | variables: a list of variable names to look for. Functions | 1220 | variables: a list of variable names to look for. Functions |
1179 | may also be specified, but must be specified with '()' at | 1221 | may also be specified, but must be specified with '()' at |
1180 | the end of the name. Note that the function doesn't have | 1222 | the end of the name. Note that the function doesn't have |
1181 | any intrinsic understanding of _append, _prepend, _remove, | 1223 | any intrinsic understanding of :append, :prepend, :remove, |
1182 | or overrides, so these are considered as part of the name. | 1224 | or overrides, so these are considered as part of the name. |
1183 | These values go into a regular expression, so regular | 1225 | These values go into a regular expression, so regular |
1184 | expression syntax is allowed. | 1226 | expression syntax is allowed. |
@@ -1590,33 +1632,89 @@ def set_process_name(name): | |||
1590 | except: | 1632 | except: |
1591 | pass | 1633 | pass |
1592 | 1634 | ||
def enable_loopback_networking():
    """Configure and bring up the 'lo' network interface via raw ioctls.

    Assigns 127.0.0.1 with netmask 255.0.0.0 and sets the interface
    UP/RUNNING using SIOCSIFADDR/SIOCSIFFLAGS/SIOCSIFNETMASK directly, so no
    external tools (ip/ifconfig) are required.
    NOTE(review): setting interface state needs network-admin privileges in
    the current network namespace — presumably called after disable_network()
    has created a fresh namespace; confirm with callers.
    """
    # From bits/ioctls.h
    SIOCGIFFLAGS = 0x8913
    SIOCSIFFLAGS = 0x8914
    SIOCSIFADDR = 0x8916
    SIOCSIFNETMASK = 0x891C

    # if.h
    IFF_UP = 0x1
    IFF_RUNNING = 0x40

    # bits/socket.h
    AF_INET = 2

    # char ifr_name[IFNAMSIZ=16]
    ifr_name = struct.pack("@16s", b"lo")
    def netdev_req(fd, req, data = b""):
        # Pad and add interface name
        data = ifr_name + data + (b'\x00' * (16 - len(data)))
        # Return all data after interface name
        return fcntl.ioctl(fd, req, data)[16:]

    # Any AF_INET datagram socket works as an ioctl handle for these requests.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
        fd = sock.fileno()

        # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
        netdev_req(fd, SIOCSIFADDR, req)

        # short ifr_flags
        flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
        flags |= IFF_UP | IFF_RUNNING
        netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))

        # struct sockaddr_in ifr_netmask
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
        netdev_req(fd, SIOCSIFNETMASK, req)
1672 | |||
def disable_network(uid=None, gid=None):
    """
    Disable networking in the current process if the kernel supports it, else
    just return after logging to debug. To do this we need to create a new user
    namespace, then map back to the original uid/gid.

    uid/gid: the ids to map back to inside the new user namespace; default to
    the current process's real uid/gid.
    """
    libc = ctypes.CDLL('libc.so.6')

    # From sched.h
    # New user namespace
    CLONE_NEWUSER = 0x10000000
    # New network namespace
    CLONE_NEWNET = 0x40000000

    if uid is None:
        uid = os.getuid()
    if gid is None:
        gid = os.getgid()

    # Creating a user namespace alongside the network namespace lets this
    # work without admin privileges; failure is treated as best-effort.
    ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
    if ret != 0:
        logger.debug("System doesn't support disabling network without admin privs")
        return
    # Map our original uid/gid into the new user namespace so ownership and
    # permission checks keep behaving as before.
    with open("/proc/self/uid_map", "w") as f:
        f.write("%s %s 1" % (uid, uid))
    # Denying setgroups is required before an unprivileged process may write
    # gid_map (see user_namespaces(7)).
    with open("/proc/self/setgroups", "w") as f:
        f.write("deny")
    with open("/proc/self/gid_map", "w") as f:
        f.write("%s %s 1" % (gid, gid))
1702 | |||
def export_proxies(d):
    """Export common proxies variables from datastore to environment.

    Copies every variable the fetcher considers part of its environment
    (see bb.fetch2.get_fetcher_environment) from datastore *d* into
    os.environ.
    """
    # Fix: the docstring used to sit AFTER this import, making it a no-op
    # string literal instead of the function's __doc__.
    # Imported lazily to avoid a module-level dependency on bb.fetch2.
    from bb.fetch2 import get_fetcher_environment
    newenv = get_fetcher_environment(d)
    for v in newenv:
        os.environ[v] = newenv[v]
1613 | 1709 | ||
1614 | def load_plugins(logger, plugins, pluginpath): | 1710 | def load_plugins(logger, plugins, pluginpath): |
1615 | def load_plugin(name): | 1711 | def load_plugin(name): |
1616 | logger.debug('Loading plugin %s' % name) | 1712 | logger.debug('Loading plugin %s' % name) |
1617 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) | 1713 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) |
1618 | if spec: | 1714 | if spec: |
1619 | return spec.loader.load_module() | 1715 | mod = importlib.util.module_from_spec(spec) |
1716 | spec.loader.exec_module(mod) | ||
1717 | return mod | ||
1620 | 1718 | ||
1621 | logger.debug('Loading plugins from %s...' % pluginpath) | 1719 | logger.debug('Loading plugins from %s...' % pluginpath) |
1622 | 1720 | ||
@@ -1669,3 +1767,102 @@ def is_semver(version): | |||
1669 | return False | 1767 | return False |
1670 | 1768 | ||
1671 | return True | 1769 | return True |
1770 | |||
# Wrapper around os.rename which can handle cross device problems
# e.g. from container filesystems
def rename(src, dst):
    """Rename *src* to *dst*, falling back to a copy-and-delete move when
    the two paths live on different filesystems.
    """
    try:
        os.rename(src, dst)
    except OSError as err:
        # Use the symbolic errno rather than the magic number 18, which is
        # Linux-specific.
        if err.errno == errno.EXDEV:
            # Invalid cross-device link error: shutil.move copies across
            # filesystems and removes the source.
            shutil.move(src, dst)
        else:
            # Bare raise preserves the original traceback.
            raise
1782 | |||
@contextmanager
def environment(**envvars):
    """
    Context manager to selectively update the environment with the specified mapping.
    """
    saved = dict(os.environ)
    try:
        os.environ.update(envvars)
        yield
    finally:
        # Restore only the keys we touched: put back saved values and drop
        # keys that did not exist beforehand.
        for key in envvars:
            if key in saved:
                os.environ[key] = saved[key]
            else:
                os.environ.pop(key, None)
1798 | |||
def is_local_uid(uid=''):
    """
    Check whether uid is a local one or not.
    Can't use pwd module since it gets all UIDs, not local ones only.

    uid: numeric uid (int or str); defaults to the current process uid.
    Returns True if the uid has an entry in /etc/passwd, else False.
    """
    # Compare against '' explicitly: the previous `if not uid:` check wrongly
    # replaced a caller-supplied uid of 0 (root) with our own uid.
    if uid == '':
        uid = os.getuid()
    uid = str(uid)
    with open('/etc/passwd', 'r') as f:
        for line in f:
            fields = line.split(':')
            # Skip malformed lines without a uid field
            if len(fields) < 3:
                continue
            if fields[2] == uid:
                return True
    return False
1814 | |||
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
    """
    Generates a unique filename, independent of time.

    mkstemp() in glibc (at least) generates unique file names based on the
    current system time. When combined with highly parallel builds, and
    operating over NFS (e.g. shared sstate/downloads) this can result in
    conflicts and race conditions.

    This function adds additional entropy to the file name so that a collision
    is independent of time and thus extremely unlikely.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
    entropy = "".join(random.choices(alphabet, k=20))
    base = prefix if prefix else tempfile.gettempprefix()
    return tempfile.mkstemp(suffix=suffix, prefix=base + entropy, dir=dir, text=text)
1833 | |||
def path_is_descendant(descendant, ancestor):
    """
    Returns True if the path `descendant` is a descendant of `ancestor`
    (including being equivalent to `ancestor` itself). Otherwise returns False.
    Correctly accounts for symlinks, bind mounts, etc. by using
    os.path.samestat() to compare paths

    May raise any exception that os.stat() raises
    """

    ancestor_stat = os.stat(ancestor)

    # Walk up each directory component of descendant, comparing each level
    # against the ancestor with samestat() (robust to symlinks/bind mounts).
    check_dir = os.path.abspath(descendant)
    while True:
        if os.path.samestat(os.stat(check_dir), ancestor_stat):
            return True
        parent = os.path.dirname(check_dir)
        if parent == check_dir:
            # Reached the filesystem root without a match. (The previous
            # rstrip('/') loop dropped the root itself from the comparison,
            # so "/" was never treated as anyone's ancestor.)
            return False
        check_dir = parent
1856 | |||
# If we don't have a timeout of some kind and a process/thread exits badly (for example
# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
@contextmanager
def lock_timeout(lock):
    """Acquire *lock* with a five minute timeout; hard-exit the process on failure."""
    acquired = lock.acquire(timeout=5 * 60)
    if not acquired:
        # Assume deadlock; os._exit() terminates immediately without cleanup,
        # so no release is attempted for a lock we never got.
        os._exit(1)
    try:
        yield acquired
    finally:
        lock.release()
diff --git a/bitbake/lib/bb/xattr.py b/bitbake/lib/bb/xattr.py new file mode 100755 index 0000000000..7b634944a4 --- /dev/null +++ b/bitbake/lib/bb/xattr.py | |||
@@ -0,0 +1,126 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright 2023 by Garmin Ltd. or its subsidiaries | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | |||
7 | import sys | ||
8 | import ctypes | ||
9 | import os | ||
10 | import errno | ||
11 | |||
12 | libc = ctypes.CDLL("libc.so.6", use_errno=True) | ||
13 | fsencoding = sys.getfilesystemencoding() | ||
14 | |||
15 | |||
16 | libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t] | ||
17 | libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t] | ||
18 | |||
19 | |||
20 | def listxattr(path, follow=True): | ||
21 | func = libc.listxattr if follow else libc.llistxattr | ||
22 | |||
23 | os_path = os.fsencode(path) | ||
24 | |||
25 | while True: | ||
26 | length = func(os_path, None, 0) | ||
27 | |||
28 | if length < 0: | ||
29 | err = ctypes.get_errno() | ||
30 | raise OSError(err, os.strerror(err), str(path)) | ||
31 | |||
32 | if length == 0: | ||
33 | return [] | ||
34 | |||
35 | arr = ctypes.create_string_buffer(length) | ||
36 | |||
37 | read_length = func(os_path, arr, length) | ||
38 | if read_length != length: | ||
39 | # Race! | ||
40 | continue | ||
41 | |||
42 | return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a] | ||
43 | |||
44 | |||
45 | libc.getxattr.argtypes = [ | ||
46 | ctypes.c_char_p, | ||
47 | ctypes.c_char_p, | ||
48 | ctypes.c_char_p, | ||
49 | ctypes.c_size_t, | ||
50 | ] | ||
51 | libc.lgetxattr.argtypes = [ | ||
52 | ctypes.c_char_p, | ||
53 | ctypes.c_char_p, | ||
54 | ctypes.c_char_p, | ||
55 | ctypes.c_size_t, | ||
56 | ] | ||
57 | |||
58 | |||
59 | def getxattr(path, name, follow=True): | ||
60 | func = libc.getxattr if follow else libc.lgetxattr | ||
61 | |||
62 | os_path = os.fsencode(path) | ||
63 | os_name = os.fsencode(name) | ||
64 | |||
65 | while True: | ||
66 | length = func(os_path, os_name, None, 0) | ||
67 | |||
68 | if length < 0: | ||
69 | err = ctypes.get_errno() | ||
70 | if err == errno.ENODATA: | ||
71 | return None | ||
72 | raise OSError(err, os.strerror(err), str(path)) | ||
73 | |||
74 | if length == 0: | ||
74 | return b"" | ||
76 | |||
77 | arr = ctypes.create_string_buffer(length) | ||
78 | |||
79 | read_length = func(os_path, os_name, arr, length) | ||
80 | if read_length != length: | ||
81 | # Race! | ||
82 | continue | ||
83 | |||
84 | return arr.raw | ||
85 | |||
86 | |||
87 | def get_all_xattr(path, follow=True): | ||
88 | attrs = {} | ||
89 | |||
90 | names = listxattr(path, follow) | ||
91 | |||
92 | for name in names: | ||
93 | value = getxattr(path, name, follow) | ||
94 | if value is None: | ||
95 | # This can happen if a value is erased after listxattr is called, | ||
96 | # so ignore it | ||
97 | continue | ||
98 | attrs[name] = value | ||
99 | |||
100 | return attrs | ||
101 | |||
102 | |||
103 | def main(): | ||
104 | import argparse | ||
105 | from pathlib import Path | ||
106 | |||
107 | parser = argparse.ArgumentParser() | ||
108 | parser.add_argument("path", help="File Path", type=Path) | ||
109 | |||
110 | args = parser.parse_args() | ||
111 | |||
112 | attrs = get_all_xattr(args.path) | ||
113 | |||
114 | for name, value in attrs.items(): | ||
115 | try: | ||
116 | value = value.decode(fsencoding) | ||
117 | except UnicodeDecodeError: | ||
118 | pass | ||
119 | |||
120 | print(f"{name} = {value}") | ||
121 | |||
122 | return 0 | ||
123 | |||
124 | |||
125 | if __name__ == "__main__": | ||
126 | sys.exit(main()) | ||
diff --git a/bitbake/lib/bblayers/__init__.py b/bitbake/lib/bblayers/__init__.py index 4e7c09da04..78efd29750 100644 --- a/bitbake/lib/bblayers/__init__.py +++ b/bitbake/lib/bblayers/__init__.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py index f05f5d330f..a8f2699335 100644 --- a/bitbake/lib/bblayers/action.py +++ b/bitbake/lib/bblayers/action.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -9,6 +11,7 @@ import shutil | |||
9 | import sys | 11 | import sys |
10 | import tempfile | 12 | import tempfile |
11 | 13 | ||
14 | from bb.cookerdata import findTopdir | ||
12 | import bb.utils | 15 | import bb.utils |
13 | 16 | ||
14 | from bblayers.common import LayerPlugin | 17 | from bblayers.common import LayerPlugin |
@@ -35,7 +38,7 @@ class ActionPlugin(LayerPlugin): | |||
35 | sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir) | 38 | sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir) |
36 | return 1 | 39 | return 1 |
37 | 40 | ||
38 | bblayers_conf = os.path.join('conf', 'bblayers.conf') | 41 | bblayers_conf = os.path.join(findTopdir(),'conf', 'bblayers.conf') |
39 | if not os.path.exists(bblayers_conf): | 42 | if not os.path.exists(bblayers_conf): |
40 | sys.stderr.write("Unable to find bblayers.conf\n") | 43 | sys.stderr.write("Unable to find bblayers.conf\n") |
41 | return 1 | 44 | return 1 |
@@ -47,13 +50,15 @@ class ActionPlugin(LayerPlugin): | |||
47 | 50 | ||
48 | try: | 51 | try: |
49 | notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) | 52 | notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) |
53 | self.tinfoil.modified_files() | ||
50 | if not (args.force or notadded): | 54 | if not (args.force or notadded): |
51 | try: | 55 | try: |
52 | self.tinfoil.run_command('parseConfiguration') | 56 | self.tinfoil.run_command('parseConfiguration') |
53 | except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): | 57 | except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): |
54 | # Restore the back up copy of bblayers.conf | 58 | # Restore the back up copy of bblayers.conf |
55 | shutil.copy2(backup, bblayers_conf) | 59 | shutil.copy2(backup, bblayers_conf) |
56 | bb.fatal("Parse failure with the specified layer added, aborting.") | 60 | self.tinfoil.modified_files() |
61 | bb.fatal("Parse failure with the specified layer added, exiting.") | ||
57 | else: | 62 | else: |
58 | for item in notadded: | 63 | for item in notadded: |
59 | sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item) | 64 | sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item) |
@@ -63,7 +68,7 @@ class ActionPlugin(LayerPlugin): | |||
63 | 68 | ||
64 | def do_remove_layer(self, args): | 69 | def do_remove_layer(self, args): |
65 | """Remove one or more layers from bblayers.conf.""" | 70 | """Remove one or more layers from bblayers.conf.""" |
66 | bblayers_conf = os.path.join('conf', 'bblayers.conf') | 71 | bblayers_conf = os.path.join(findTopdir() ,'conf', 'bblayers.conf') |
67 | if not os.path.exists(bblayers_conf): | 72 | if not os.path.exists(bblayers_conf): |
68 | sys.stderr.write("Unable to find bblayers.conf\n") | 73 | sys.stderr.write("Unable to find bblayers.conf\n") |
69 | return 1 | 74 | return 1 |
@@ -78,6 +83,7 @@ class ActionPlugin(LayerPlugin): | |||
78 | layerdir = os.path.abspath(item) | 83 | layerdir = os.path.abspath(item) |
79 | layerdirs.append(layerdir) | 84 | layerdirs.append(layerdir) |
80 | (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) | 85 | (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) |
86 | self.tinfoil.modified_files() | ||
81 | if notremoved: | 87 | if notremoved: |
82 | for item in notremoved: | 88 | for item in notremoved: |
83 | sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item) | 89 | sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item) |
@@ -237,6 +243,9 @@ build results (as the layer priority order has effectively changed). | |||
237 | if not entry_found: | 243 | if not entry_found: |
238 | logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full) | 244 | logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full) |
239 | 245 | ||
246 | self.tinfoil.modified_files() | ||
247 | |||
248 | |||
240 | def get_file_layer(self, filename): | 249 | def get_file_layer(self, filename): |
241 | layerdir = self.get_file_layerdir(filename) | 250 | layerdir = self.get_file_layerdir(filename) |
242 | if layerdir: | 251 | if layerdir: |
diff --git a/bitbake/lib/bblayers/common.py b/bitbake/lib/bblayers/common.py index 6c76ef3505..f7b9cee371 100644 --- a/bitbake/lib/bblayers/common.py +++ b/bitbake/lib/bblayers/common.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py index b2f27b21ee..ba91fac669 100644 --- a/bitbake/lib/bblayers/layerindex.py +++ b/bitbake/lib/bblayers/layerindex.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -47,6 +49,31 @@ class LayerIndexPlugin(ActionPlugin): | |||
47 | else: | 49 | else: |
48 | logger.plain("Repository %s needs to be fetched" % url) | 50 | logger.plain("Repository %s needs to be fetched" % url) |
49 | return subdir, layername, layerdir | 51 | return subdir, layername, layerdir |
52 | elif os.path.exists(repodir) and branch: | ||
53 | """ | ||
54 | If the repo is already cloned, ensure it is on the correct branch, | ||
55 | switching branches if necessary and possible. | ||
56 | """ | ||
57 | base_cmd = ['git', '--git-dir=%s/.git' % repodir, '--work-tree=%s' % repodir] | ||
58 | cmd = base_cmd + ['branch'] | ||
59 | completed_proc = subprocess.run(cmd, text=True, capture_output=True) | ||
60 | if completed_proc.returncode: | ||
61 | logger.error("Unable to validate repo %s (%s)" % (repodir, completed_proc.stderr)) | ||
62 | return None, None, None | ||
63 | else: | ||
64 | if branch != completed_proc.stdout[2:-1]: | ||
65 | cmd = base_cmd + ['status', '--short'] | ||
66 | completed_proc = subprocess.run(cmd, text=True, capture_output=True) | ||
67 | if completed_proc.stdout.count('\n') != 0: | ||
68 | logger.warning("There are uncommitted changes in repo %s" % repodir) | ||
69 | cmd = base_cmd + ['checkout', branch] | ||
70 | completed_proc = subprocess.run(cmd, text=True, capture_output=True) | ||
71 | if completed_proc.returncode: | ||
72 | # Could be due to original shallow clone on a different branch for example | ||
73 | logger.error("Unable to automatically switch %s to desired branch '%s' (%s)" | ||
74 | % (repodir, branch, completed_proc.stderr)) | ||
75 | return None, None, None | ||
76 | return subdir, layername, layerdir | ||
50 | elif os.path.exists(layerdir): | 77 | elif os.path.exists(layerdir): |
51 | return subdir, layername, layerdir | 78 | return subdir, layername, layerdir |
52 | else: | 79 | else: |
@@ -159,12 +186,17 @@ class LayerIndexPlugin(ActionPlugin): | |||
159 | logger.plain(' recommended by: %s' % ' '.join(recommendedby)) | 186 | logger.plain(' recommended by: %s' % ' '.join(recommendedby)) |
160 | 187 | ||
161 | if dependencies: | 188 | if dependencies: |
162 | fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR') | 189 | if args.fetchdir: |
163 | if not fetchdir: | 190 | fetchdir = args.fetchdir |
164 | logger.error("Cannot get BBLAYERS_FETCH_DIR") | 191 | else: |
165 | return 1 | 192 | fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR') |
193 | if not fetchdir: | ||
194 | logger.error("Cannot get BBLAYERS_FETCH_DIR") | ||
195 | return 1 | ||
196 | |||
166 | if not os.path.exists(fetchdir): | 197 | if not os.path.exists(fetchdir): |
167 | os.makedirs(fetchdir) | 198 | os.makedirs(fetchdir) |
199 | |||
168 | addlayers = [] | 200 | addlayers = [] |
169 | 201 | ||
170 | for deplayerbranch in dependencies: | 202 | for deplayerbranch in dependencies: |
@@ -206,6 +238,8 @@ class LayerIndexPlugin(ActionPlugin): | |||
206 | """ | 238 | """ |
207 | args.show_only = True | 239 | args.show_only = True |
208 | args.ignore = [] | 240 | args.ignore = [] |
241 | args.fetchdir = "" | ||
242 | args.shallow = True | ||
209 | self.do_layerindex_fetch(args) | 243 | self.do_layerindex_fetch(args) |
210 | 244 | ||
211 | def register_commands(self, sp): | 245 | def register_commands(self, sp): |
@@ -214,6 +248,7 @@ class LayerIndexPlugin(ActionPlugin): | |||
214 | parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch') | 248 | parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch') |
215 | parser_layerindex_fetch.add_argument('-s', '--shallow', help='do only shallow clones (--depth=1)', action='store_true') | 249 | parser_layerindex_fetch.add_argument('-s', '--shallow', help='do only shallow clones (--depth=1)', action='store_true') |
216 | parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER') | 250 | parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER') |
251 | parser_layerindex_fetch.add_argument('-f', '--fetchdir', help='directory to fetch the layer(s) into (will be created if it does not exist)') | ||
217 | parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch') | 252 | parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch') |
218 | 253 | ||
219 | parser_layerindex_show_depends = self.add_command(sp, 'layerindex-show-depends', self.do_layerindex_show_depends, parserecipes=False) | 254 | parser_layerindex_show_depends = self.add_command(sp, 'layerindex-show-depends', self.do_layerindex_show_depends, parserecipes=False) |
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py index f5e3c84747..bfc18a7593 100644 --- a/bitbake/lib/bblayers/query.py +++ b/bitbake/lib/bblayers/query.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -27,12 +29,12 @@ class QueryPlugin(LayerPlugin): | |||
27 | 29 | ||
28 | def do_show_layers(self, args): | 30 | def do_show_layers(self, args): |
29 | """show current configured layers.""" | 31 | """show current configured layers.""" |
30 | logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority")) | 32 | logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(70), "priority")) |
31 | logger.plain('=' * 74) | 33 | logger.plain('=' * 104) |
32 | for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities: | 34 | for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities: |
33 | layerdir = self.bbfile_collections.get(layer, None) | 35 | layerdir = self.bbfile_collections.get(layer, None) |
34 | layername = self.get_layer_name(layerdir) | 36 | layername = layer |
35 | logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), pri)) | 37 | logger.plain("%s %s %s" % (layername.ljust(20), layerdir.ljust(70), pri)) |
36 | 38 | ||
37 | def version_str(self, pe, pv, pr = None): | 39 | def version_str(self, pe, pv, pr = None): |
38 | verstr = "%s" % pv | 40 | verstr = "%s" % pv |
@@ -55,11 +57,12 @@ are overlayed will also be listed, with a " (skipped)" suffix. | |||
55 | # Check for overlayed .bbclass files | 57 | # Check for overlayed .bbclass files |
56 | classes = collections.defaultdict(list) | 58 | classes = collections.defaultdict(list) |
57 | for layerdir in self.bblayers: | 59 | for layerdir in self.bblayers: |
58 | classdir = os.path.join(layerdir, 'classes') | 60 | for c in ["classes-global", "classes-recipe", "classes"]: |
59 | if os.path.exists(classdir): | 61 | classdir = os.path.join(layerdir, c) |
60 | for classfile in os.listdir(classdir): | 62 | if os.path.exists(classdir): |
61 | if os.path.splitext(classfile)[1] == '.bbclass': | 63 | for classfile in os.listdir(classdir): |
62 | classes[classfile].append(classdir) | 64 | if os.path.splitext(classfile)[1] == '.bbclass': |
65 | classes[classfile].append(classdir) | ||
63 | 66 | ||
64 | # Locating classes and other files is a bit more complicated than recipes - | 67 | # Locating classes and other files is a bit more complicated than recipes - |
65 | # layer priority is not a factor; instead BitBake uses the first matching | 68 | # layer priority is not a factor; instead BitBake uses the first matching |
@@ -122,13 +125,18 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
122 | if inherits: | 125 | if inherits: |
123 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) | 126 | bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) |
124 | for classname in inherits: | 127 | for classname in inherits: |
125 | classfile = 'classes/%s.bbclass' % classname | 128 | found = False |
126 | if not bb.utils.which(bbpath, classfile, history=False): | 129 | for c in ["classes-global", "classes-recipe", "classes"]: |
127 | logger.error('No class named %s found in BBPATH', classfile) | 130 | cfile = c + '/%s.bbclass' % classname |
131 | if bb.utils.which(bbpath, cfile, history=False): | ||
132 | found = True | ||
133 | break | ||
134 | if not found: | ||
135 | logger.error('No class named %s found in BBPATH', classname) | ||
128 | sys.exit(1) | 136 | sys.exit(1) |
129 | 137 | ||
130 | pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn | 138 | pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn |
131 | (latest_versions, preferred_versions) = self.tinfoil.find_providers(mc) | 139 | (latest_versions, preferred_versions, required_versions) = self.tinfoil.find_providers(mc) |
132 | allproviders = self.tinfoil.get_all_providers(mc) | 140 | allproviders = self.tinfoil.get_all_providers(mc) |
133 | 141 | ||
134 | # Ensure we list skipped recipes | 142 | # Ensure we list skipped recipes |
@@ -154,7 +162,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
154 | def print_item(f, pn, ver, layer, ispref): | 162 | def print_item(f, pn, ver, layer, ispref): |
155 | if not selected_layer or layer == selected_layer: | 163 | if not selected_layer or layer == selected_layer: |
156 | if not bare and f in skiplist: | 164 | if not bare and f in skiplist: |
157 | skipped = ' (skipped)' | 165 | skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason |
158 | else: | 166 | else: |
159 | skipped = '' | 167 | skipped = '' |
160 | if show_filenames: | 168 | if show_filenames: |
@@ -172,7 +180,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
172 | logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) | 180 | logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) |
173 | 181 | ||
174 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() | 182 | global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() |
175 | cls_re = re.compile('classes/') | 183 | cls_re = re.compile('classes.*/') |
176 | 184 | ||
177 | preffiles = [] | 185 | preffiles = [] |
178 | show_unique_pn = [] | 186 | show_unique_pn = [] |
@@ -274,7 +282,10 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
274 | else: | 282 | else: |
275 | logger.plain('=== Appended recipes ===') | 283 | logger.plain('=== Appended recipes ===') |
276 | 284 | ||
277 | pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys()) | 285 | |
286 | cooker_data = self.tinfoil.cooker.recipecaches[args.mc] | ||
287 | |||
288 | pnlist = list(cooker_data.pkg_pn.keys()) | ||
278 | pnlist.sort() | 289 | pnlist.sort() |
279 | appends = False | 290 | appends = False |
280 | for pn in pnlist: | 291 | for pn in pnlist: |
@@ -287,7 +298,7 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
287 | if not found: | 298 | if not found: |
288 | continue | 299 | continue |
289 | 300 | ||
290 | if self.show_appends_for_pn(pn): | 301 | if self.show_appends_for_pn(pn, cooker_data, args.mc): |
291 | appends = True | 302 | appends = True |
292 | 303 | ||
293 | if not args.pnspec and self.show_appends_for_skipped(): | 304 | if not args.pnspec and self.show_appends_for_skipped(): |
@@ -296,8 +307,10 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
296 | if not appends: | 307 | if not appends: |
297 | logger.plain('No append files found') | 308 | logger.plain('No append files found') |
298 | 309 | ||
299 | def show_appends_for_pn(self, pn): | 310 | def show_appends_for_pn(self, pn, cooker_data, mc): |
300 | filenames = self.tinfoil.cooker_data.pkg_pn[pn] | 311 | filenames = cooker_data.pkg_pn[pn] |
312 | if mc: | ||
313 | pn = "mc:%s:%s" % (mc, pn) | ||
301 | 314 | ||
302 | best = self.tinfoil.find_best_provider(pn) | 315 | best = self.tinfoil.find_best_provider(pn) |
303 | best_filename = os.path.basename(best[3]) | 316 | best_filename = os.path.basename(best[3]) |
@@ -405,7 +418,7 @@ NOTE: .bbappend files can impact the dependencies. | |||
405 | self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers) | 418 | self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers) |
406 | 419 | ||
407 | # The inherit class | 420 | # The inherit class |
408 | cls_re = re.compile('classes/') | 421 | cls_re = re.compile('classes.*/') |
409 | if f in self.tinfoil.cooker_data.inherits: | 422 | if f in self.tinfoil.cooker_data.inherits: |
410 | inherits = self.tinfoil.cooker_data.inherits[f] | 423 | inherits = self.tinfoil.cooker_data.inherits[f] |
411 | for cls in inherits: | 424 | for cls in inherits: |
@@ -441,10 +454,10 @@ NOTE: .bbappend files can impact the dependencies. | |||
441 | line = fnfile.readline() | 454 | line = fnfile.readline() |
442 | 455 | ||
443 | # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass | 456 | # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass |
444 | conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$") | 457 | conf_re = re.compile(r".*/conf/machine/[^\/]*\.conf$") |
445 | inc_re = re.compile(".*\.inc$") | 458 | inc_re = re.compile(r".*\.inc$") |
446 | # The "inherit xxx" in .bbclass | 459 | # The "inherit xxx" in .bbclass |
447 | bbclass_re = re.compile(".*\.bbclass$") | 460 | bbclass_re = re.compile(r".*\.bbclass$") |
448 | for layerdir in self.bblayers: | 461 | for layerdir in self.bblayers: |
449 | layername = self.get_layer_name(layerdir) | 462 | layername = self.get_layer_name(layerdir) |
450 | for dirpath, dirnames, filenames in os.walk(layerdir): | 463 | for dirpath, dirnames, filenames in os.walk(layerdir): |
@@ -522,6 +535,7 @@ NOTE: .bbappend files can impact the dependencies. | |||
522 | 535 | ||
523 | parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends) | 536 | parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends) |
524 | parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)') | 537 | parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)') |
538 | parser_show_appends.add_argument('--mc', help='use specified multiconfig', default='') | ||
525 | 539 | ||
526 | parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends) | 540 | parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends) |
527 | parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true') | 541 | parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true') |
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py index 8e5c66426e..cf0f1abe0c 100644 --- a/bitbake/lib/bs4/tests/test_tree.py +++ b/bitbake/lib/bs4/tests/test_tree.py | |||
@@ -585,7 +585,7 @@ class SiblingTest(TreeTest): | |||
585 | </html>''' | 585 | </html>''' |
586 | # All that whitespace looks good but makes the tests more | 586 | # All that whitespace looks good but makes the tests more |
587 | # difficult. Get rid of it. | 587 | # difficult. Get rid of it. |
588 | markup = re.compile("\n\s*").sub("", markup) | 588 | markup = re.compile(r"\n\s*").sub("", markup) |
589 | self.tree = self.soup(markup) | 589 | self.tree = self.soup(markup) |
590 | 590 | ||
591 | 591 | ||
diff --git a/bitbake/lib/codegen.py b/bitbake/lib/codegen.py index 62a6748c47..018b283177 100644 --- a/bitbake/lib/codegen.py +++ b/bitbake/lib/codegen.py | |||
@@ -392,14 +392,8 @@ class SourceGenerator(NodeVisitor): | |||
392 | def visit_Name(self, node): | 392 | def visit_Name(self, node): |
393 | self.write(node.id) | 393 | self.write(node.id) |
394 | 394 | ||
395 | def visit_Str(self, node): | 395 | def visit_Constant(self, node): |
396 | self.write(repr(node.s)) | 396 | self.write(repr(node.value)) |
397 | |||
398 | def visit_Bytes(self, node): | ||
399 | self.write(repr(node.s)) | ||
400 | |||
401 | def visit_Num(self, node): | ||
402 | self.write(repr(node.n)) | ||
403 | 397 | ||
404 | def visit_Tuple(self, node): | 398 | def visit_Tuple(self, node): |
405 | self.write('(') | 399 | self.write('(') |
diff --git a/bitbake/lib/hashserv/__init__.py b/bitbake/lib/hashserv/__init__.py index 5f2e101e52..74367eb6b4 100644 --- a/bitbake/lib/hashserv/__init__.py +++ b/bitbake/lib/hashserv/__init__.py | |||
@@ -5,129 +5,102 @@ | |||
5 | 5 | ||
6 | import asyncio | 6 | import asyncio |
7 | from contextlib import closing | 7 | from contextlib import closing |
8 | import re | ||
9 | import sqlite3 | ||
10 | import itertools | 8 | import itertools |
11 | import json | 9 | import json |
10 | from collections import namedtuple | ||
11 | from urllib.parse import urlparse | ||
12 | from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS | ||
13 | |||
14 | User = namedtuple("User", ("username", "permissions")) | ||
15 | |||
16 | def create_server( | ||
17 | addr, | ||
18 | dbname, | ||
19 | *, | ||
20 | sync=True, | ||
21 | upstream=None, | ||
22 | read_only=False, | ||
23 | db_username=None, | ||
24 | db_password=None, | ||
25 | anon_perms=None, | ||
26 | admin_username=None, | ||
27 | admin_password=None, | ||
28 | ): | ||
29 | def sqlite_engine(): | ||
30 | from .sqlite import DatabaseEngine | ||
31 | |||
32 | return DatabaseEngine(dbname, sync) | ||
33 | |||
34 | def sqlalchemy_engine(): | ||
35 | from .sqlalchemy import DatabaseEngine | ||
36 | |||
37 | return DatabaseEngine(dbname, db_username, db_password) | ||
12 | 38 | ||
13 | UNIX_PREFIX = "unix://" | 39 | from . import server |
14 | |||
15 | ADDR_TYPE_UNIX = 0 | ||
16 | ADDR_TYPE_TCP = 1 | ||
17 | |||
18 | # The Python async server defaults to a 64K receive buffer, so we hardcode our | ||
19 | # maximum chunk size. It would be better if the client and server reported to | ||
20 | # each other what the maximum chunk sizes were, but that will slow down the | ||
21 | # connection setup with a round trip delay so I'd rather not do that unless it | ||
22 | # is necessary | ||
23 | DEFAULT_MAX_CHUNK = 32 * 1024 | ||
24 | |||
25 | TABLE_DEFINITION = ( | ||
26 | ("method", "TEXT NOT NULL"), | ||
27 | ("outhash", "TEXT NOT NULL"), | ||
28 | ("taskhash", "TEXT NOT NULL"), | ||
29 | ("unihash", "TEXT NOT NULL"), | ||
30 | ("created", "DATETIME"), | ||
31 | |||
32 | # Optional fields | ||
33 | ("owner", "TEXT"), | ||
34 | ("PN", "TEXT"), | ||
35 | ("PV", "TEXT"), | ||
36 | ("PR", "TEXT"), | ||
37 | ("task", "TEXT"), | ||
38 | ("outhash_siginfo", "TEXT"), | ||
39 | ) | ||
40 | |||
41 | TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION) | ||
42 | |||
43 | def setup_database(database, sync=True): | ||
44 | db = sqlite3.connect(database) | ||
45 | db.row_factory = sqlite3.Row | ||
46 | |||
47 | with closing(db.cursor()) as cursor: | ||
48 | cursor.execute(''' | ||
49 | CREATE TABLE IF NOT EXISTS tasks_v2 ( | ||
50 | id INTEGER PRIMARY KEY AUTOINCREMENT, | ||
51 | %s | ||
52 | UNIQUE(method, outhash, taskhash) | ||
53 | ) | ||
54 | ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION)) | ||
55 | cursor.execute('PRAGMA journal_mode = WAL') | ||
56 | cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF')) | ||
57 | |||
58 | # Drop old indexes | ||
59 | cursor.execute('DROP INDEX IF EXISTS taskhash_lookup') | ||
60 | cursor.execute('DROP INDEX IF EXISTS outhash_lookup') | ||
61 | |||
62 | # Create new indexes | ||
63 | cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v2 ON tasks_v2 (method, taskhash, created)') | ||
64 | cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v2 ON tasks_v2 (method, outhash)') | ||
65 | |||
66 | return db | ||
67 | |||
68 | |||
69 | def parse_address(addr): | ||
70 | if addr.startswith(UNIX_PREFIX): | ||
71 | return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX):],)) | ||
72 | else: | ||
73 | m = re.match(r'\[(?P<host>[^\]]*)\]:(?P<port>\d+)$', addr) | ||
74 | if m is not None: | ||
75 | host = m.group('host') | ||
76 | port = m.group('port') | ||
77 | else: | ||
78 | host, port = addr.split(':') | ||
79 | |||
80 | return (ADDR_TYPE_TCP, (host, int(port))) | ||
81 | |||
82 | 40 | ||
83 | def chunkify(msg, max_chunk): | 41 | if "://" in dbname: |
84 | if len(msg) < max_chunk - 1: | 42 | db_engine = sqlalchemy_engine() |
85 | yield ''.join((msg, "\n")) | ||
86 | else: | 43 | else: |
87 | yield ''.join((json.dumps({ | 44 | db_engine = sqlite_engine() |
88 | 'chunk-stream': None | ||
89 | }), "\n")) | ||
90 | 45 | ||
91 | args = [iter(msg)] * (max_chunk - 1) | 46 | if anon_perms is None: |
92 | for m in map(''.join, itertools.zip_longest(*args, fillvalue='')): | 47 | anon_perms = server.DEFAULT_ANON_PERMS |
93 | yield ''.join(itertools.chain(m, "\n")) | ||
94 | yield "\n" | ||
95 | 48 | ||
96 | 49 | s = server.Server( | |
97 | def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False): | 50 | db_engine, |
98 | from . import server | 51 | upstream=upstream, |
99 | db = setup_database(dbname, sync=sync) | 52 | read_only=read_only, |
100 | s = server.Server(db, upstream=upstream, read_only=read_only) | 53 | anon_perms=anon_perms, |
54 | admin_username=admin_username, | ||
55 | admin_password=admin_password, | ||
56 | ) | ||
101 | 57 | ||
102 | (typ, a) = parse_address(addr) | 58 | (typ, a) = parse_address(addr) |
103 | if typ == ADDR_TYPE_UNIX: | 59 | if typ == ADDR_TYPE_UNIX: |
104 | s.start_unix_server(*a) | 60 | s.start_unix_server(*a) |
61 | elif typ == ADDR_TYPE_WS: | ||
62 | url = urlparse(a[0]) | ||
63 | s.start_websocket_server(url.hostname, url.port) | ||
105 | else: | 64 | else: |
106 | s.start_tcp_server(*a) | 65 | s.start_tcp_server(*a) |
107 | 66 | ||
108 | return s | 67 | return s |
109 | 68 | ||
110 | 69 | ||
111 | def create_client(addr): | 70 | def create_client(addr, username=None, password=None): |
112 | from . import client | 71 | from . import client |
113 | c = client.Client() | ||
114 | 72 | ||
115 | (typ, a) = parse_address(addr) | 73 | c = client.Client(username, password) |
116 | if typ == ADDR_TYPE_UNIX: | 74 | |
117 | c.connect_unix(*a) | 75 | try: |
118 | else: | 76 | (typ, a) = parse_address(addr) |
119 | c.connect_tcp(*a) | 77 | if typ == ADDR_TYPE_UNIX: |
78 | c.connect_unix(*a) | ||
79 | elif typ == ADDR_TYPE_WS: | ||
80 | c.connect_websocket(*a) | ||
81 | else: | ||
82 | c.connect_tcp(*a) | ||
83 | return c | ||
84 | except Exception as e: | ||
85 | c.close() | ||
86 | raise e | ||
120 | 87 | ||
121 | return c | ||
122 | 88 | ||
123 | async def create_async_client(addr): | 89 | async def create_async_client(addr, username=None, password=None): |
124 | from . import client | 90 | from . import client |
125 | c = client.AsyncClient() | ||
126 | 91 | ||
127 | (typ, a) = parse_address(addr) | 92 | c = client.AsyncClient(username, password) |
128 | if typ == ADDR_TYPE_UNIX: | 93 | |
129 | await c.connect_unix(*a) | 94 | try: |
130 | else: | 95 | (typ, a) = parse_address(addr) |
131 | await c.connect_tcp(*a) | 96 | if typ == ADDR_TYPE_UNIX: |
97 | await c.connect_unix(*a) | ||
98 | elif typ == ADDR_TYPE_WS: | ||
99 | await c.connect_websocket(*a) | ||
100 | else: | ||
101 | await c.connect_tcp(*a) | ||
132 | 102 | ||
133 | return c | 103 | return c |
104 | except Exception as e: | ||
105 | await c.close() | ||
106 | raise e | ||
diff --git a/bitbake/lib/hashserv/client.py b/bitbake/lib/hashserv/client.py index e05c1eb568..0b254beddd 100644 --- a/bitbake/lib/hashserv/client.py +++ b/bitbake/lib/hashserv/client.py | |||
@@ -3,231 +3,344 @@ | |||
3 | # SPDX-License-Identifier: GPL-2.0-only | 3 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 4 | # |
5 | 5 | ||
6 | import asyncio | ||
7 | import json | ||
8 | import logging | 6 | import logging |
9 | import socket | 7 | import socket |
10 | import os | 8 | import bb.asyncrpc |
11 | from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client | 9 | import json |
10 | from . import create_async_client | ||
12 | 11 | ||
13 | 12 | ||
14 | logger = logging.getLogger("hashserv.client") | 13 | logger = logging.getLogger("hashserv.client") |
15 | 14 | ||
16 | 15 | ||
17 | class HashConnectionError(Exception): | 16 | class AsyncClient(bb.asyncrpc.AsyncClient): |
18 | pass | ||
19 | |||
20 | |||
21 | class AsyncClient(object): | ||
22 | MODE_NORMAL = 0 | 17 | MODE_NORMAL = 0 |
23 | MODE_GET_STREAM = 1 | 18 | MODE_GET_STREAM = 1 |
19 | MODE_EXIST_STREAM = 2 | ||
24 | 20 | ||
25 | def __init__(self): | 21 | def __init__(self, username=None, password=None): |
26 | self.reader = None | 22 | super().__init__("OEHASHEQUIV", "1.1", logger) |
27 | self.writer = None | ||
28 | self.mode = self.MODE_NORMAL | 23 | self.mode = self.MODE_NORMAL |
29 | self.max_chunk = DEFAULT_MAX_CHUNK | 24 | self.username = username |
30 | 25 | self.password = password | |
31 | async def connect_tcp(self, address, port): | 26 | self.saved_become_user = None |
32 | async def connect_sock(): | ||
33 | return await asyncio.open_connection(address, port) | ||
34 | |||
35 | self._connect_sock = connect_sock | ||
36 | |||
37 | async def connect_unix(self, path): | ||
38 | async def connect_sock(): | ||
39 | return await asyncio.open_unix_connection(path) | ||
40 | |||
41 | self._connect_sock = connect_sock | ||
42 | |||
43 | async def connect(self): | ||
44 | if self.reader is None or self.writer is None: | ||
45 | (self.reader, self.writer) = await self._connect_sock() | ||
46 | |||
47 | self.writer.write("OEHASHEQUIV 1.1\n\n".encode("utf-8")) | ||
48 | await self.writer.drain() | ||
49 | |||
50 | cur_mode = self.mode | ||
51 | self.mode = self.MODE_NORMAL | ||
52 | await self._set_mode(cur_mode) | ||
53 | |||
54 | async def close(self): | ||
55 | self.reader = None | ||
56 | |||
57 | if self.writer is not None: | ||
58 | self.writer.close() | ||
59 | self.writer = None | ||
60 | |||
61 | async def _send_wrapper(self, proc): | ||
62 | count = 0 | ||
63 | while True: | ||
64 | try: | ||
65 | await self.connect() | ||
66 | return await proc() | ||
67 | except ( | ||
68 | OSError, | ||
69 | HashConnectionError, | ||
70 | json.JSONDecodeError, | ||
71 | UnicodeDecodeError, | ||
72 | ) as e: | ||
73 | logger.warning("Error talking to server: %s" % e) | ||
74 | if count >= 3: | ||
75 | if not isinstance(e, HashConnectionError): | ||
76 | raise HashConnectionError(str(e)) | ||
77 | raise e | ||
78 | await self.close() | ||
79 | count += 1 | ||
80 | |||
81 | async def send_message(self, msg): | ||
82 | async def get_line(): | ||
83 | line = await self.reader.readline() | ||
84 | if not line: | ||
85 | raise HashConnectionError("Connection closed") | ||
86 | |||
87 | line = line.decode("utf-8") | ||
88 | |||
89 | if not line.endswith("\n"): | ||
90 | raise HashConnectionError("Bad message %r" % message) | ||
91 | |||
92 | return line | ||
93 | 27 | ||
28 | async def setup_connection(self): | ||
29 | await super().setup_connection() | ||
30 | self.mode = self.MODE_NORMAL | ||
31 | if self.username: | ||
32 | # Save off become user temporarily because auth() resets it | ||
33 | become = self.saved_become_user | ||
34 | await self.auth(self.username, self.password) | ||
35 | |||
36 | if become: | ||
37 | await self.become_user(become) | ||
38 | |||
39 | async def send_stream(self, mode, msg): | ||
94 | async def proc(): | 40 | async def proc(): |
95 | for c in chunkify(json.dumps(msg), self.max_chunk): | 41 | await self._set_mode(mode) |
96 | self.writer.write(c.encode("utf-8")) | 42 | await self.socket.send(msg) |
97 | await self.writer.drain() | 43 | return await self.socket.recv() |
98 | 44 | ||
99 | l = await get_line() | 45 | return await self._send_wrapper(proc) |
100 | 46 | ||
101 | m = json.loads(l) | 47 | async def invoke(self, *args, **kwargs): |
102 | if m and "chunk-stream" in m: | 48 | # It's OK if connection errors cause a failure here, because the mode |
103 | lines = [] | 49 | # is also reset to normal on a new connection |
104 | while True: | 50 | await self._set_mode(self.MODE_NORMAL) |
105 | l = (await get_line()).rstrip("\n") | 51 | return await super().invoke(*args, **kwargs) |
106 | if not l: | ||
107 | break | ||
108 | lines.append(l) | ||
109 | 52 | ||
110 | m = json.loads("".join(lines)) | 53 | async def _set_mode(self, new_mode): |
54 | async def stream_to_normal(): | ||
55 | await self.socket.send("END") | ||
56 | return await self.socket.recv() | ||
111 | 57 | ||
112 | return m | 58 | async def normal_to_stream(command): |
59 | r = await self.invoke({command: None}) | ||
60 | if r != "ok": | ||
61 | raise ConnectionError( | ||
62 | f"Unable to transition to stream mode: Bad response from server {r!r}" | ||
63 | ) | ||
113 | 64 | ||
114 | return await self._send_wrapper(proc) | 65 | self.logger.debug("Mode is now %s", command) |
115 | 66 | ||
116 | async def send_stream(self, msg): | 67 | if new_mode == self.mode: |
117 | async def proc(): | 68 | return |
118 | self.writer.write(("%s\n" % msg).encode("utf-8")) | ||
119 | await self.writer.drain() | ||
120 | l = await self.reader.readline() | ||
121 | if not l: | ||
122 | raise HashConnectionError("Connection closed") | ||
123 | return l.decode("utf-8").rstrip() | ||
124 | 69 | ||
125 | return await self._send_wrapper(proc) | 70 | self.logger.debug("Transitioning mode %s -> %s", self.mode, new_mode) |
126 | 71 | ||
127 | async def _set_mode(self, new_mode): | 72 | # Always transition to normal mode before switching to any other mode |
128 | if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM: | 73 | if self.mode != self.MODE_NORMAL: |
129 | r = await self.send_stream("END") | 74 | r = await self._send_wrapper(stream_to_normal) |
130 | if r != "ok": | 75 | if r != "ok": |
131 | raise HashConnectionError("Bad response from server %r" % r) | 76 | self.check_invoke_error(r) |
132 | elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL: | 77 | raise ConnectionError( |
133 | r = await self.send_message({"get-stream": None}) | 78 | f"Unable to transition to normal mode: Bad response from server {r!r}" |
134 | if r != "ok": | 79 | ) |
135 | raise HashConnectionError("Bad response from server %r" % r) | 80 | self.logger.debug("Mode is now normal") |
136 | elif new_mode != self.mode: | 81 | |
137 | raise Exception( | 82 | if new_mode == self.MODE_GET_STREAM: |
138 | "Undefined mode transition %r -> %r" % (self.mode, new_mode) | 83 | await normal_to_stream("get-stream") |
139 | ) | 84 | elif new_mode == self.MODE_EXIST_STREAM: |
85 | await normal_to_stream("exists-stream") | ||
86 | elif new_mode != self.MODE_NORMAL: | ||
87 | raise Exception(f"Undefined mode transition {self.mode!r} -> {new_mode!r}") | ||
140 | 88 | ||
141 | self.mode = new_mode | 89 | self.mode = new_mode |
142 | 90 | ||
143 | async def get_unihash(self, method, taskhash): | 91 | async def get_unihash(self, method, taskhash): |
144 | await self._set_mode(self.MODE_GET_STREAM) | 92 | r = await self.send_stream(self.MODE_GET_STREAM, "%s %s" % (method, taskhash)) |
145 | r = await self.send_stream("%s %s" % (method, taskhash)) | ||
146 | if not r: | 93 | if not r: |
147 | return None | 94 | return None |
148 | return r | 95 | return r |
149 | 96 | ||
150 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): | 97 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): |
151 | await self._set_mode(self.MODE_NORMAL) | ||
152 | m = extra.copy() | 98 | m = extra.copy() |
153 | m["taskhash"] = taskhash | 99 | m["taskhash"] = taskhash |
154 | m["method"] = method | 100 | m["method"] = method |
155 | m["outhash"] = outhash | 101 | m["outhash"] = outhash |
156 | m["unihash"] = unihash | 102 | m["unihash"] = unihash |
157 | return await self.send_message({"report": m}) | 103 | return await self.invoke({"report": m}) |
158 | 104 | ||
159 | async def report_unihash_equiv(self, taskhash, method, unihash, extra={}): | 105 | async def report_unihash_equiv(self, taskhash, method, unihash, extra={}): |
160 | await self._set_mode(self.MODE_NORMAL) | ||
161 | m = extra.copy() | 106 | m = extra.copy() |
162 | m["taskhash"] = taskhash | 107 | m["taskhash"] = taskhash |
163 | m["method"] = method | 108 | m["method"] = method |
164 | m["unihash"] = unihash | 109 | m["unihash"] = unihash |
165 | return await self.send_message({"report-equiv": m}) | 110 | return await self.invoke({"report-equiv": m}) |
166 | 111 | ||
167 | async def get_taskhash(self, method, taskhash, all_properties=False): | 112 | async def get_taskhash(self, method, taskhash, all_properties=False): |
168 | await self._set_mode(self.MODE_NORMAL) | 113 | return await self.invoke( |
169 | return await self.send_message( | ||
170 | {"get": {"taskhash": taskhash, "method": method, "all": all_properties}} | 114 | {"get": {"taskhash": taskhash, "method": method, "all": all_properties}} |
171 | ) | 115 | ) |
172 | 116 | ||
173 | async def get_outhash(self, method, outhash, taskhash): | 117 | async def unihash_exists(self, unihash): |
174 | await self._set_mode(self.MODE_NORMAL) | 118 | r = await self.send_stream(self.MODE_EXIST_STREAM, unihash) |
175 | return await self.send_message( | 119 | return r == "true" |
176 | {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}} | 120 | |
121 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): | ||
122 | return await self.invoke( | ||
123 | { | ||
124 | "get-outhash": { | ||
125 | "outhash": outhash, | ||
126 | "taskhash": taskhash, | ||
127 | "method": method, | ||
128 | "with_unihash": with_unihash, | ||
129 | } | ||
130 | } | ||
177 | ) | 131 | ) |
178 | 132 | ||
179 | async def get_stats(self): | 133 | async def get_stats(self): |
180 | await self._set_mode(self.MODE_NORMAL) | 134 | return await self.invoke({"get-stats": None}) |
181 | return await self.send_message({"get-stats": None}) | ||
182 | 135 | ||
183 | async def reset_stats(self): | 136 | async def reset_stats(self): |
184 | await self._set_mode(self.MODE_NORMAL) | 137 | return await self.invoke({"reset-stats": None}) |
185 | return await self.send_message({"reset-stats": None}) | ||
186 | 138 | ||
187 | async def backfill_wait(self): | 139 | async def backfill_wait(self): |
188 | await self._set_mode(self.MODE_NORMAL) | 140 | return (await self.invoke({"backfill-wait": None}))["tasks"] |
189 | return (await self.send_message({"backfill-wait": None}))["tasks"] | 141 | |
142 | async def remove(self, where): | ||
143 | return await self.invoke({"remove": {"where": where}}) | ||
144 | |||
145 | async def clean_unused(self, max_age): | ||
146 | return await self.invoke({"clean-unused": {"max_age_seconds": max_age}}) | ||
147 | |||
148 | async def auth(self, username, token): | ||
149 | result = await self.invoke({"auth": {"username": username, "token": token}}) | ||
150 | self.username = username | ||
151 | self.password = token | ||
152 | self.saved_become_user = None | ||
153 | return result | ||
154 | |||
155 | async def refresh_token(self, username=None): | ||
156 | m = {} | ||
157 | if username: | ||
158 | m["username"] = username | ||
159 | result = await self.invoke({"refresh-token": m}) | ||
160 | if ( | ||
161 | self.username | ||
162 | and not self.saved_become_user | ||
163 | and result["username"] == self.username | ||
164 | ): | ||
165 | self.password = result["token"] | ||
166 | return result | ||
167 | |||
168 | async def set_user_perms(self, username, permissions): | ||
169 | return await self.invoke( | ||
170 | {"set-user-perms": {"username": username, "permissions": permissions}} | ||
171 | ) | ||
172 | |||
173 | async def get_user(self, username=None): | ||
174 | m = {} | ||
175 | if username: | ||
176 | m["username"] = username | ||
177 | return await self.invoke({"get-user": m}) | ||
178 | |||
179 | async def get_all_users(self): | ||
180 | return (await self.invoke({"get-all-users": {}}))["users"] | ||
181 | |||
182 | async def new_user(self, username, permissions): | ||
183 | return await self.invoke( | ||
184 | {"new-user": {"username": username, "permissions": permissions}} | ||
185 | ) | ||
186 | |||
187 | async def delete_user(self, username): | ||
188 | return await self.invoke({"delete-user": {"username": username}}) | ||
189 | |||
190 | async def become_user(self, username): | ||
191 | result = await self.invoke({"become-user": {"username": username}}) | ||
192 | if username == self.username: | ||
193 | self.saved_become_user = None | ||
194 | else: | ||
195 | self.saved_become_user = username | ||
196 | return result | ||
197 | |||
198 | async def get_db_usage(self): | ||
199 | return (await self.invoke({"get-db-usage": {}}))["usage"] | ||
200 | |||
201 | async def get_db_query_columns(self): | ||
202 | return (await self.invoke({"get-db-query-columns": {}}))["columns"] | ||
203 | |||
204 | async def gc_status(self): | ||
205 | return await self.invoke({"gc-status": {}}) | ||
206 | |||
207 | async def gc_mark(self, mark, where): | ||
208 | """ | ||
209 | Starts a new garbage collection operation identified by "mark". If | ||
210 | garbage collection is already in progress with "mark", the collection | ||
211 | is continued. | ||
212 | |||
213 | All unihash entries that match the "where" clause are marked to be | ||
214 | kept. In addition, any new entries added to the database after this | ||
215 | command will be automatically marked with "mark" | ||
216 | """ | ||
217 | return await self.invoke({"gc-mark": {"mark": mark, "where": where}}) | ||
190 | 218 | ||
219 | async def gc_sweep(self, mark): | ||
220 | """ | ||
221 | Finishes garbage collection for "mark". All unihash entries that have | ||
222 | not been marked will be deleted. | ||
191 | 223 | ||
192 | class Client(object): | 224 | It is recommended to clean unused outhash entries after running this to |
193 | def __init__(self): | 225 | cleanup any dangling outhashes |
194 | self.client = AsyncClient() | 226 | """ |
195 | self.loop = asyncio.new_event_loop() | 227 | return await self.invoke({"gc-sweep": {"mark": mark}}) |
196 | 228 | ||
197 | for call in ( | 229 | |
230 | class Client(bb.asyncrpc.Client): | ||
231 | def __init__(self, username=None, password=None): | ||
232 | self.username = username | ||
233 | self.password = password | ||
234 | |||
235 | super().__init__() | ||
236 | self._add_methods( | ||
198 | "connect_tcp", | 237 | "connect_tcp", |
199 | "close", | 238 | "connect_websocket", |
200 | "get_unihash", | 239 | "get_unihash", |
201 | "report_unihash", | 240 | "report_unihash", |
202 | "report_unihash_equiv", | 241 | "report_unihash_equiv", |
203 | "get_taskhash", | 242 | "get_taskhash", |
243 | "unihash_exists", | ||
244 | "get_outhash", | ||
204 | "get_stats", | 245 | "get_stats", |
205 | "reset_stats", | 246 | "reset_stats", |
206 | "backfill_wait", | 247 | "backfill_wait", |
207 | ): | 248 | "remove", |
208 | downcall = getattr(self.client, call) | 249 | "clean_unused", |
209 | setattr(self, call, self._get_downcall_wrapper(downcall)) | 250 | "auth", |
210 | 251 | "refresh_token", | |
211 | def _get_downcall_wrapper(self, downcall): | 252 | "set_user_perms", |
212 | def wrapper(*args, **kwargs): | 253 | "get_user", |
213 | return self.loop.run_until_complete(downcall(*args, **kwargs)) | 254 | "get_all_users", |
214 | 255 | "new_user", | |
215 | return wrapper | 256 | "delete_user", |
216 | 257 | "become_user", | |
217 | def connect_unix(self, path): | 258 | "get_db_usage", |
218 | # AF_UNIX has path length issues so chdir here to workaround | 259 | "get_db_query_columns", |
219 | cwd = os.getcwd() | 260 | "gc_status", |
220 | try: | 261 | "gc_mark", |
221 | os.chdir(os.path.dirname(path)) | 262 | "gc_sweep", |
222 | self.loop.run_until_complete(self.client.connect_unix(os.path.basename(path))) | 263 | ) |
223 | self.loop.run_until_complete(self.client.connect()) | 264 | |
224 | finally: | 265 | def _get_async_client(self): |
225 | os.chdir(cwd) | 266 | return AsyncClient(self.username, self.password) |
226 | 267 | ||
227 | @property | 268 | |
228 | def max_chunk(self): | 269 | class ClientPool(bb.asyncrpc.ClientPool): |
229 | return self.client.max_chunk | 270 | def __init__( |
230 | 271 | self, | |
231 | @max_chunk.setter | 272 | address, |
232 | def max_chunk(self, value): | 273 | max_clients, |
233 | self.client.max_chunk = value | 274 | *, |
275 | username=None, | ||
276 | password=None, | ||
277 | become=None, | ||
278 | ): | ||
279 | super().__init__(max_clients) | ||
280 | self.address = address | ||
281 | self.username = username | ||
282 | self.password = password | ||
283 | self.become = become | ||
284 | |||
285 | async def _new_client(self): | ||
286 | client = await create_async_client( | ||
287 | self.address, | ||
288 | username=self.username, | ||
289 | password=self.password, | ||
290 | ) | ||
291 | if self.become: | ||
292 | await client.become_user(self.become) | ||
293 | return client | ||
294 | |||
295 | def _run_key_tasks(self, queries, call): | ||
296 | results = {key: None for key in queries.keys()} | ||
297 | |||
298 | def make_task(key, args): | ||
299 | async def task(client): | ||
300 | nonlocal results | ||
301 | unihash = await call(client, args) | ||
302 | results[key] = unihash | ||
303 | |||
304 | return task | ||
305 | |||
306 | def gen_tasks(): | ||
307 | for key, args in queries.items(): | ||
308 | yield make_task(key, args) | ||
309 | |||
310 | self.run_tasks(gen_tasks()) | ||
311 | return results | ||
312 | |||
313 | def get_unihashes(self, queries): | ||
314 | """ | ||
315 | Query multiple unihashes in parallel. | ||
316 | |||
317 | The queries argument is a dictionary with arbitrary keys. The values | ||
318 | must be a tuple of (method, taskhash). | ||
319 | |||
320 | Returns a dictionary with a corresponding key for each input key, and | ||
321 | the value is the queried unihash (which might be None if the query | ||
322 | failed) | ||
323 | """ | ||
324 | |||
325 | async def call(client, args): | ||
326 | method, taskhash = args | ||
327 | return await client.get_unihash(method, taskhash) | ||
328 | |||
329 | return self._run_key_tasks(queries, call) | ||
330 | |||
331 | def unihashes_exist(self, queries): | ||
332 | """ | ||
333 | Query multiple unihash existence checks in parallel. | ||
334 | |||
335 | The queries argument is a dictionary with arbitrary keys. The values | ||
336 | must be a unihash. | ||
337 | |||
338 | Returns a dictionary with a corresponding key for each input key, and | ||
339 | the value is True or False if the unihash is known by the server (or | ||
340 | None if there was a failure) | ||
341 | """ | ||
342 | |||
343 | async def call(client, unihash): | ||
344 | return await client.unihash_exists(unihash) | ||
345 | |||
346 | return self._run_key_tasks(queries, call) | ||
diff --git a/bitbake/lib/hashserv/server.py b/bitbake/lib/hashserv/server.py index a0dc0c170f..68f64f983b 100644 --- a/bitbake/lib/hashserv/server.py +++ b/bitbake/lib/hashserv/server.py | |||
@@ -3,20 +3,51 @@ | |||
3 | # SPDX-License-Identifier: GPL-2.0-only | 3 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 4 | # |
5 | 5 | ||
6 | from contextlib import closing, contextmanager | 6 | from datetime import datetime, timedelta |
7 | from datetime import datetime | ||
8 | import asyncio | 7 | import asyncio |
9 | import json | ||
10 | import logging | 8 | import logging |
11 | import math | 9 | import math |
12 | import os | ||
13 | import signal | ||
14 | import socket | ||
15 | import sys | ||
16 | import time | 10 | import time |
17 | from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client, TABLE_COLUMNS | 11 | import os |
12 | import base64 | ||
13 | import hashlib | ||
14 | from . import create_async_client | ||
15 | import bb.asyncrpc | ||
16 | |||
17 | logger = logging.getLogger("hashserv.server") | ||
18 | |||
19 | |||
20 | # This permission only exists to match nothing | ||
21 | NONE_PERM = "@none" | ||
22 | |||
23 | READ_PERM = "@read" | ||
24 | REPORT_PERM = "@report" | ||
25 | DB_ADMIN_PERM = "@db-admin" | ||
26 | USER_ADMIN_PERM = "@user-admin" | ||
27 | ALL_PERM = "@all" | ||
18 | 28 | ||
19 | logger = logging.getLogger('hashserv.server') | 29 | ALL_PERMISSIONS = { |
30 | READ_PERM, | ||
31 | REPORT_PERM, | ||
32 | DB_ADMIN_PERM, | ||
33 | USER_ADMIN_PERM, | ||
34 | ALL_PERM, | ||
35 | } | ||
36 | |||
37 | DEFAULT_ANON_PERMS = ( | ||
38 | READ_PERM, | ||
39 | REPORT_PERM, | ||
40 | DB_ADMIN_PERM, | ||
41 | ) | ||
42 | |||
43 | TOKEN_ALGORITHM = "sha256" | ||
44 | |||
45 | # 48 bytes of random data will result in 64 characters when base64 | ||
46 | # encoded. This number also ensures that the base64 encoding won't have any | ||
47 | # trailing '=' characters. | ||
48 | TOKEN_SIZE = 48 | ||
49 | |||
50 | SALT_SIZE = 8 | ||
20 | 51 | ||
21 | 52 | ||
22 | class Measurement(object): | 53 | class Measurement(object): |
@@ -106,522 +137,745 @@ class Stats(object): | |||
106 | return math.sqrt(self.s / (self.num - 1)) | 137 | return math.sqrt(self.s / (self.num - 1)) |
107 | 138 | ||
108 | def todict(self): | 139 | def todict(self): |
109 | return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')} | 140 | return { |
110 | 141 | k: getattr(self, k) | |
111 | 142 | for k in ("num", "total_time", "max_time", "average", "stdev") | |
112 | class ClientError(Exception): | ||
113 | pass | ||
114 | |||
115 | class ServerError(Exception): | ||
116 | pass | ||
117 | |||
118 | def insert_task(cursor, data, ignore=False): | ||
119 | keys = sorted(data.keys()) | ||
120 | query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % ( | ||
121 | " OR IGNORE" if ignore else "", | ||
122 | ', '.join(keys), | ||
123 | ', '.join(':' + k for k in keys)) | ||
124 | cursor.execute(query, data) | ||
125 | |||
126 | async def copy_from_upstream(client, db, method, taskhash): | ||
127 | d = await client.get_taskhash(method, taskhash, True) | ||
128 | if d is not None: | ||
129 | # Filter out unknown columns | ||
130 | d = {k: v for k, v in d.items() if k in TABLE_COLUMNS} | ||
131 | keys = sorted(d.keys()) | ||
132 | |||
133 | with closing(db.cursor()) as cursor: | ||
134 | insert_task(cursor, d) | ||
135 | db.commit() | ||
136 | |||
137 | return d | ||
138 | |||
139 | async def copy_outhash_from_upstream(client, db, method, outhash, taskhash): | ||
140 | d = await client.get_outhash(method, outhash, taskhash) | ||
141 | if d is not None: | ||
142 | # Filter out unknown columns | ||
143 | d = {k: v for k, v in d.items() if k in TABLE_COLUMNS} | ||
144 | keys = sorted(d.keys()) | ||
145 | |||
146 | with closing(db.cursor()) as cursor: | ||
147 | insert_task(cursor, d) | ||
148 | db.commit() | ||
149 | |||
150 | return d | ||
151 | |||
152 | class ServerClient(object): | ||
153 | FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1' | ||
154 | ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1' | ||
155 | OUTHASH_QUERY = ''' | ||
156 | -- Find tasks with a matching outhash (that is, tasks that | ||
157 | -- are equivalent) | ||
158 | SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash | ||
159 | |||
160 | -- If there is an exact match on the taskhash, return it. | ||
161 | -- Otherwise return the oldest matching outhash of any | ||
162 | -- taskhash | ||
163 | ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END, | ||
164 | created ASC | ||
165 | |||
166 | -- Only return one row | ||
167 | LIMIT 1 | ||
168 | ''' | ||
169 | |||
170 | def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only): | ||
171 | self.reader = reader | ||
172 | self.writer = writer | ||
173 | self.db = db | ||
174 | self.request_stats = request_stats | ||
175 | self.max_chunk = DEFAULT_MAX_CHUNK | ||
176 | self.backfill_queue = backfill_queue | ||
177 | self.upstream = upstream | ||
178 | |||
179 | self.handlers = { | ||
180 | 'get': self.handle_get, | ||
181 | 'get-outhash': self.handle_get_outhash, | ||
182 | 'get-stream': self.handle_get_stream, | ||
183 | 'get-stats': self.handle_get_stats, | ||
184 | 'chunk-stream': self.handle_chunk, | ||
185 | } | 143 | } |
186 | 144 | ||
187 | if not read_only: | ||
188 | self.handlers.update({ | ||
189 | 'report': self.handle_report, | ||
190 | 'report-equiv': self.handle_equivreport, | ||
191 | 'reset-stats': self.handle_reset_stats, | ||
192 | 'backfill-wait': self.handle_backfill_wait, | ||
193 | }) | ||
194 | 145 | ||
195 | async def process_requests(self): | 146 | token_refresh_semaphore = asyncio.Lock() |
196 | if self.upstream is not None: | ||
197 | self.upstream_client = await create_async_client(self.upstream) | ||
198 | else: | ||
199 | self.upstream_client = None | ||
200 | 147 | ||
201 | try: | ||
202 | 148 | ||
149 | async def new_token(): | ||
150 | # Prevent malicious users from using this API to deduce the entropy | ||
151 | # pool on the server and thus be able to guess a token. *All* token | ||
152 | # refresh requests lock the same global semaphore and then sleep for a | ||
153 | # short time. This effectively rate limits the total number of requests | ||
154 | # that can be made across all clients to 10/second, which should be enough | ||
155 | # since you have to be an authenticated user to make the request in the | ||
156 | # first place | ||
157 | async with token_refresh_semaphore: | ||
158 | await asyncio.sleep(0.1) | ||
159 | raw = os.getrandom(TOKEN_SIZE, os.GRND_NONBLOCK) | ||
203 | 160 | ||
204 | self.addr = self.writer.get_extra_info('peername') | 161 | return base64.b64encode(raw, b"._").decode("utf-8") |
205 | logger.debug('Client %r connected' % (self.addr,)) | ||
206 | 162 | ||
207 | # Read protocol and version | ||
208 | protocol = await self.reader.readline() | ||
209 | if protocol is None: | ||
210 | return | ||
211 | 163 | ||
212 | (proto_name, proto_version) = protocol.decode('utf-8').rstrip().split() | 164 | def new_salt(): |
213 | if proto_name != 'OEHASHEQUIV': | 165 | return os.getrandom(SALT_SIZE, os.GRND_NONBLOCK).hex() |
214 | return | ||
215 | 166 | ||
216 | proto_version = tuple(int(v) for v in proto_version.split('.')) | ||
217 | if proto_version < (1, 0) or proto_version > (1, 1): | ||
218 | return | ||
219 | 167 | ||
220 | # Read headers. Currently, no headers are implemented, so look for | 168 | def hash_token(algo, salt, token): |
221 | # an empty line to signal the end of the headers | 169 | h = hashlib.new(algo) |
222 | while True: | 170 | h.update(salt.encode("utf-8")) |
223 | line = await self.reader.readline() | 171 | h.update(token.encode("utf-8")) |
224 | if line is None: | 172 | return ":".join([algo, salt, h.hexdigest()]) |
225 | return | ||
226 | 173 | ||
227 | line = line.decode('utf-8').rstrip() | ||
228 | if not line: | ||
229 | break | ||
230 | 174 | ||
231 | # Handle messages | 175 | def permissions(*permissions, allow_anon=True, allow_self_service=False): |
232 | while True: | 176 | """ |
233 | d = await self.read_message() | 177 | Function decorator that can be used to decorate an RPC function call and |
234 | if d is None: | 178 | check that the current users permissions match the require permissions. |
235 | break | ||
236 | await self.dispatch_message(d) | ||
237 | await self.writer.drain() | ||
238 | except ClientError as e: | ||
239 | logger.error(str(e)) | ||
240 | finally: | ||
241 | if self.upstream_client is not None: | ||
242 | await self.upstream_client.close() | ||
243 | 179 | ||
244 | self.writer.close() | 180 | If allow_anon is True, the user will also be allowed to make the RPC call |
181 | if the anonymous user permissions match the permissions. | ||
245 | 182 | ||
246 | async def dispatch_message(self, msg): | 183 | If allow_self_service is True, and the "username" property in the request |
247 | for k in self.handlers.keys(): | 184 | is the currently logged in user, or not specified, the user will also be |
248 | if k in msg: | 185 | allowed to make the request. This allows users to access normal privileged |
249 | logger.debug('Handling %s' % k) | 186 | API, as long as they are only modifying their own user properties (e.g. |
250 | if 'stream' in k: | 187 | users can be allowed to reset their own token without @user-admin |
251 | await self.handlers[k](msg[k]) | 188 | permissions, but not the token for any other user. |
189 | """ | ||
190 | |||
191 | def wrapper(func): | ||
192 | async def wrap(self, request): | ||
193 | if allow_self_service and self.user is not None: | ||
194 | username = request.get("username", self.user.username) | ||
195 | if username == self.user.username: | ||
196 | request["username"] = self.user.username | ||
197 | return await func(self, request) | ||
198 | |||
199 | if not self.user_has_permissions(*permissions, allow_anon=allow_anon): | ||
200 | if not self.user: | ||
201 | username = "Anonymous user" | ||
202 | user_perms = self.server.anon_perms | ||
252 | else: | 203 | else: |
253 | with self.request_stats.start_sample() as self.request_sample, \ | 204 | username = self.user.username |
254 | self.request_sample.measure(): | 205 | user_perms = self.user.permissions |
255 | await self.handlers[k](msg[k]) | 206 | |
256 | return | 207 | self.logger.info( |
208 | "User %s with permissions %r denied from calling %s. Missing permissions(s) %r", | ||
209 | username, | ||
210 | ", ".join(user_perms), | ||
211 | func.__name__, | ||
212 | ", ".join(permissions), | ||
213 | ) | ||
214 | raise bb.asyncrpc.InvokeError( | ||
215 | f"{username} is not allowed to access permissions(s) {', '.join(permissions)}" | ||
216 | ) | ||
217 | |||
218 | return await func(self, request) | ||
219 | |||
220 | return wrap | ||
221 | |||
222 | return wrapper | ||
223 | |||
224 | |||
225 | class ServerClient(bb.asyncrpc.AsyncServerConnection): | ||
226 | def __init__(self, socket, server): | ||
227 | super().__init__(socket, "OEHASHEQUIV", server.logger) | ||
228 | self.server = server | ||
229 | self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK | ||
230 | self.user = None | ||
231 | |||
232 | self.handlers.update( | ||
233 | { | ||
234 | "get": self.handle_get, | ||
235 | "get-outhash": self.handle_get_outhash, | ||
236 | "get-stream": self.handle_get_stream, | ||
237 | "exists-stream": self.handle_exists_stream, | ||
238 | "get-stats": self.handle_get_stats, | ||
239 | "get-db-usage": self.handle_get_db_usage, | ||
240 | "get-db-query-columns": self.handle_get_db_query_columns, | ||
241 | # Not always read-only, but internally checks if the server is | ||
242 | # read-only | ||
243 | "report": self.handle_report, | ||
244 | "auth": self.handle_auth, | ||
245 | "get-user": self.handle_get_user, | ||
246 | "get-all-users": self.handle_get_all_users, | ||
247 | "become-user": self.handle_become_user, | ||
248 | } | ||
249 | ) | ||
257 | 250 | ||
258 | raise ClientError("Unrecognized command %r" % msg) | 251 | if not self.server.read_only: |
252 | self.handlers.update( | ||
253 | { | ||
254 | "report-equiv": self.handle_equivreport, | ||
255 | "reset-stats": self.handle_reset_stats, | ||
256 | "backfill-wait": self.handle_backfill_wait, | ||
257 | "remove": self.handle_remove, | ||
258 | "gc-mark": self.handle_gc_mark, | ||
259 | "gc-sweep": self.handle_gc_sweep, | ||
260 | "gc-status": self.handle_gc_status, | ||
261 | "clean-unused": self.handle_clean_unused, | ||
262 | "refresh-token": self.handle_refresh_token, | ||
263 | "set-user-perms": self.handle_set_perms, | ||
264 | "new-user": self.handle_new_user, | ||
265 | "delete-user": self.handle_delete_user, | ||
266 | } | ||
267 | ) | ||
259 | 268 | ||
260 | def write_message(self, msg): | 269 | def raise_no_user_error(self, username): |
261 | for c in chunkify(json.dumps(msg), self.max_chunk): | 270 | raise bb.asyncrpc.InvokeError(f"No user named '{username}' exists") |
262 | self.writer.write(c.encode('utf-8')) | ||
263 | 271 | ||
264 | async def read_message(self): | 272 | def user_has_permissions(self, *permissions, allow_anon=True): |
265 | l = await self.reader.readline() | 273 | permissions = set(permissions) |
266 | if not l: | 274 | if allow_anon: |
267 | return None | 275 | if ALL_PERM in self.server.anon_perms: |
276 | return True | ||
268 | 277 | ||
269 | try: | 278 | if not permissions - self.server.anon_perms: |
270 | message = l.decode('utf-8') | 279 | return True |
271 | 280 | ||
272 | if not message.endswith('\n'): | 281 | if self.user is None: |
273 | return None | 282 | return False |
274 | 283 | ||
275 | return json.loads(message) | 284 | if ALL_PERM in self.user.permissions: |
276 | except (json.JSONDecodeError, UnicodeDecodeError) as e: | 285 | return True |
277 | logger.error('Bad message from client: %r' % message) | ||
278 | raise e | ||
279 | 286 | ||
280 | async def handle_chunk(self, request): | 287 | if not permissions - self.user.permissions: |
281 | lines = [] | 288 | return True |
282 | try: | ||
283 | while True: | ||
284 | l = await self.reader.readline() | ||
285 | l = l.rstrip(b"\n").decode("utf-8") | ||
286 | if not l: | ||
287 | break | ||
288 | lines.append(l) | ||
289 | 289 | ||
290 | msg = json.loads(''.join(lines)) | 290 | return False |
291 | except (json.JSONDecodeError, UnicodeDecodeError) as e: | ||
292 | logger.error('Bad message from client: %r' % message) | ||
293 | raise e | ||
294 | 291 | ||
295 | if 'chunk-stream' in msg: | 292 | def validate_proto_version(self): |
296 | raise ClientError("Nested chunks are not allowed") | 293 | return self.proto_version > (1, 0) and self.proto_version <= (1, 1) |
297 | 294 | ||
298 | await self.dispatch_message(msg) | 295 | async def process_requests(self): |
296 | async with self.server.db_engine.connect(self.logger) as db: | ||
297 | self.db = db | ||
298 | if self.server.upstream is not None: | ||
299 | self.upstream_client = await create_async_client(self.server.upstream) | ||
300 | else: | ||
301 | self.upstream_client = None | ||
299 | 302 | ||
300 | async def handle_get(self, request): | 303 | try: |
301 | method = request['method'] | 304 | await super().process_requests() |
302 | taskhash = request['taskhash'] | 305 | finally: |
306 | if self.upstream_client is not None: | ||
307 | await self.upstream_client.close() | ||
303 | 308 | ||
304 | if request.get('all', False): | 309 | async def dispatch_message(self, msg): |
305 | row = self.query_equivalent(method, taskhash, self.ALL_QUERY) | 310 | for k in self.handlers.keys(): |
306 | else: | 311 | if k in msg: |
307 | row = self.query_equivalent(method, taskhash, self.FAST_QUERY) | 312 | self.logger.debug("Handling %s" % k) |
313 | if "stream" in k: | ||
314 | return await self.handlers[k](msg[k]) | ||
315 | else: | ||
316 | with self.server.request_stats.start_sample() as self.request_sample, self.request_sample.measure(): | ||
317 | return await self.handlers[k](msg[k]) | ||
308 | 318 | ||
309 | if row is not None: | 319 | raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg) |
310 | logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) | 320 | |
311 | d = {k: row[k] for k in row.keys()} | 321 | @permissions(READ_PERM) |
312 | elif self.upstream_client is not None: | 322 | async def handle_get(self, request): |
313 | d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash) | 323 | method = request["method"] |
324 | taskhash = request["taskhash"] | ||
325 | fetch_all = request.get("all", False) | ||
326 | |||
327 | return await self.get_unihash(method, taskhash, fetch_all) | ||
328 | |||
329 | async def get_unihash(self, method, taskhash, fetch_all=False): | ||
330 | d = None | ||
331 | |||
332 | if fetch_all: | ||
333 | row = await self.db.get_unihash_by_taskhash_full(method, taskhash) | ||
334 | if row is not None: | ||
335 | d = {k: row[k] for k in row.keys()} | ||
336 | elif self.upstream_client is not None: | ||
337 | d = await self.upstream_client.get_taskhash(method, taskhash, True) | ||
338 | await self.update_unified(d) | ||
314 | else: | 339 | else: |
315 | d = None | 340 | row = await self.db.get_equivalent(method, taskhash) |
341 | |||
342 | if row is not None: | ||
343 | d = {k: row[k] for k in row.keys()} | ||
344 | elif self.upstream_client is not None: | ||
345 | d = await self.upstream_client.get_taskhash(method, taskhash) | ||
346 | await self.db.insert_unihash(d["method"], d["taskhash"], d["unihash"]) | ||
316 | 347 | ||
317 | self.write_message(d) | 348 | return d |
318 | 349 | ||
350 | @permissions(READ_PERM) | ||
319 | async def handle_get_outhash(self, request): | 351 | async def handle_get_outhash(self, request): |
320 | with closing(self.db.cursor()) as cursor: | 352 | method = request["method"] |
321 | cursor.execute(self.OUTHASH_QUERY, | 353 | outhash = request["outhash"] |
322 | {k: request[k] for k in ('method', 'outhash', 'taskhash')}) | 354 | taskhash = request["taskhash"] |
355 | with_unihash = request.get("with_unihash", True) | ||
323 | 356 | ||
324 | row = cursor.fetchone() | 357 | return await self.get_outhash(method, outhash, taskhash, with_unihash) |
358 | |||
359 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): | ||
360 | d = None | ||
361 | if with_unihash: | ||
362 | row = await self.db.get_unihash_by_outhash(method, outhash) | ||
363 | else: | ||
364 | row = await self.db.get_outhash(method, outhash) | ||
325 | 365 | ||
326 | if row is not None: | 366 | if row is not None: |
327 | logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash'])) | ||
328 | d = {k: row[k] for k in row.keys()} | 367 | d = {k: row[k] for k in row.keys()} |
329 | else: | 368 | elif self.upstream_client is not None: |
330 | d = None | 369 | d = await self.upstream_client.get_outhash(method, outhash, taskhash) |
370 | await self.update_unified(d) | ||
331 | 371 | ||
332 | self.write_message(d) | 372 | return d |
333 | 373 | ||
334 | async def handle_get_stream(self, request): | 374 | async def update_unified(self, data): |
335 | self.write_message('ok') | 375 | if data is None: |
376 | return | ||
377 | |||
378 | await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"]) | ||
379 | await self.db.insert_outhash(data) | ||
380 | |||
381 | async def _stream_handler(self, handler): | ||
382 | await self.socket.send_message("ok") | ||
336 | 383 | ||
337 | while True: | 384 | while True: |
338 | upstream = None | 385 | upstream = None |
339 | 386 | ||
340 | l = await self.reader.readline() | 387 | l = await self.socket.recv() |
341 | if not l: | 388 | if not l: |
342 | return | 389 | break |
343 | 390 | ||
344 | try: | 391 | try: |
345 | # This inner loop is very sensitive and must be as fast as | 392 | # This inner loop is very sensitive and must be as fast as |
346 | # possible (which is why the request sample is handled manually | 393 | # possible (which is why the request sample is handled manually |
347 | # instead of using 'with', and also why logging statements are | 394 | # instead of using 'with', and also why logging statements are |
348 | # commented out. | 395 | # commented out. |
349 | self.request_sample = self.request_stats.start_sample() | 396 | self.request_sample = self.server.request_stats.start_sample() |
350 | request_measure = self.request_sample.measure() | 397 | request_measure = self.request_sample.measure() |
351 | request_measure.start() | 398 | request_measure.start() |
352 | 399 | ||
353 | l = l.decode('utf-8').rstrip() | 400 | if l == "END": |
354 | if l == 'END': | 401 | break |
355 | self.writer.write('ok\n'.encode('utf-8')) | ||
356 | return | ||
357 | |||
358 | (method, taskhash) = l.split() | ||
359 | #logger.debug('Looking up %s %s' % (method, taskhash)) | ||
360 | row = self.query_equivalent(method, taskhash, self.FAST_QUERY) | ||
361 | if row is not None: | ||
362 | msg = ('%s\n' % row['unihash']).encode('utf-8') | ||
363 | #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) | ||
364 | elif self.upstream_client is not None: | ||
365 | upstream = await self.upstream_client.get_unihash(method, taskhash) | ||
366 | if upstream: | ||
367 | msg = ("%s\n" % upstream).encode("utf-8") | ||
368 | else: | ||
369 | msg = "\n".encode("utf-8") | ||
370 | else: | ||
371 | msg = '\n'.encode('utf-8') | ||
372 | 402 | ||
373 | self.writer.write(msg) | 403 | msg = await handler(l) |
404 | await self.socket.send(msg) | ||
374 | finally: | 405 | finally: |
375 | request_measure.end() | 406 | request_measure.end() |
376 | self.request_sample.end() | 407 | self.request_sample.end() |
377 | 408 | ||
378 | await self.writer.drain() | 409 | await self.socket.send("ok") |
410 | return self.NO_RESPONSE | ||
379 | 411 | ||
380 | # Post to the backfill queue after writing the result to minimize | 412 | @permissions(READ_PERM) |
381 | # the turn around time on a request | 413 | async def handle_get_stream(self, request): |
382 | if upstream is not None: | 414 | async def handler(l): |
383 | await self.backfill_queue.put((method, taskhash)) | 415 | (method, taskhash) = l.split() |
416 | # self.logger.debug('Looking up %s %s' % (method, taskhash)) | ||
417 | row = await self.db.get_equivalent(method, taskhash) | ||
384 | 418 | ||
385 | async def handle_report(self, data): | 419 | if row is not None: |
386 | with closing(self.db.cursor()) as cursor: | 420 | # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) |
387 | cursor.execute(self.OUTHASH_QUERY, | 421 | return row["unihash"] |
388 | {k: data[k] for k in ('method', 'outhash', 'taskhash')}) | ||
389 | |||
390 | row = cursor.fetchone() | ||
391 | |||
392 | if row is None and self.upstream_client: | ||
393 | # Try upstream | ||
394 | row = await copy_outhash_from_upstream(self.upstream_client, | ||
395 | self.db, | ||
396 | data['method'], | ||
397 | data['outhash'], | ||
398 | data['taskhash']) | ||
399 | |||
400 | # If no matching outhash was found, or one *was* found but it | ||
401 | # wasn't an exact match on the taskhash, a new entry for this | ||
402 | # taskhash should be added | ||
403 | if row is None or row['taskhash'] != data['taskhash']: | ||
404 | # If a row matching the outhash was found, the unihash for | ||
405 | # the new taskhash should be the same as that one. | ||
406 | # Otherwise the caller provided unihash is used. | ||
407 | unihash = data['unihash'] | ||
408 | if row is not None: | ||
409 | unihash = row['unihash'] | ||
410 | |||
411 | insert_data = { | ||
412 | 'method': data['method'], | ||
413 | 'outhash': data['outhash'], | ||
414 | 'taskhash': data['taskhash'], | ||
415 | 'unihash': unihash, | ||
416 | 'created': datetime.now() | ||
417 | } | ||
418 | 422 | ||
419 | for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): | 423 | if self.upstream_client is not None: |
420 | if k in data: | 424 | upstream = await self.upstream_client.get_unihash(method, taskhash) |
421 | insert_data[k] = data[k] | 425 | if upstream: |
426 | await self.server.backfill_queue.put((method, taskhash)) | ||
427 | return upstream | ||
422 | 428 | ||
423 | insert_task(cursor, insert_data) | 429 | return "" |
424 | self.db.commit() | ||
425 | 430 | ||
426 | logger.info('Adding taskhash %s with unihash %s', | 431 | return await self._stream_handler(handler) |
427 | data['taskhash'], unihash) | ||
428 | 432 | ||
429 | d = { | 433 | @permissions(READ_PERM) |
430 | 'taskhash': data['taskhash'], | 434 | async def handle_exists_stream(self, request): |
431 | 'method': data['method'], | 435 | async def handler(l): |
432 | 'unihash': unihash | 436 | if await self.db.unihash_exists(l): |
433 | } | 437 | return "true" |
434 | else: | ||
435 | d = {k: row[k] for k in ('taskhash', 'method', 'unihash')} | ||
436 | 438 | ||
437 | self.write_message(d) | 439 | if self.upstream_client is not None: |
440 | if await self.upstream_client.unihash_exists(l): | ||
441 | return "true" | ||
438 | 442 | ||
439 | async def handle_equivreport(self, data): | 443 | return "false" |
440 | with closing(self.db.cursor()) as cursor: | ||
441 | insert_data = { | ||
442 | 'method': data['method'], | ||
443 | 'outhash': "", | ||
444 | 'taskhash': data['taskhash'], | ||
445 | 'unihash': data['unihash'], | ||
446 | 'created': datetime.now() | ||
447 | } | ||
448 | 444 | ||
449 | for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): | 445 | return await self._stream_handler(handler) |
450 | if k in data: | ||
451 | insert_data[k] = data[k] | ||
452 | 446 | ||
453 | insert_task(cursor, insert_data, ignore=True) | 447 | async def report_readonly(self, data): |
454 | self.db.commit() | 448 | method = data["method"] |
449 | outhash = data["outhash"] | ||
450 | taskhash = data["taskhash"] | ||
455 | 451 | ||
456 | # Fetch the unihash that will be reported for the taskhash. If the | 452 | info = await self.get_outhash(method, outhash, taskhash) |
457 | # unihash matches, it means this row was inserted (or the mapping | 453 | if info: |
458 | # was already valid) | 454 | unihash = info["unihash"] |
459 | row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY) | 455 | else: |
456 | unihash = data["unihash"] | ||
460 | 457 | ||
461 | if row['unihash'] == data['unihash']: | 458 | return { |
462 | logger.info('Adding taskhash equivalence for %s with unihash %s', | 459 | "taskhash": taskhash, |
463 | data['taskhash'], row['unihash']) | 460 | "method": method, |
461 | "unihash": unihash, | ||
462 | } | ||
464 | 463 | ||
465 | d = {k: row[k] for k in ('taskhash', 'method', 'unihash')} | 464 | # Since this can be called either read only or to report, the check to |
465 | # report is made inside the function | ||
466 | @permissions(READ_PERM) | ||
467 | async def handle_report(self, data): | ||
468 | if self.server.read_only or not self.user_has_permissions(REPORT_PERM): | ||
469 | return await self.report_readonly(data) | ||
470 | |||
471 | outhash_data = { | ||
472 | "method": data["method"], | ||
473 | "outhash": data["outhash"], | ||
474 | "taskhash": data["taskhash"], | ||
475 | "created": datetime.now(), | ||
476 | } | ||
466 | 477 | ||
467 | self.write_message(d) | 478 | for k in ("owner", "PN", "PV", "PR", "task", "outhash_siginfo"): |
479 | if k in data: | ||
480 | outhash_data[k] = data[k] | ||
468 | 481 | ||
482 | if self.user: | ||
483 | outhash_data["owner"] = self.user.username | ||
469 | 484 | ||
470 | async def handle_get_stats(self, request): | 485 | # Insert the new entry, unless it already exists |
471 | d = { | 486 | if await self.db.insert_outhash(outhash_data): |
472 | 'requests': self.request_stats.todict(), | 487 | # If this row is new, check if it is equivalent to another |
488 | # output hash | ||
489 | row = await self.db.get_equivalent_for_outhash( | ||
490 | data["method"], data["outhash"], data["taskhash"] | ||
491 | ) | ||
492 | |||
493 | if row is not None: | ||
494 | # A matching output hash was found. Set our taskhash to the | ||
495 | # same unihash since they are equivalent | ||
496 | unihash = row["unihash"] | ||
497 | else: | ||
498 | # No matching output hash was found. This is probably the | ||
499 | # first outhash to be added. | ||
500 | unihash = data["unihash"] | ||
501 | |||
502 | # Query upstream to see if it has a unihash we can use | ||
503 | if self.upstream_client is not None: | ||
504 | upstream_data = await self.upstream_client.get_outhash( | ||
505 | data["method"], data["outhash"], data["taskhash"] | ||
506 | ) | ||
507 | if upstream_data is not None: | ||
508 | unihash = upstream_data["unihash"] | ||
509 | |||
510 | await self.db.insert_unihash(data["method"], data["taskhash"], unihash) | ||
511 | |||
512 | unihash_data = await self.get_unihash(data["method"], data["taskhash"]) | ||
513 | if unihash_data is not None: | ||
514 | unihash = unihash_data["unihash"] | ||
515 | else: | ||
516 | unihash = data["unihash"] | ||
517 | |||
518 | return { | ||
519 | "taskhash": data["taskhash"], | ||
520 | "method": data["method"], | ||
521 | "unihash": unihash, | ||
473 | } | 522 | } |
474 | 523 | ||
475 | self.write_message(d) | 524 | @permissions(READ_PERM, REPORT_PERM) |
525 | async def handle_equivreport(self, data): | ||
526 | await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"]) | ||
527 | |||
528 | # Fetch the unihash that will be reported for the taskhash. If the | ||
529 | # unihash matches, it means this row was inserted (or the mapping | ||
530 | # was already valid) | ||
531 | row = await self.db.get_equivalent(data["method"], data["taskhash"]) | ||
532 | |||
533 | if row["unihash"] == data["unihash"]: | ||
534 | self.logger.info( | ||
535 | "Adding taskhash equivalence for %s with unihash %s", | ||
536 | data["taskhash"], | ||
537 | row["unihash"], | ||
538 | ) | ||
539 | |||
540 | return {k: row[k] for k in ("taskhash", "method", "unihash")} | ||
476 | 541 | ||
542 | @permissions(READ_PERM) | ||
543 | async def handle_get_stats(self, request): | ||
544 | return { | ||
545 | "requests": self.server.request_stats.todict(), | ||
546 | } | ||
547 | |||
548 | @permissions(DB_ADMIN_PERM) | ||
477 | async def handle_reset_stats(self, request): | 549 | async def handle_reset_stats(self, request): |
478 | d = { | 550 | d = { |
479 | 'requests': self.request_stats.todict(), | 551 | "requests": self.server.request_stats.todict(), |
480 | } | 552 | } |
481 | 553 | ||
482 | self.request_stats.reset() | 554 | self.server.request_stats.reset() |
483 | self.write_message(d) | 555 | return d |
484 | 556 | ||
557 | @permissions(READ_PERM) | ||
485 | async def handle_backfill_wait(self, request): | 558 | async def handle_backfill_wait(self, request): |
486 | d = { | 559 | d = { |
487 | 'tasks': self.backfill_queue.qsize(), | 560 | "tasks": self.server.backfill_queue.qsize(), |
488 | } | 561 | } |
489 | await self.backfill_queue.join() | 562 | await self.server.backfill_queue.join() |
490 | self.write_message(d) | 563 | return d |
564 | |||
565 | @permissions(DB_ADMIN_PERM) | ||
566 | async def handle_remove(self, request): | ||
567 | condition = request["where"] | ||
568 | if not isinstance(condition, dict): | ||
569 | raise TypeError("Bad condition type %s" % type(condition)) | ||
570 | |||
571 | return {"count": await self.db.remove(condition)} | ||
572 | |||
573 | @permissions(DB_ADMIN_PERM) | ||
574 | async def handle_gc_mark(self, request): | ||
575 | condition = request["where"] | ||
576 | mark = request["mark"] | ||
577 | |||
578 | if not isinstance(condition, dict): | ||
579 | raise TypeError("Bad condition type %s" % type(condition)) | ||
580 | |||
581 | if not isinstance(mark, str): | ||
582 | raise TypeError("Bad mark type %s" % type(mark)) | ||
583 | |||
584 | return {"count": await self.db.gc_mark(mark, condition)} | ||
585 | |||
586 | @permissions(DB_ADMIN_PERM) | ||
587 | async def handle_gc_sweep(self, request): | ||
588 | mark = request["mark"] | ||
589 | |||
590 | if not isinstance(mark, str): | ||
591 | raise TypeError("Bad mark type %s" % type(mark)) | ||
592 | |||
593 | current_mark = await self.db.get_current_gc_mark() | ||
594 | |||
595 | if not current_mark or mark != current_mark: | ||
596 | raise bb.asyncrpc.InvokeError( | ||
597 | f"'{mark}' is not the current mark. Refusing to sweep" | ||
598 | ) | ||
599 | |||
600 | count = await self.db.gc_sweep() | ||
601 | |||
602 | return {"count": count} | ||
603 | |||
604 | @permissions(DB_ADMIN_PERM) | ||
605 | async def handle_gc_status(self, request): | ||
606 | (keep_rows, remove_rows, current_mark) = await self.db.gc_status() | ||
607 | return { | ||
608 | "keep": keep_rows, | ||
609 | "remove": remove_rows, | ||
610 | "mark": current_mark, | ||
611 | } | ||
612 | |||
613 | @permissions(DB_ADMIN_PERM) | ||
614 | async def handle_clean_unused(self, request): | ||
615 | max_age = request["max_age_seconds"] | ||
616 | oldest = datetime.now() - timedelta(seconds=-max_age) | ||
617 | return {"count": await self.db.clean_unused(oldest)} | ||
618 | |||
619 | @permissions(DB_ADMIN_PERM) | ||
620 | async def handle_get_db_usage(self, request): | ||
621 | return {"usage": await self.db.get_usage()} | ||
622 | |||
623 | @permissions(DB_ADMIN_PERM) | ||
624 | async def handle_get_db_query_columns(self, request): | ||
625 | return {"columns": await self.db.get_query_columns()} | ||
626 | |||
627 | # The authentication API is always allowed | ||
628 | async def handle_auth(self, request): | ||
629 | username = str(request["username"]) | ||
630 | token = str(request["token"]) | ||
631 | |||
632 | async def fail_auth(): | ||
633 | nonlocal username | ||
634 | # Rate limit bad login attempts | ||
635 | await asyncio.sleep(1) | ||
636 | raise bb.asyncrpc.InvokeError(f"Unable to authenticate as {username}") | ||
637 | |||
638 | user, db_token = await self.db.lookup_user_token(username) | ||
639 | |||
640 | if not user or not db_token: | ||
641 | await fail_auth() | ||
491 | 642 | ||
492 | def query_equivalent(self, method, taskhash, query): | ||
493 | # This is part of the inner loop and must be as fast as possible | ||
494 | try: | 643 | try: |
495 | cursor = self.db.cursor() | 644 | algo, salt, _ = db_token.split(":") |
496 | cursor.execute(query, {'method': method, 'taskhash': taskhash}) | 645 | except ValueError: |
497 | return cursor.fetchone() | 646 | await fail_auth() |
498 | except: | ||
499 | cursor.close() | ||
500 | 647 | ||
648 | if hash_token(algo, salt, token) != db_token: | ||
649 | await fail_auth() | ||
501 | 650 | ||
502 | class Server(object): | 651 | self.user = user |
503 | def __init__(self, db, loop=None, upstream=None, read_only=False): | ||
504 | if upstream and read_only: | ||
505 | raise ServerError("Read-only hashserv cannot pull from an upstream server") | ||
506 | 652 | ||
507 | self.request_stats = Stats() | 653 | self.logger.info("Authenticated as %s", username) |
508 | self.db = db | ||
509 | 654 | ||
510 | if loop is None: | 655 | return { |
511 | self.loop = asyncio.new_event_loop() | 656 | "result": True, |
512 | self.close_loop = True | 657 | "username": self.user.username, |
513 | else: | 658 | "permissions": sorted(list(self.user.permissions)), |
514 | self.loop = loop | 659 | } |
515 | self.close_loop = False | ||
516 | 660 | ||
517 | self.upstream = upstream | 661 | @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False) |
518 | self.read_only = read_only | 662 | async def handle_refresh_token(self, request): |
663 | username = str(request["username"]) | ||
519 | 664 | ||
520 | self._cleanup_socket = None | 665 | token = await new_token() |
521 | 666 | ||
522 | def start_tcp_server(self, host, port): | 667 | updated = await self.db.set_user_token( |
523 | self.server = self.loop.run_until_complete( | 668 | username, |
524 | asyncio.start_server(self.handle_client, host, port, loop=self.loop) | 669 | hash_token(TOKEN_ALGORITHM, new_salt(), token), |
525 | ) | 670 | ) |
671 | if not updated: | ||
672 | self.raise_no_user_error(username) | ||
526 | 673 | ||
527 | for s in self.server.sockets: | 674 | return {"username": username, "token": token} |
528 | logger.info('Listening on %r' % (s.getsockname(),)) | ||
529 | # Newer python does this automatically. Do it manually here for | ||
530 | # maximum compatibility | ||
531 | s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) | ||
532 | s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1) | ||
533 | |||
534 | name = self.server.sockets[0].getsockname() | ||
535 | if self.server.sockets[0].family == socket.AF_INET6: | ||
536 | self.address = "[%s]:%d" % (name[0], name[1]) | ||
537 | else: | ||
538 | self.address = "%s:%d" % (name[0], name[1]) | ||
539 | 675 | ||
540 | def start_unix_server(self, path): | 676 | def get_perm_arg(self, arg): |
541 | def cleanup(): | 677 | if not isinstance(arg, list): |
542 | os.unlink(path) | 678 | raise bb.asyncrpc.InvokeError("Unexpected type for permissions") |
543 | 679 | ||
544 | cwd = os.getcwd() | 680 | arg = set(arg) |
545 | try: | 681 | try: |
546 | # Work around path length limits in AF_UNIX | 682 | arg.remove(NONE_PERM) |
547 | os.chdir(os.path.dirname(path)) | 683 | except KeyError: |
548 | self.server = self.loop.run_until_complete( | 684 | pass |
549 | asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop) | 685 | |
686 | unknown_perms = arg - ALL_PERMISSIONS | ||
687 | if unknown_perms: | ||
688 | raise bb.asyncrpc.InvokeError( | ||
689 | "Unknown permissions %s" % ", ".join(sorted(list(unknown_perms))) | ||
550 | ) | 690 | ) |
551 | finally: | ||
552 | os.chdir(cwd) | ||
553 | 691 | ||
554 | logger.info('Listening on %r' % path) | 692 | return sorted(list(arg)) |
555 | 693 | ||
556 | self._cleanup_socket = cleanup | 694 | def return_perms(self, permissions): |
557 | self.address = "unix://%s" % os.path.abspath(path) | 695 | if ALL_PERM in permissions: |
696 | return sorted(list(ALL_PERMISSIONS)) | ||
697 | return sorted(list(permissions)) | ||
558 | 698 | ||
559 | async def handle_client(self, reader, writer): | 699 | @permissions(USER_ADMIN_PERM, allow_anon=False) |
560 | # writer.transport.set_write_buffer_limits(0) | 700 | async def handle_set_perms(self, request): |
561 | try: | 701 | username = str(request["username"]) |
562 | client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only) | 702 | permissions = self.get_perm_arg(request["permissions"]) |
563 | await client.process_requests() | ||
564 | except Exception as e: | ||
565 | import traceback | ||
566 | logger.error('Error from client: %s' % str(e), exc_info=True) | ||
567 | traceback.print_exc() | ||
568 | writer.close() | ||
569 | logger.info('Client disconnected') | ||
570 | |||
571 | @contextmanager | ||
572 | def _backfill_worker(self): | ||
573 | async def backfill_worker_task(): | ||
574 | client = await create_async_client(self.upstream) | ||
575 | try: | ||
576 | while True: | ||
577 | item = await self.backfill_queue.get() | ||
578 | if item is None: | ||
579 | self.backfill_queue.task_done() | ||
580 | break | ||
581 | method, taskhash = item | ||
582 | await copy_from_upstream(client, self.db, method, taskhash) | ||
583 | self.backfill_queue.task_done() | ||
584 | finally: | ||
585 | await client.close() | ||
586 | 703 | ||
587 | async def join_worker(worker): | 704 | if not await self.db.set_user_perms(username, permissions): |
588 | await self.backfill_queue.put(None) | 705 | self.raise_no_user_error(username) |
589 | await worker | ||
590 | 706 | ||
591 | if self.upstream is not None: | 707 | return { |
592 | worker = asyncio.ensure_future(backfill_worker_task()) | 708 | "username": username, |
593 | try: | 709 | "permissions": self.return_perms(permissions), |
594 | yield | 710 | } |
595 | finally: | ||
596 | self.loop.run_until_complete(join_worker(worker)) | ||
597 | else: | ||
598 | yield | ||
599 | 711 | ||
600 | def serve_forever(self): | 712 | @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False) |
601 | def signal_handler(): | 713 | async def handle_get_user(self, request): |
602 | self.loop.stop() | 714 | username = str(request["username"]) |
603 | 715 | ||
604 | asyncio.set_event_loop(self.loop) | 716 | user = await self.db.lookup_user(username) |
605 | try: | 717 | if user is None: |
606 | self.backfill_queue = asyncio.Queue() | 718 | return None |
719 | |||
720 | return { | ||
721 | "username": user.username, | ||
722 | "permissions": self.return_perms(user.permissions), | ||
723 | } | ||
724 | |||
725 | @permissions(USER_ADMIN_PERM, allow_anon=False) | ||
726 | async def handle_get_all_users(self, request): | ||
727 | users = await self.db.get_all_users() | ||
728 | return { | ||
729 | "users": [ | ||
730 | { | ||
731 | "username": u.username, | ||
732 | "permissions": self.return_perms(u.permissions), | ||
733 | } | ||
734 | for u in users | ||
735 | ] | ||
736 | } | ||
737 | |||
738 | @permissions(USER_ADMIN_PERM, allow_anon=False) | ||
739 | async def handle_new_user(self, request): | ||
740 | username = str(request["username"]) | ||
741 | permissions = self.get_perm_arg(request["permissions"]) | ||
742 | |||
743 | token = await new_token() | ||
744 | |||
745 | inserted = await self.db.new_user( | ||
746 | username, | ||
747 | permissions, | ||
748 | hash_token(TOKEN_ALGORITHM, new_salt(), token), | ||
749 | ) | ||
750 | if not inserted: | ||
751 | raise bb.asyncrpc.InvokeError(f"Cannot create new user '{username}'") | ||
752 | |||
753 | return { | ||
754 | "username": username, | ||
755 | "permissions": self.return_perms(permissions), | ||
756 | "token": token, | ||
757 | } | ||
758 | |||
759 | @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False) | ||
760 | async def handle_delete_user(self, request): | ||
761 | username = str(request["username"]) | ||
762 | |||
763 | if not await self.db.delete_user(username): | ||
764 | self.raise_no_user_error(username) | ||
765 | |||
766 | return {"username": username} | ||
607 | 767 | ||
608 | self.loop.add_signal_handler(signal.SIGTERM, signal_handler) | 768 | @permissions(USER_ADMIN_PERM, allow_anon=False) |
769 | async def handle_become_user(self, request): | ||
770 | username = str(request["username"]) | ||
609 | 771 | ||
610 | with self._backfill_worker(): | 772 | user = await self.db.lookup_user(username) |
611 | try: | 773 | if user is None: |
612 | self.loop.run_forever() | 774 | raise bb.asyncrpc.InvokeError(f"User {username} doesn't exist") |
613 | except KeyboardInterrupt: | ||
614 | pass | ||
615 | 775 | ||
616 | self.server.close() | 776 | self.user = user |
777 | |||
778 | self.logger.info("Became user %s", username) | ||
779 | |||
780 | return { | ||
781 | "username": self.user.username, | ||
782 | "permissions": self.return_perms(self.user.permissions), | ||
783 | } | ||
784 | |||
785 | |||
786 | class Server(bb.asyncrpc.AsyncServer): | ||
787 | def __init__( | ||
788 | self, | ||
789 | db_engine, | ||
790 | upstream=None, | ||
791 | read_only=False, | ||
792 | anon_perms=DEFAULT_ANON_PERMS, | ||
793 | admin_username=None, | ||
794 | admin_password=None, | ||
795 | ): | ||
796 | if upstream and read_only: | ||
797 | raise bb.asyncrpc.ServerError( | ||
798 | "Read-only hashserv cannot pull from an upstream server" | ||
799 | ) | ||
800 | |||
801 | disallowed_perms = set(anon_perms) - set( | ||
802 | [NONE_PERM, READ_PERM, REPORT_PERM, DB_ADMIN_PERM] | ||
803 | ) | ||
804 | |||
805 | if disallowed_perms: | ||
806 | raise bb.asyncrpc.ServerError( | ||
807 | f"Permission(s) {' '.join(disallowed_perms)} are not allowed for anonymous users" | ||
808 | ) | ||
617 | 809 | ||
618 | self.loop.run_until_complete(self.server.wait_closed()) | 810 | super().__init__(logger) |
619 | logger.info('Server shutting down') | ||
620 | finally: | ||
621 | if self.close_loop: | ||
622 | if sys.version_info >= (3, 6): | ||
623 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | ||
624 | self.loop.close() | ||
625 | 811 | ||
626 | if self._cleanup_socket is not None: | 812 | self.request_stats = Stats() |
627 | self._cleanup_socket() | 813 | self.db_engine = db_engine |
814 | self.upstream = upstream | ||
815 | self.read_only = read_only | ||
816 | self.backfill_queue = None | ||
817 | self.anon_perms = set(anon_perms) | ||
818 | self.admin_username = admin_username | ||
819 | self.admin_password = admin_password | ||
820 | |||
821 | self.logger.info( | ||
822 | "Anonymous user permissions are: %s", ", ".join(self.anon_perms) | ||
823 | ) | ||
824 | |||
825 | def accept_client(self, socket): | ||
826 | return ServerClient(socket, self) | ||
827 | |||
828 | async def create_admin_user(self): | ||
829 | admin_permissions = (ALL_PERM,) | ||
830 | async with self.db_engine.connect(self.logger) as db: | ||
831 | added = await db.new_user( | ||
832 | self.admin_username, | ||
833 | admin_permissions, | ||
834 | hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password), | ||
835 | ) | ||
836 | if added: | ||
837 | self.logger.info("Created admin user '%s'", self.admin_username) | ||
838 | else: | ||
839 | await db.set_user_perms( | ||
840 | self.admin_username, | ||
841 | admin_permissions, | ||
842 | ) | ||
843 | await db.set_user_token( | ||
844 | self.admin_username, | ||
845 | hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password), | ||
846 | ) | ||
847 | self.logger.info("Admin user '%s' updated", self.admin_username) | ||
848 | |||
849 | async def backfill_worker_task(self): | ||
850 | async with await create_async_client( | ||
851 | self.upstream | ||
852 | ) as client, self.db_engine.connect(self.logger) as db: | ||
853 | while True: | ||
854 | item = await self.backfill_queue.get() | ||
855 | if item is None: | ||
856 | self.backfill_queue.task_done() | ||
857 | break | ||
858 | |||
859 | method, taskhash = item | ||
860 | d = await client.get_taskhash(method, taskhash) | ||
861 | if d is not None: | ||
862 | await db.insert_unihash(d["method"], d["taskhash"], d["unihash"]) | ||
863 | self.backfill_queue.task_done() | ||
864 | |||
865 | def start(self): | ||
866 | tasks = super().start() | ||
867 | if self.upstream: | ||
868 | self.backfill_queue = asyncio.Queue() | ||
869 | tasks += [self.backfill_worker_task()] | ||
870 | |||
871 | self.loop.run_until_complete(self.db_engine.create()) | ||
872 | |||
873 | if self.admin_username: | ||
874 | self.loop.run_until_complete(self.create_admin_user()) | ||
875 | |||
876 | return tasks | ||
877 | |||
878 | async def stop(self): | ||
879 | if self.backfill_queue is not None: | ||
880 | await self.backfill_queue.put(None) | ||
881 | await super().stop() | ||
diff --git a/bitbake/lib/hashserv/sqlalchemy.py b/bitbake/lib/hashserv/sqlalchemy.py new file mode 100644 index 0000000000..f7b0226a7a --- /dev/null +++ b/bitbake/lib/hashserv/sqlalchemy.py | |||
@@ -0,0 +1,598 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright (C) 2023 Garmin Ltd. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import logging | ||
9 | from datetime import datetime | ||
10 | from . import User | ||
11 | |||
12 | from sqlalchemy.ext.asyncio import create_async_engine | ||
13 | from sqlalchemy.pool import NullPool | ||
14 | from sqlalchemy import ( | ||
15 | MetaData, | ||
16 | Column, | ||
17 | Table, | ||
18 | Text, | ||
19 | Integer, | ||
20 | UniqueConstraint, | ||
21 | DateTime, | ||
22 | Index, | ||
23 | select, | ||
24 | insert, | ||
25 | exists, | ||
26 | literal, | ||
27 | and_, | ||
28 | delete, | ||
29 | update, | ||
30 | func, | ||
31 | inspect, | ||
32 | ) | ||
33 | import sqlalchemy.engine | ||
34 | from sqlalchemy.orm import declarative_base | ||
35 | from sqlalchemy.exc import IntegrityError | ||
36 | from sqlalchemy.dialects.postgresql import insert as postgres_insert | ||
37 | |||
38 | Base = declarative_base() | ||
39 | |||
40 | |||
class UnihashesV3(Base):
    """Maps a (method, taskhash) pair to its unified hash.

    ``gc_mark`` holds the garbage-collection mark applied by
    ``Database.gc_mark()``; rows whose mark differs from the current one
    are deleted by ``Database.gc_sweep()``.
    """

    __tablename__ = "unihashes_v3"
    id = Column(Integer, primary_key=True, autoincrement=True)
    method = Column(Text, nullable=False)
    taskhash = Column(Text, nullable=False)
    unihash = Column(Text, nullable=False)
    gc_mark = Column(Text, nullable=False)

    __table_args__ = (
        UniqueConstraint("method", "taskhash"),
        Index("taskhash_lookup_v4", "method", "taskhash"),
        Index("unihash_lookup_v1", "unihash"),
    )
54 | |||
55 | |||
class OuthashesV2(Base):
    """Records an output hash for a (method, taskhash) pair.

    Only method/taskhash/outhash participate in the uniqueness constraint;
    the remaining columns are optional reporting metadata supplied by the
    client (see ``Database.insert_outhash``).
    """

    __tablename__ = "outhashes_v2"
    id = Column(Integer, primary_key=True, autoincrement=True)
    method = Column(Text, nullable=False)
    taskhash = Column(Text, nullable=False)
    outhash = Column(Text, nullable=False)
    created = Column(DateTime)
    owner = Column(Text)
    PN = Column(Text)
    PV = Column(Text)
    PR = Column(Text)
    task = Column(Text)
    outhash_siginfo = Column(Text)

    __table_args__ = (
        UniqueConstraint("method", "taskhash", "outhash"),
        Index("outhash_lookup_v3", "method", "outhash"),
    )
74 | |||
75 | |||
class Users(Base):
    """Server user accounts.

    ``token`` stores a hashed authentication token and ``permissions`` a
    space-separated permission list (see ``map_user``).
    """

    __tablename__ = "users"
    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(Text, nullable=False)
    token = Column(Text, nullable=False)
    permissions = Column(Text)

    __table_args__ = (UniqueConstraint("username"),)
84 | |||
85 | |||
class Config(Base):
    """Simple name/value store for server-wide settings (e.g. the "gc-mark" value)."""

    __tablename__ = "config"
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(Text, nullable=False)
    value = Column(Text)
    __table_args__ = (
        UniqueConstraint("name"),
        Index("config_lookup", "name"),
    )
95 | |||
96 | |||
#
# Old table versions
#
# Kept on a separate declarative base so that Base.metadata only describes
# the live schema; these definitions exist solely so DatabaseEngine.create()
# can detect and migrate data from older databases.
DeprecatedBase = declarative_base()


class UnihashesV2(DeprecatedBase):
    """Pre-garbage-collection unihash table; migrated to unihashes_v3 on startup."""

    __tablename__ = "unihashes_v2"
    id = Column(Integer, primary_key=True, autoincrement=True)
    method = Column(Text, nullable=False)
    taskhash = Column(Text, nullable=False)
    unihash = Column(Text, nullable=False)

    __table_args__ = (
        UniqueConstraint("method", "taskhash"),
        Index("taskhash_lookup_v3", "method", "taskhash"),
    )
114 | |||
115 | |||
class DatabaseEngine(object):
    """Owns the async SQLAlchemy engine for the hash equivalence database.

    ``create()`` must be awaited once to build the engine and the schema;
    ``connect()`` then hands out per-client ``Database`` wrappers.
    """

    def __init__(self, url, username=None, password=None):
        """Parse the database URL, optionally overriding its credentials."""
        self.logger = logging.getLogger("hashserv.sqlalchemy")
        self.url = sqlalchemy.engine.make_url(url)

        if username is not None:
            self.url = self.url.set(username=username)

        if password is not None:
            self.url = self.url.set(password=password)

    async def create(self):
        """Create the engine, create all tables, and migrate old schemas.

        If a legacy ``unihashes_v2`` table exists its rows are copied into
        ``unihashes_v3`` (with an empty gc_mark) and the old table dropped.
        """

        def check_table_exists(conn, name):
            # Runs synchronously on the underlying connection via run_sync().
            return inspect(conn).has_table(name)

        self.logger.info("Using database %s", self.url)
        if self.url.drivername == 'postgresql+psycopg':
            # Psygopg 3 (psygopg) driver can handle async connection pooling
            self.engine = create_async_engine(self.url, max_overflow=-1)
        else:
            self.engine = create_async_engine(self.url, poolclass=NullPool)

        async with self.engine.begin() as conn:
            # Create tables
            self.logger.info("Creating tables...")
            await conn.run_sync(Base.metadata.create_all)

            if await conn.run_sync(check_table_exists, UnihashesV2.__tablename__):
                self.logger.info("Upgrading Unihashes V2 -> V3...")
                statement = insert(UnihashesV3).from_select(
                    ["id", "method", "unihash", "taskhash", "gc_mark"],
                    select(
                        UnihashesV2.id,
                        UnihashesV2.method,
                        UnihashesV2.unihash,
                        UnihashesV2.taskhash,
                        literal("").label("gc_mark"),
                    ),
                )
                self.logger.debug("%s", statement)
                await conn.execute(statement)

                await conn.run_sync(Base.metadata.drop_all, [UnihashesV2.__table__])
                self.logger.info("Upgrade complete")

    def connect(self, logger):
        """Return a Database wrapper that draws connections from this engine."""
        return Database(self.engine, logger)
163 | |||
164 | |||
def map_row(row):
    """Convert a SQLAlchemy result row to a plain dict; ``None`` passes through."""
    if row is None:
        return None
    return dict(row._mapping)
169 | |||
170 | |||
def map_user(row):
    """Build a User from a users-table row; ``None`` passes through.

    The space-separated permissions column is expanded into a set.
    """
    if row is None:
        return None
    perms = set(row.permissions.split())
    return User(username=row.username, permissions=perms)
178 | |||
179 | |||
180 | def _make_condition_statement(table, condition): | ||
181 | where = {} | ||
182 | for c in table.__table__.columns: | ||
183 | if c.key in condition and condition[c.key] is not None: | ||
184 | where[c] = condition[c.key] | ||
185 | |||
186 | return [(k == v) for k, v in where.items()] | ||
187 | |||
188 | |||
class Database(object):
    """One async connection to the hash equivalence database.

    Intended for use as an async context manager: the connection is taken
    from the shared engine in ``__aenter__`` and released in ``__aexit__``.
    Query results are returned as plain dicts (via ``map_row``), ``User``
    objects (via ``map_user``), or raw scalar values.
    """

    def __init__(self, engine, logger):
        self.engine = engine
        self.db = None  # AsyncConnection; set in __aenter__
        self.logger = logger

    async def __aenter__(self):
        """Acquire a connection from the engine."""
        self.db = await self.engine.connect()
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()

    async def close(self):
        """Release the underlying connection back to the engine."""
        await self.db.close()
        self.db = None

    async def _execute(self, statement):
        """Log a statement at debug level, then execute it."""
        self.logger.debug("%s", statement)
        return await self.db.execute(statement)

    async def _set_config(self, name, value):
        """Upsert the config row `name`: UPDATE first, INSERT when no row matched."""
        while True:
            result = await self._execute(
                update(Config).where(Config.name == name).values(value=value)
            )

            if result.rowcount == 0:
                self.logger.debug("Config '%s' not found. Adding it", name)
                try:
                    await self._execute(insert(Config).values(name=name, value=value))
                except IntegrityError:
                    # Race. Try again
                    continue

            break

    def _get_config_subquery(self, name, default=None):
        """Scalar subquery for the config value `name`, COALESCEd to `default` if given."""
        if default is not None:
            return func.coalesce(
                select(Config.value).where(Config.name == name).scalar_subquery(),
                default,
            )
        return select(Config.value).where(Config.name == name).scalar_subquery()

    async def _get_config(self, name):
        """Return the config value for `name`, or None when no row exists."""
        result = await self._execute(select(Config.value).where(Config.name == name))
        row = result.first()
        if row is None:
            return None
        return row.value

    async def get_unihash_by_taskhash_full(self, method, taskhash):
        """Oldest outhash row (joined with its unihash) for (method, taskhash), as a dict."""
        async with self.db.begin():
            result = await self._execute(
                select(
                    OuthashesV2,
                    UnihashesV3.unihash.label("unihash"),
                )
                .join(
                    UnihashesV3,
                    and_(
                        UnihashesV3.method == OuthashesV2.method,
                        UnihashesV3.taskhash == OuthashesV2.taskhash,
                    ),
                )
                .where(
                    OuthashesV2.method == method,
                    OuthashesV2.taskhash == taskhash,
                )
                .order_by(
                    OuthashesV2.created.asc(),
                )
                .limit(1)
            )
            return map_row(result.first())

    async def get_unihash_by_outhash(self, method, outhash):
        """Oldest outhash row (joined with its unihash) for (method, outhash), as a dict."""
        async with self.db.begin():
            result = await self._execute(
                select(OuthashesV2, UnihashesV3.unihash.label("unihash"))
                .join(
                    UnihashesV3,
                    and_(
                        UnihashesV3.method == OuthashesV2.method,
                        UnihashesV3.taskhash == OuthashesV2.taskhash,
                    ),
                )
                .where(
                    OuthashesV2.method == method,
                    OuthashesV2.outhash == outhash,
                )
                .order_by(
                    OuthashesV2.created.asc(),
                )
                .limit(1)
            )
            return map_row(result.first())

    async def unihash_exists(self, unihash):
        """True if any row records `unihash` as a unified hash."""
        async with self.db.begin():
            result = await self._execute(
                select(UnihashesV3).where(UnihashesV3.unihash == unihash).limit(1)
            )

            return result.first() is not None

    async def get_outhash(self, method, outhash):
        """Oldest outhashes_v2 row for (method, outhash), as a dict."""
        async with self.db.begin():
            result = await self._execute(
                select(OuthashesV2)
                .where(
                    OuthashesV2.method == method,
                    OuthashesV2.outhash == outhash,
                )
                .order_by(
                    OuthashesV2.created.asc(),
                )
                .limit(1)
            )
            return map_row(result.first())

    async def get_equivalent_for_outhash(self, method, outhash, taskhash):
        """Oldest (taskhash, unihash) with the same outhash but a different taskhash."""
        async with self.db.begin():
            result = await self._execute(
                select(
                    OuthashesV2.taskhash.label("taskhash"),
                    UnihashesV3.unihash.label("unihash"),
                )
                .join(
                    UnihashesV3,
                    and_(
                        UnihashesV3.method == OuthashesV2.method,
                        UnihashesV3.taskhash == OuthashesV2.taskhash,
                    ),
                )
                .where(
                    OuthashesV2.method == method,
                    OuthashesV2.outhash == outhash,
                    OuthashesV2.taskhash != taskhash,
                )
                .order_by(
                    OuthashesV2.created.asc(),
                )
                .limit(1)
            )
            return map_row(result.first())

    async def get_equivalent(self, method, taskhash):
        """The unihashes_v3 row for (method, taskhash), as a dict, or None."""
        async with self.db.begin():
            result = await self._execute(
                select(
                    UnihashesV3.unihash,
                    UnihashesV3.method,
                    UnihashesV3.taskhash,
                ).where(
                    UnihashesV3.method == method,
                    UnihashesV3.taskhash == taskhash,
                )
            )
            return map_row(result.first())

    async def remove(self, condition):
        """Delete rows matching `condition` from both tables; returns rows removed."""

        async def do_remove(table):
            # Only delete when the condition names at least one real column.
            where = _make_condition_statement(table, condition)
            if where:
                async with self.db.begin():
                    result = await self._execute(delete(table).where(*where))
                    return result.rowcount

            return 0

        count = 0
        count += await do_remove(UnihashesV3)
        count += await do_remove(OuthashesV2)

        return count

    async def get_current_gc_mark(self):
        """Return the currently stored "gc-mark" config value (or None)."""
        async with self.db.begin():
            return await self._get_config("gc-mark")

    async def gc_status(self):
        """Return (rows kept, rows pending removal, current gc mark)."""
        async with self.db.begin():
            gc_mark_subquery = self._get_config_subquery("gc-mark", "")

            result = await self._execute(
                select(func.count())
                .select_from(UnihashesV3)
                .where(UnihashesV3.gc_mark == gc_mark_subquery)
            )
            keep_rows = result.scalar()

            result = await self._execute(
                select(func.count())
                .select_from(UnihashesV3)
                .where(UnihashesV3.gc_mark != gc_mark_subquery)
            )
            remove_rows = result.scalar()

            return (keep_rows, remove_rows, await self._get_config("gc-mark"))

    async def gc_mark(self, mark, condition):
        """Set the current gc mark and tag matching rows with it; returns rows tagged."""
        async with self.db.begin():
            await self._set_config("gc-mark", mark)

            where = _make_condition_statement(UnihashesV3, condition)
            if not where:
                return 0

            result = await self._execute(
                update(UnihashesV3)
                .values(gc_mark=self._get_config_subquery("gc-mark", ""))
                .where(*where)
            )
            return result.rowcount

    async def gc_sweep(self):
        """Delete rows not tagged with the current gc mark, then clear the mark."""
        async with self.db.begin():
            result = await self._execute(
                delete(UnihashesV3).where(
                    # A sneaky conditional that provides some errant use
                    # protection: If the config mark is NULL, this will not
                    # match any rows because No default is specified in the
                    # select statement
                    UnihashesV3.gc_mark
                    != self._get_config_subquery("gc-mark")
                )
            )
            await self._set_config("gc-mark", None)

            return result.rowcount

    async def clean_unused(self, oldest):
        """Delete outhash rows older than `oldest` that have no matching unihash."""
        async with self.db.begin():
            result = await self._execute(
                delete(OuthashesV2).where(
                    OuthashesV2.created < oldest,
                    ~(
                        select(UnihashesV3.id)
                        .where(
                            UnihashesV3.method == OuthashesV2.method,
                            UnihashesV3.taskhash == OuthashesV2.taskhash,
                        )
                        .limit(1)
                        .exists()
                    ),
                )
            )
            return result.rowcount

    async def insert_unihash(self, method, taskhash, unihash):
        """Insert a unihash row tagged with the current gc mark.

        Returns True if a row was inserted, False when (method, taskhash)
        already exists.
        """
        # Postgres specific ignore on insert duplicate
        if self.engine.name == "postgresql":
            statement = (
                postgres_insert(UnihashesV3)
                .values(
                    method=method,
                    taskhash=taskhash,
                    unihash=unihash,
                    gc_mark=self._get_config_subquery("gc-mark", ""),
                )
                .on_conflict_do_nothing(index_elements=("method", "taskhash"))
            )
        else:
            statement = insert(UnihashesV3).values(
                method=method,
                taskhash=taskhash,
                unihash=unihash,
                gc_mark=self._get_config_subquery("gc-mark", ""),
            )

        try:
            async with self.db.begin():
                result = await self._execute(statement)
                return result.rowcount != 0
        except IntegrityError:
            self.logger.debug(
                "%s, %s, %s already in unihash database", method, taskhash, unihash
            )
            return False

    async def insert_outhash(self, data):
        """Insert an outhash row from a dict of column values.

        Unknown keys are dropped and a string "created" value is parsed as
        an ISO timestamp. Returns True if a row was inserted.
        """
        outhash_columns = set(c.key for c in OuthashesV2.__table__.columns)

        data = {k: v for k, v in data.items() if k in outhash_columns}

        if "created" in data and not isinstance(data["created"], datetime):
            data["created"] = datetime.fromisoformat(data["created"])

        # Postgres specific ignore on insert duplicate
        if self.engine.name == "postgresql":
            statement = (
                postgres_insert(OuthashesV2)
                .values(**data)
                .on_conflict_do_nothing(
                    index_elements=("method", "taskhash", "outhash")
                )
            )
        else:
            statement = insert(OuthashesV2).values(**data)

        try:
            async with self.db.begin():
                result = await self._execute(statement)
                return result.rowcount != 0
        except IntegrityError:
            self.logger.debug(
                "%s, %s already in outhash database", data["method"], data["outhash"]
            )
            return False

    async def _get_user(self, username):
        """Fetch the raw users row for `username` (or None)."""
        async with self.db.begin():
            result = await self._execute(
                select(
                    Users.username,
                    Users.permissions,
                    Users.token,
                ).where(
                    Users.username == username,
                )
            )
            return result.first()

    async def lookup_user_token(self, username):
        """Return (User, hashed token) for `username`, or (None, None)."""
        row = await self._get_user(username)
        if not row:
            return None, None
        return map_user(row), row.token

    async def lookup_user(self, username):
        """Return the User for `username`, or None."""
        return map_user(await self._get_user(username))

    async def set_user_token(self, username, token):
        """Replace the stored token for `username`; True if a row was updated."""
        async with self.db.begin():
            result = await self._execute(
                update(Users)
                .where(
                    Users.username == username,
                )
                .values(
                    token=token,
                )
            )
            return result.rowcount != 0

    async def set_user_perms(self, username, permissions):
        """Store `permissions` (an iterable) space-joined; True if a row was updated."""
        async with self.db.begin():
            result = await self._execute(
                update(Users)
                .where(Users.username == username)
                .values(permissions=" ".join(permissions))
            )
            return result.rowcount != 0

    async def get_all_users(self):
        """Return all users as User objects (tokens excluded)."""
        async with self.db.begin():
            result = await self._execute(
                select(
                    Users.username,
                    Users.permissions,
                )
            )
            return [map_user(row) for row in result]

    async def new_user(self, username, permissions, token):
        """Create a user; returns False (and logs) if the username is taken."""
        try:
            async with self.db.begin():
                await self._execute(
                    insert(Users).values(
                        username=username,
                        permissions=" ".join(permissions),
                        token=token,
                    )
                )
            return True
        except IntegrityError as e:
            self.logger.debug("Cannot create new user %s: %s", username, e)
            return False

    async def delete_user(self, username):
        """Delete the user row for `username`; True if a row was removed."""
        async with self.db.begin():
            result = await self._execute(
                delete(Users).where(Users.username == username)
            )
            return result.rowcount != 0

    async def get_usage(self):
        """Return {table name: {"rows": count}} for every live table."""
        usage = {}
        # NOTE(review): the "as session" binding is unused here.
        async with self.db.begin() as session:
            for name, table in Base.metadata.tables.items():
                result = await self._execute(
                    statement=select(func.count()).select_from(table)
                )
                usage[name] = {
                    "rows": result.scalar(),
                }

        return usage

    async def get_query_columns(self):
        """Names of all Text columns in the unihash/outhash tables (queryable fields)."""
        columns = set()
        for table in (UnihashesV3, OuthashesV2):
            for c in table.__table__.columns:
                if not isinstance(c.type, Text):
                    continue
                columns.add(c.key)

        return list(columns)
diff --git a/bitbake/lib/hashserv/sqlite.py b/bitbake/lib/hashserv/sqlite.py new file mode 100644 index 0000000000..da2e844a03 --- /dev/null +++ b/bitbake/lib/hashserv/sqlite.py | |||
@@ -0,0 +1,562 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright (C) 2023 Garmin Ltd. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | import sqlite3 | ||
8 | import logging | ||
9 | from contextlib import closing | ||
10 | from . import User | ||
11 | |||
logger = logging.getLogger("hashserv.sqlite")

# Each table definition is a tuple of (column name, SQL type, flags)
# triples. A "UNIQUE" flag adds the column to the table's composite
# UNIQUE(...) constraint built by _make_table().
UNIHASH_TABLE_DEFINITION = (
    ("method", "TEXT NOT NULL", "UNIQUE"),
    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
    ("unihash", "TEXT NOT NULL", ""),
    ("gc_mark", "TEXT NOT NULL", ""),
)

UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)

OUTHASH_TABLE_DEFINITION = (
    ("method", "TEXT NOT NULL", "UNIQUE"),
    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
    ("outhash", "TEXT NOT NULL", "UNIQUE"),
    ("created", "DATETIME", ""),
    # Optional fields
    ("owner", "TEXT", ""),
    ("PN", "TEXT", ""),
    ("PV", "TEXT", ""),
    ("PR", "TEXT", ""),
    ("task", "TEXT", ""),
    ("outhash_siginfo", "TEXT", ""),
)

OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)

USERS_TABLE_DEFINITION = (
    ("username", "TEXT NOT NULL", "UNIQUE"),
    ("token", "TEXT NOT NULL", ""),
    ("permissions", "TEXT NOT NULL", ""),
)

USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)


CONFIG_TABLE_DEFINITION = (
    ("name", "TEXT NOT NULL", "UNIQUE"),
    ("value", "TEXT", ""),
)

CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION)
54 | |||
55 | |||
56 | def _make_table(cursor, name, definition): | ||
57 | cursor.execute( | ||
58 | """ | ||
59 | CREATE TABLE IF NOT EXISTS {name} ( | ||
60 | id INTEGER PRIMARY KEY AUTOINCREMENT, | ||
61 | {fields} | ||
62 | UNIQUE({unique}) | ||
63 | ) | ||
64 | """.format( | ||
65 | name=name, | ||
66 | fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition), | ||
67 | unique=", ".join( | ||
68 | name for name, _, flags in definition if "UNIQUE" in flags | ||
69 | ), | ||
70 | ) | ||
71 | ) | ||
72 | |||
73 | |||
def map_user(row):
    """Build a User from a users-table row; ``None`` passes through.

    The space-separated permissions column is expanded into a set.
    """
    if row is None:
        return None
    perms = set(row["permissions"].split())
    return User(username=row["username"], permissions=perms)
81 | |||
82 | |||
83 | def _make_condition_statement(columns, condition): | ||
84 | where = {} | ||
85 | for c in columns: | ||
86 | if c in condition and condition[c] is not None: | ||
87 | where[c] = condition[c] | ||
88 | |||
89 | return where, " AND ".join("%s=:%s" % (k, k) for k in where.keys()) | ||
90 | |||
91 | |||
92 | def _get_sqlite_version(cursor): | ||
93 | cursor.execute("SELECT sqlite_version()") | ||
94 | |||
95 | version = [] | ||
96 | for v in cursor.fetchone()[0].split("."): | ||
97 | try: | ||
98 | version.append(int(v)) | ||
99 | except ValueError: | ||
100 | version.append(v) | ||
101 | |||
102 | return tuple(version) | ||
103 | |||
104 | |||
105 | def _schema_table_name(version): | ||
106 | if version >= (3, 33): | ||
107 | return "sqlite_schema" | ||
108 | |||
109 | return "sqlite_master" | ||
110 | |||
111 | |||
class DatabaseEngine(object):
    """Factory for sqlite-backed hash equivalence databases.

    ``create()`` opens a short-lived connection to build or upgrade the
    schema; ``connect()`` hands out fresh ``Database`` wrappers, each with
    its own connection.
    """

    def __init__(self, dbname, sync):
        self.dbname = dbname
        self.logger = logger
        # When sync is False, "PRAGMA synchronous = OFF" trades durability
        # for speed on the connections this engine configures.
        self.sync = sync

    async def create(self):
        """Create tables and indexes, and migrate data from older schemas.

        If a legacy ``unihashes_v2`` table is found, its rows are copied
        into ``unihashes_v3`` (with an empty gc_mark) and the old table
        dropped.
        """
        db = sqlite3.connect(self.dbname)
        # Fix: the schema-setup connection was previously never closed,
        # leaking a file handle (and WAL lock) for the process lifetime.
        with closing(db):
            db.row_factory = sqlite3.Row

            with closing(db.cursor()) as cursor:
                _make_table(cursor, "unihashes_v3", UNIHASH_TABLE_DEFINITION)
                _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
                _make_table(cursor, "users", USERS_TABLE_DEFINITION)
                _make_table(cursor, "config", CONFIG_TABLE_DEFINITION)

                cursor.execute("PRAGMA journal_mode = WAL")
                cursor.execute(
                    "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
                )

                # Drop old indexes
                cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
                cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
                cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
                cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
                cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v3")

                # TODO: Upgrade from tasks_v2?
                cursor.execute("DROP TABLE IF EXISTS tasks_v2")

                # Create new indexes
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS taskhash_lookup_v4 ON unihashes_v3 (method, taskhash)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS unihash_lookup_v1 ON unihashes_v3 (unihash)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
                )
                cursor.execute("CREATE INDEX IF NOT EXISTS config_lookup ON config (name)")

                sqlite_version = _get_sqlite_version(cursor)

                cursor.execute(
                    f"""
                    SELECT name FROM {_schema_table_name(sqlite_version)} WHERE type = 'table' AND name = 'unihashes_v2'
                    """
                )
                if cursor.fetchone():
                    self.logger.info("Upgrading Unihashes V2 -> V3...")
                    cursor.execute(
                        """
                        INSERT INTO unihashes_v3 (id, method, unihash, taskhash, gc_mark)
                        SELECT id, method, unihash, taskhash, '' FROM unihashes_v2
                        """
                    )
                    cursor.execute("DROP TABLE unihashes_v2")
                    db.commit()
                    self.logger.info("Upgrade complete")

    def connect(self, logger):
        """Return a new Database bound to this engine's path and sync mode."""
        return Database(logger, self.dbname, self.sync)
176 | |||
177 | |||
178 | class Database(object): | ||
    def __init__(self, logger, dbname, sync):
        """Open a dedicated sqlite connection and configure its pragmas.

        WAL journaling is enabled; synchronous mode is NORMAL when `sync`
        is true, OFF otherwise.
        """
        self.dbname = dbname
        self.logger = logger

        self.db = sqlite3.connect(self.dbname)
        # Row factory gives dict-style access (row["col"]) in the query methods.
        self.db.row_factory = sqlite3.Row

        with closing(self.db.cursor()) as cursor:
            cursor.execute("PRAGMA journal_mode = WAL")
            cursor.execute(
                "PRAGMA synchronous = %s" % ("NORMAL" if sync else "OFF")
            )

            # Cached so callers can vary behavior by SQLite feature level.
            self.sqlite_version = _get_sqlite_version(cursor)
193 | |||
    async def __aenter__(self):
        """Async context manager entry; the connection is already open."""
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        # Always close the connection when the context exits.
        await self.close()
199 | |||
200 | async def _set_config(self, cursor, name, value): | ||
201 | cursor.execute( | ||
202 | """ | ||
203 | INSERT OR REPLACE INTO config (id, name, value) VALUES | ||
204 | ((SELECT id FROM config WHERE name=:name), :name, :value) | ||
205 | """, | ||
206 | { | ||
207 | "name": name, | ||
208 | "value": value, | ||
209 | }, | ||
210 | ) | ||
211 | |||
212 | async def _get_config(self, cursor, name): | ||
213 | cursor.execute( | ||
214 | "SELECT value FROM config WHERE name=:name", | ||
215 | { | ||
216 | "name": name, | ||
217 | }, | ||
218 | ) | ||
219 | row = cursor.fetchone() | ||
220 | if row is None: | ||
221 | return None | ||
222 | return row["value"] | ||
223 | |||
    async def close(self):
        """Close the underlying sqlite connection."""
        self.db.close()
226 | |||
    async def get_unihash_by_taskhash_full(self, method, taskhash):
        """Oldest outhash row (joined with its unihash) for (method, taskhash)."""
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
                WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "taskhash": taskhash,
                },
            )
            return cursor.fetchone()
243 | |||
    async def get_unihash_by_outhash(self, method, outhash):
        """Oldest outhash row (joined with its unihash) for (method, outhash)."""
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                },
            )
            return cursor.fetchone()
260 | |||
261 | async def unihash_exists(self, unihash): | ||
262 | with closing(self.db.cursor()) as cursor: | ||
263 | cursor.execute( | ||
264 | """ | ||
265 | SELECT * FROM unihashes_v3 WHERE unihash=:unihash | ||
266 | LIMIT 1 | ||
267 | """, | ||
268 | { | ||
269 | "unihash": unihash, | ||
270 | }, | ||
271 | ) | ||
272 | return cursor.fetchone() is not None | ||
273 | |||
    async def get_outhash(self, method, outhash):
        """Oldest outhashes_v2 row for (method, outhash), or None."""
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT * FROM outhashes_v2
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                },
            )
            return cursor.fetchone()
289 | |||
    async def get_equivalent_for_outhash(self, method, outhash, taskhash):
        """Oldest (taskhash, unihash) sharing `outhash` but with a different taskhash."""
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT outhashes_v2.taskhash AS taskhash, unihashes_v3.unihash AS unihash FROM outhashes_v2
                INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
                -- Select any matching output hash except the one we just inserted
                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
                -- Pick the oldest hash
                ORDER BY outhashes_v2.created ASC
                LIMIT 1
                """,
                {
                    "method": method,
                    "outhash": outhash,
                    "taskhash": taskhash,
                },
            )
            return cursor.fetchone()
309 | |||
310 | async def get_equivalent(self, method, taskhash): | ||
311 | with closing(self.db.cursor()) as cursor: | ||
312 | cursor.execute( | ||
313 | "SELECT taskhash, method, unihash FROM unihashes_v3 WHERE method=:method AND taskhash=:taskhash", | ||
314 | { | ||
315 | "method": method, | ||
316 | "taskhash": taskhash, | ||
317 | }, | ||
318 | ) | ||
319 | return cursor.fetchone() | ||
320 | |||
    async def remove(self, condition):
        """Delete rows matching `condition` from both tables.

        Returns the total number of rows removed. A condition that names
        no known columns deletes nothing.
        """
        def do_remove(columns, table_name, cursor):
            # Only run the DELETE when at least one known column matched.
            where, clause = _make_condition_statement(columns, condition)
            if where:
                query = f"DELETE FROM {table_name} WHERE {clause}"
                cursor.execute(query, where)
                return cursor.rowcount

            return 0

        count = 0
        with closing(self.db.cursor()) as cursor:
            count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
            count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v3", cursor)
            self.db.commit()

        return count
338 | |||
    async def get_current_gc_mark(self):
        """Return the currently stored "gc-mark" config value (or None)."""
        with closing(self.db.cursor()) as cursor:
            return await self._get_config(cursor, "gc-mark")
342 | |||
    async def gc_status(self):
        """Return (rows kept, rows pending removal, current gc mark).

        Rows whose gc_mark matches the current mark (empty string when no
        mark is set) are "kept"; all others would be removed by gc_sweep().
        """
        with closing(self.db.cursor()) as cursor:
            cursor.execute(
                """
                SELECT COUNT() FROM unihashes_v3 WHERE
                    gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
                """
            )
            keep_rows = cursor.fetchone()[0]

            cursor.execute(
                """
                SELECT COUNT() FROM unihashes_v3 WHERE
                    gc_mark!=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
                """
            )
            remove_rows = cursor.fetchone()[0]

            current_mark = await self._get_config(cursor, "gc-mark")

            return (keep_rows, remove_rows, current_mark)
364 | |||
365 | async def gc_mark(self, mark, condition): | ||
366 | with closing(self.db.cursor()) as cursor: | ||
367 | await self._set_config(cursor, "gc-mark", mark) | ||
368 | |||
369 | where, clause = _make_condition_statement(UNIHASH_TABLE_COLUMNS, condition) | ||
370 | |||
371 | new_rows = 0 | ||
372 | if where: | ||
373 | cursor.execute( | ||
374 | f""" | ||
375 | UPDATE unihashes_v3 SET | ||
376 | gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '') | ||
377 | WHERE {clause} | ||
378 | """, | ||
379 | where, | ||
380 | ) | ||
381 | new_rows = cursor.rowcount | ||
382 | |||
383 | self.db.commit() | ||
384 | return new_rows | ||
385 | |||
386 | async def gc_sweep(self): | ||
387 | with closing(self.db.cursor()) as cursor: | ||
388 | # NOTE: COALESCE is not used in this query so that if the current | ||
389 | # mark is NULL, nothing will happen | ||
390 | cursor.execute( | ||
391 | """ | ||
392 | DELETE FROM unihashes_v3 WHERE | ||
393 | gc_mark!=(SELECT value FROM config WHERE name='gc-mark') | ||
394 | """ | ||
395 | ) | ||
396 | count = cursor.rowcount | ||
397 | await self._set_config(cursor, "gc-mark", None) | ||
398 | |||
399 | self.db.commit() | ||
400 | return count | ||
401 | |||
402 | async def clean_unused(self, oldest): | ||
403 | with closing(self.db.cursor()) as cursor: | ||
404 | cursor.execute( | ||
405 | """ | ||
406 | DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS ( | ||
407 | SELECT unihashes_v3.id FROM unihashes_v3 WHERE unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash LIMIT 1 | ||
408 | ) | ||
409 | """, | ||
410 | { | ||
411 | "oldest": oldest, | ||
412 | }, | ||
413 | ) | ||
414 | self.db.commit() | ||
415 | return cursor.rowcount | ||
416 | |||
417 | async def insert_unihash(self, method, taskhash, unihash): | ||
418 | with closing(self.db.cursor()) as cursor: | ||
419 | prevrowid = cursor.lastrowid | ||
420 | cursor.execute( | ||
421 | """ | ||
422 | INSERT OR IGNORE INTO unihashes_v3 (method, taskhash, unihash, gc_mark) VALUES | ||
423 | ( | ||
424 | :method, | ||
425 | :taskhash, | ||
426 | :unihash, | ||
427 | COALESCE((SELECT value FROM config WHERE name='gc-mark'), '') | ||
428 | ) | ||
429 | """, | ||
430 | { | ||
431 | "method": method, | ||
432 | "taskhash": taskhash, | ||
433 | "unihash": unihash, | ||
434 | }, | ||
435 | ) | ||
436 | self.db.commit() | ||
437 | return cursor.lastrowid != prevrowid | ||
438 | |||
439 | async def insert_outhash(self, data): | ||
440 | data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS} | ||
441 | keys = sorted(data.keys()) | ||
442 | query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format( | ||
443 | fields=", ".join(keys), | ||
444 | values=", ".join(":" + k for k in keys), | ||
445 | ) | ||
446 | with closing(self.db.cursor()) as cursor: | ||
447 | prevrowid = cursor.lastrowid | ||
448 | cursor.execute(query, data) | ||
449 | self.db.commit() | ||
450 | return cursor.lastrowid != prevrowid | ||
451 | |||
452 | def _get_user(self, username): | ||
453 | with closing(self.db.cursor()) as cursor: | ||
454 | cursor.execute( | ||
455 | """ | ||
456 | SELECT username, permissions, token FROM users WHERE username=:username | ||
457 | """, | ||
458 | { | ||
459 | "username": username, | ||
460 | }, | ||
461 | ) | ||
462 | return cursor.fetchone() | ||
463 | |||
464 | async def lookup_user_token(self, username): | ||
465 | row = self._get_user(username) | ||
466 | if row is None: | ||
467 | return None, None | ||
468 | return map_user(row), row["token"] | ||
469 | |||
470 | async def lookup_user(self, username): | ||
471 | return map_user(self._get_user(username)) | ||
472 | |||
473 | async def set_user_token(self, username, token): | ||
474 | with closing(self.db.cursor()) as cursor: | ||
475 | cursor.execute( | ||
476 | """ | ||
477 | UPDATE users SET token=:token WHERE username=:username | ||
478 | """, | ||
479 | { | ||
480 | "username": username, | ||
481 | "token": token, | ||
482 | }, | ||
483 | ) | ||
484 | self.db.commit() | ||
485 | return cursor.rowcount != 0 | ||
486 | |||
487 | async def set_user_perms(self, username, permissions): | ||
488 | with closing(self.db.cursor()) as cursor: | ||
489 | cursor.execute( | ||
490 | """ | ||
491 | UPDATE users SET permissions=:permissions WHERE username=:username | ||
492 | """, | ||
493 | { | ||
494 | "username": username, | ||
495 | "permissions": " ".join(permissions), | ||
496 | }, | ||
497 | ) | ||
498 | self.db.commit() | ||
499 | return cursor.rowcount != 0 | ||
500 | |||
501 | async def get_all_users(self): | ||
502 | with closing(self.db.cursor()) as cursor: | ||
503 | cursor.execute("SELECT username, permissions FROM users") | ||
504 | return [map_user(r) for r in cursor.fetchall()] | ||
505 | |||
506 | async def new_user(self, username, permissions, token): | ||
507 | with closing(self.db.cursor()) as cursor: | ||
508 | try: | ||
509 | cursor.execute( | ||
510 | """ | ||
511 | INSERT INTO users (username, token, permissions) VALUES (:username, :token, :permissions) | ||
512 | """, | ||
513 | { | ||
514 | "username": username, | ||
515 | "token": token, | ||
516 | "permissions": " ".join(permissions), | ||
517 | }, | ||
518 | ) | ||
519 | self.db.commit() | ||
520 | return True | ||
521 | except sqlite3.IntegrityError: | ||
522 | return False | ||
523 | |||
524 | async def delete_user(self, username): | ||
525 | with closing(self.db.cursor()) as cursor: | ||
526 | cursor.execute( | ||
527 | """ | ||
528 | DELETE FROM users WHERE username=:username | ||
529 | """, | ||
530 | { | ||
531 | "username": username, | ||
532 | }, | ||
533 | ) | ||
534 | self.db.commit() | ||
535 | return cursor.rowcount != 0 | ||
536 | |||
537 | async def get_usage(self): | ||
538 | usage = {} | ||
539 | with closing(self.db.cursor()) as cursor: | ||
540 | cursor.execute( | ||
541 | f""" | ||
542 | SELECT name FROM {_schema_table_name(self.sqlite_version)} WHERE type = 'table' AND name NOT LIKE 'sqlite_%' | ||
543 | """ | ||
544 | ) | ||
545 | for row in cursor.fetchall(): | ||
546 | cursor.execute( | ||
547 | """ | ||
548 | SELECT COUNT() FROM %s | ||
549 | """ | ||
550 | % row["name"], | ||
551 | ) | ||
552 | usage[row["name"]] = { | ||
553 | "rows": cursor.fetchone()[0], | ||
554 | } | ||
555 | return usage | ||
556 | |||
557 | async def get_query_columns(self): | ||
558 | columns = set() | ||
559 | for name, typ, _ in UNIHASH_TABLE_DEFINITION + OUTHASH_TABLE_DEFINITION: | ||
560 | if typ.startswith("TEXT"): | ||
561 | columns.add(name) | ||
562 | return list(columns) | ||
diff --git a/bitbake/lib/hashserv/tests.py b/bitbake/lib/hashserv/tests.py index 1a696481e3..0809453cf8 100644 --- a/bitbake/lib/hashserv/tests.py +++ b/bitbake/lib/hashserv/tests.py | |||
@@ -6,7 +6,9 @@ | |||
6 | # | 6 | # |
7 | 7 | ||
8 | from . import create_server, create_client | 8 | from . import create_server, create_client |
9 | from .client import HashConnectionError | 9 | from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS |
10 | from bb.asyncrpc import InvokeError | ||
11 | from .client import ClientPool | ||
10 | import hashlib | 12 | import hashlib |
11 | import logging | 13 | import logging |
12 | import multiprocessing | 14 | import multiprocessing |
@@ -16,46 +18,80 @@ import tempfile | |||
16 | import threading | 18 | import threading |
17 | import unittest | 19 | import unittest |
18 | import socket | 20 | import socket |
19 | 21 | import time | |
20 | def _run_server(server, idx): | 22 | import signal |
21 | # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w', | 23 | import subprocess |
22 | # format='%(levelname)s %(filename)s:%(lineno)d %(message)s') | 24 | import json |
23 | sys.stdout = open('bbhashserv-%d.log' % idx, 'w') | 25 | import re |
26 | from pathlib import Path | ||
27 | |||
28 | |||
29 | THIS_DIR = Path(__file__).parent | ||
30 | BIN_DIR = THIS_DIR.parent.parent / "bin" | ||
31 | |||
32 | def server_prefunc(server, idx): | ||
33 | logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w', | ||
34 | format='%(levelname)s %(filename)s:%(lineno)d %(message)s') | ||
35 | server.logger.debug("Running server %d" % idx) | ||
36 | sys.stdout = open('bbhashserv-stdout-%d.log' % idx, 'w') | ||
24 | sys.stderr = sys.stdout | 37 | sys.stderr = sys.stdout |
25 | server.serve_forever() | ||
26 | |||
27 | 38 | ||
28 | class HashEquivalenceTestSetup(object): | 39 | class HashEquivalenceTestSetup(object): |
29 | METHOD = 'TestMethod' | 40 | METHOD = 'TestMethod' |
30 | 41 | ||
31 | server_index = 0 | 42 | server_index = 0 |
43 | client_index = 0 | ||
32 | 44 | ||
33 | def start_server(self, dbpath=None, upstream=None, read_only=False): | 45 | def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc, anon_perms=DEFAULT_ANON_PERMS, admin_username=None, admin_password=None): |
34 | self.server_index += 1 | 46 | self.server_index += 1 |
35 | if dbpath is None: | 47 | if dbpath is None: |
36 | dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index) | 48 | dbpath = self.make_dbpath() |
49 | |||
50 | def cleanup_server(server): | ||
51 | if server.process.exitcode is not None: | ||
52 | return | ||
37 | 53 | ||
38 | def cleanup_thread(thread): | 54 | server.process.terminate() |
39 | thread.terminate() | 55 | server.process.join() |
40 | thread.join() | ||
41 | 56 | ||
42 | server = create_server(self.get_server_addr(self.server_index), | 57 | server = create_server(self.get_server_addr(self.server_index), |
43 | dbpath, | 58 | dbpath, |
44 | upstream=upstream, | 59 | upstream=upstream, |
45 | read_only=read_only) | 60 | read_only=read_only, |
61 | anon_perms=anon_perms, | ||
62 | admin_username=admin_username, | ||
63 | admin_password=admin_password) | ||
46 | server.dbpath = dbpath | 64 | server.dbpath = dbpath |
47 | 65 | ||
48 | server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index)) | 66 | server.serve_as_process(prefunc=prefunc, args=(self.server_index,)) |
49 | server.thread.start() | 67 | self.addCleanup(cleanup_server, server) |
50 | self.addCleanup(cleanup_thread, server.thread) | 68 | |
69 | return server | ||
70 | |||
71 | def make_dbpath(self): | ||
72 | return os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index) | ||
51 | 73 | ||
74 | def start_client(self, server_address, username=None, password=None): | ||
52 | def cleanup_client(client): | 75 | def cleanup_client(client): |
53 | client.close() | 76 | client.close() |
54 | 77 | ||
55 | client = create_client(server.address) | 78 | client = create_client(server_address, username=username, password=password) |
56 | self.addCleanup(cleanup_client, client) | 79 | self.addCleanup(cleanup_client, client) |
57 | 80 | ||
58 | return (client, server) | 81 | return client |
82 | |||
83 | def start_test_server(self): | ||
84 | self.server = self.start_server() | ||
85 | return self.server.address | ||
86 | |||
87 | def start_auth_server(self): | ||
88 | auth_server = self.start_server(self.server.dbpath, anon_perms=[], admin_username="admin", admin_password="password") | ||
89 | self.auth_server_address = auth_server.address | ||
90 | self.admin_client = self.start_client(auth_server.address, username="admin", password="password") | ||
91 | return self.admin_client | ||
92 | |||
93 | def auth_client(self, user): | ||
94 | return self.start_client(self.auth_server_address, user["username"], user["token"]) | ||
59 | 95 | ||
60 | def setUp(self): | 96 | def setUp(self): |
61 | if sys.version_info < (3, 5, 0): | 97 | if sys.version_info < (3, 5, 0): |
@@ -64,24 +100,82 @@ class HashEquivalenceTestSetup(object): | |||
64 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') | 100 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') |
65 | self.addCleanup(self.temp_dir.cleanup) | 101 | self.addCleanup(self.temp_dir.cleanup) |
66 | 102 | ||
67 | (self.client, self.server) = self.start_server() | 103 | self.server_address = self.start_test_server() |
104 | |||
105 | self.client = self.start_client(self.server_address) | ||
68 | 106 | ||
69 | def assertClientGetHash(self, client, taskhash, unihash): | 107 | def assertClientGetHash(self, client, taskhash, unihash): |
70 | result = client.get_unihash(self.METHOD, taskhash) | 108 | result = client.get_unihash(self.METHOD, taskhash) |
71 | self.assertEqual(result, unihash) | 109 | self.assertEqual(result, unihash) |
72 | 110 | ||
111 | def assertUserPerms(self, user, permissions): | ||
112 | with self.auth_client(user) as client: | ||
113 | info = client.get_user() | ||
114 | self.assertEqual(info, { | ||
115 | "username": user["username"], | ||
116 | "permissions": permissions, | ||
117 | }) | ||
73 | 118 | ||
74 | class HashEquivalenceCommonTests(object): | 119 | def assertUserCanAuth(self, user): |
75 | def test_create_hash(self): | 120 | with self.start_client(self.auth_server_address) as client: |
121 | client.auth(user["username"], user["token"]) | ||
122 | |||
123 | def assertUserCannotAuth(self, user): | ||
124 | with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError): | ||
125 | client.auth(user["username"], user["token"]) | ||
126 | |||
127 | def create_test_hash(self, client): | ||
76 | # Simple test that hashes can be created | 128 | # Simple test that hashes can be created |
77 | taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' | 129 | taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' |
78 | outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' | 130 | outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' |
79 | unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' | 131 | unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' |
80 | 132 | ||
81 | self.assertClientGetHash(self.client, taskhash, None) | 133 | self.assertClientGetHash(client, taskhash, None) |
82 | 134 | ||
83 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | 135 | result = client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
84 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') | 136 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') |
137 | return taskhash, outhash, unihash | ||
138 | |||
139 | def run_hashclient(self, args, **kwargs): | ||
140 | try: | ||
141 | p = subprocess.run( | ||
142 | [BIN_DIR / "bitbake-hashclient"] + args, | ||
143 | stdout=subprocess.PIPE, | ||
144 | stderr=subprocess.STDOUT, | ||
145 | encoding="utf-8", | ||
146 | **kwargs | ||
147 | ) | ||
148 | except subprocess.CalledProcessError as e: | ||
149 | print(e.output) | ||
150 | raise e | ||
151 | |||
152 | print(p.stdout) | ||
153 | return p | ||
154 | |||
155 | |||
156 | class HashEquivalenceCommonTests(object): | ||
157 | def auth_perms(self, *permissions): | ||
158 | self.client_index += 1 | ||
159 | user = self.create_user(f"user-{self.client_index}", permissions) | ||
160 | return self.auth_client(user) | ||
161 | |||
162 | def create_user(self, username, permissions, *, client=None): | ||
163 | def remove_user(username): | ||
164 | try: | ||
165 | self.admin_client.delete_user(username) | ||
166 | except bb.asyncrpc.InvokeError: | ||
167 | pass | ||
168 | |||
169 | if client is None: | ||
170 | client = self.admin_client | ||
171 | |||
172 | user = client.new_user(username, permissions) | ||
173 | self.addCleanup(remove_user, username) | ||
174 | |||
175 | return user | ||
176 | |||
177 | def test_create_hash(self): | ||
178 | return self.create_test_hash(self.client) | ||
85 | 179 | ||
86 | def test_create_equivalent(self): | 180 | def test_create_equivalent(self): |
87 | # Tests that a second reported task with the same outhash will be | 181 | # Tests that a second reported task with the same outhash will be |
@@ -123,6 +217,57 @@ class HashEquivalenceCommonTests(object): | |||
123 | 217 | ||
124 | self.assertClientGetHash(self.client, taskhash, unihash) | 218 | self.assertClientGetHash(self.client, taskhash, unihash) |
125 | 219 | ||
220 | def test_remove_taskhash(self): | ||
221 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
222 | result = self.client.remove({"taskhash": taskhash}) | ||
223 | self.assertGreater(result["count"], 0) | ||
224 | self.assertClientGetHash(self.client, taskhash, None) | ||
225 | |||
226 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
227 | self.assertIsNone(result_outhash) | ||
228 | |||
229 | def test_remove_unihash(self): | ||
230 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
231 | result = self.client.remove({"unihash": unihash}) | ||
232 | self.assertGreater(result["count"], 0) | ||
233 | self.assertClientGetHash(self.client, taskhash, None) | ||
234 | |||
235 | def test_remove_outhash(self): | ||
236 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
237 | result = self.client.remove({"outhash": outhash}) | ||
238 | self.assertGreater(result["count"], 0) | ||
239 | |||
240 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
241 | self.assertIsNone(result_outhash) | ||
242 | |||
243 | def test_remove_method(self): | ||
244 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
245 | result = self.client.remove({"method": self.METHOD}) | ||
246 | self.assertGreater(result["count"], 0) | ||
247 | self.assertClientGetHash(self.client, taskhash, None) | ||
248 | |||
249 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
250 | self.assertIsNone(result_outhash) | ||
251 | |||
252 | def test_clean_unused(self): | ||
253 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
254 | |||
255 | # Clean the database, which should not remove anything because all hashes an in-use | ||
256 | result = self.client.clean_unused(0) | ||
257 | self.assertEqual(result["count"], 0) | ||
258 | self.assertClientGetHash(self.client, taskhash, unihash) | ||
259 | |||
260 | # Remove the unihash. The row in the outhash table should still be present | ||
261 | self.client.remove({"unihash": unihash}) | ||
262 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False) | ||
263 | self.assertIsNotNone(result_outhash) | ||
264 | |||
265 | # Now clean with no minimum age which will remove the outhash | ||
266 | result = self.client.clean_unused(0) | ||
267 | self.assertEqual(result["count"], 1) | ||
268 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False) | ||
269 | self.assertIsNone(result_outhash) | ||
270 | |||
126 | def test_huge_message(self): | 271 | def test_huge_message(self): |
127 | # Simple test that hashes can be created | 272 | # Simple test that hashes can be created |
128 | taskhash = 'c665584ee6817aa99edfc77a44dd853828279370' | 273 | taskhash = 'c665584ee6817aa99edfc77a44dd853828279370' |
@@ -138,16 +283,21 @@ class HashEquivalenceCommonTests(object): | |||
138 | }) | 283 | }) |
139 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') | 284 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') |
140 | 285 | ||
141 | result = self.client.get_taskhash(self.METHOD, taskhash, True) | 286 | result_unihash = self.client.get_taskhash(self.METHOD, taskhash, True) |
142 | self.assertEqual(result['taskhash'], taskhash) | 287 | self.assertEqual(result_unihash['taskhash'], taskhash) |
143 | self.assertEqual(result['unihash'], unihash) | 288 | self.assertEqual(result_unihash['unihash'], unihash) |
144 | self.assertEqual(result['method'], self.METHOD) | 289 | self.assertEqual(result_unihash['method'], self.METHOD) |
145 | self.assertEqual(result['outhash'], outhash) | 290 | |
146 | self.assertEqual(result['outhash_siginfo'], siginfo) | 291 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) |
292 | self.assertEqual(result_outhash['taskhash'], taskhash) | ||
293 | self.assertEqual(result_outhash['method'], self.METHOD) | ||
294 | self.assertEqual(result_outhash['unihash'], unihash) | ||
295 | self.assertEqual(result_outhash['outhash'], outhash) | ||
296 | self.assertEqual(result_outhash['outhash_siginfo'], siginfo) | ||
147 | 297 | ||
148 | def test_stress(self): | 298 | def test_stress(self): |
149 | def query_server(failures): | 299 | def query_server(failures): |
150 | client = Client(self.server.address) | 300 | client = Client(self.server_address) |
151 | try: | 301 | try: |
152 | for i in range(1000): | 302 | for i in range(1000): |
153 | taskhash = hashlib.sha256() | 303 | taskhash = hashlib.sha256() |
@@ -186,8 +336,10 @@ class HashEquivalenceCommonTests(object): | |||
186 | # the side client. It also verifies that the results are pulled into | 336 | # the side client. It also verifies that the results are pulled into |
187 | # the downstream database by checking that the downstream and side servers | 337 | # the downstream database by checking that the downstream and side servers |
188 | # match after the downstream is done waiting for all backfill tasks | 338 | # match after the downstream is done waiting for all backfill tasks |
189 | (down_client, down_server) = self.start_server(upstream=self.server.address) | 339 | down_server = self.start_server(upstream=self.server_address) |
190 | (side_client, side_server) = self.start_server(dbpath=down_server.dbpath) | 340 | down_client = self.start_client(down_server.address) |
341 | side_server = self.start_server(dbpath=down_server.dbpath) | ||
342 | side_client = self.start_client(side_server.address) | ||
191 | 343 | ||
192 | def check_hash(taskhash, unihash, old_sidehash): | 344 | def check_hash(taskhash, unihash, old_sidehash): |
193 | nonlocal down_client | 345 | nonlocal down_client |
@@ -258,15 +410,57 @@ class HashEquivalenceCommonTests(object): | |||
258 | result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6) | 410 | result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6) |
259 | self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream') | 411 | self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream') |
260 | 412 | ||
413 | # Tests read through from server with | ||
414 | taskhash7 = '9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74' | ||
415 | outhash7 = '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69' | ||
416 | unihash7 = '05d2a63c81e32f0a36542ca677e8ad852365c538' | ||
417 | self.client.report_unihash(taskhash7, self.METHOD, outhash7, unihash7) | ||
418 | |||
419 | result = down_client.get_taskhash(self.METHOD, taskhash7, True) | ||
420 | self.assertEqual(result['unihash'], unihash7, 'Server failed to copy unihash from upstream') | ||
421 | self.assertEqual(result['outhash'], outhash7, 'Server failed to copy unihash from upstream') | ||
422 | self.assertEqual(result['taskhash'], taskhash7, 'Server failed to copy unihash from upstream') | ||
423 | self.assertEqual(result['method'], self.METHOD) | ||
424 | |||
425 | taskhash8 = '86978a4c8c71b9b487330b0152aade10c1ee58aa' | ||
426 | outhash8 = 'ca8c128e9d9e4a28ef24d0508aa20b5cf880604eacd8f65c0e366f7e0cc5fbcf' | ||
427 | unihash8 = 'd8bcf25369d40590ad7d08c84d538982f2023e01' | ||
428 | self.client.report_unihash(taskhash8, self.METHOD, outhash8, unihash8) | ||
429 | |||
430 | result = down_client.get_outhash(self.METHOD, outhash8, taskhash8) | ||
431 | self.assertEqual(result['unihash'], unihash8, 'Server failed to copy unihash from upstream') | ||
432 | self.assertEqual(result['outhash'], outhash8, 'Server failed to copy unihash from upstream') | ||
433 | self.assertEqual(result['taskhash'], taskhash8, 'Server failed to copy unihash from upstream') | ||
434 | self.assertEqual(result['method'], self.METHOD) | ||
435 | |||
436 | taskhash9 = 'ae6339531895ddf5b67e663e6a374ad8ec71d81c' | ||
437 | outhash9 = 'afc78172c81880ae10a1fec994b5b4ee33d196a001a1b66212a15ebe573e00b5' | ||
438 | unihash9 = '6662e699d6e3d894b24408ff9a4031ef9b038ee8' | ||
439 | self.client.report_unihash(taskhash9, self.METHOD, outhash9, unihash9) | ||
440 | |||
441 | result = down_client.get_taskhash(self.METHOD, taskhash9, False) | ||
442 | self.assertEqual(result['unihash'], unihash9, 'Server failed to copy unihash from upstream') | ||
443 | self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy unihash from upstream') | ||
444 | self.assertEqual(result['method'], self.METHOD) | ||
445 | |||
446 | def test_unihash_exsits(self): | ||
447 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
448 | self.assertTrue(self.client.unihash_exists(unihash)) | ||
449 | self.assertFalse(self.client.unihash_exists('6662e699d6e3d894b24408ff9a4031ef9b038ee8')) | ||
450 | |||
261 | def test_ro_server(self): | 451 | def test_ro_server(self): |
262 | (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True) | 452 | rw_server = self.start_server() |
453 | rw_client = self.start_client(rw_server.address) | ||
454 | |||
455 | ro_server = self.start_server(dbpath=rw_server.dbpath, read_only=True) | ||
456 | ro_client = self.start_client(ro_server.address) | ||
263 | 457 | ||
264 | # Report a hash via the read-write server | 458 | # Report a hash via the read-write server |
265 | taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' | 459 | taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9' |
266 | outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' | 460 | outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f' |
267 | unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' | 461 | unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd' |
268 | 462 | ||
269 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | 463 | result = rw_client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
270 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') | 464 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') |
271 | 465 | ||
272 | # Check the hash via the read-only server | 466 | # Check the hash via the read-only server |
@@ -277,11 +471,940 @@ class HashEquivalenceCommonTests(object): | |||
277 | outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44' | 471 | outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44' |
278 | unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824' | 472 | unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824' |
279 | 473 | ||
280 | with self.assertRaises(HashConnectionError): | 474 | result = ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2) |
281 | ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2) | 475 | self.assertEqual(result['unihash'], unihash2) |
282 | 476 | ||
283 | # Ensure that the database was not modified | 477 | # Ensure that the database was not modified |
478 | self.assertClientGetHash(rw_client, taskhash2, None) | ||
479 | |||
480 | |||
481 | def test_slow_server_start(self): | ||
482 | # Ensures that the server will exit correctly even if it gets a SIGTERM | ||
483 | # before entering the main loop | ||
484 | |||
485 | event = multiprocessing.Event() | ||
486 | |||
487 | def prefunc(server, idx): | ||
488 | nonlocal event | ||
489 | server_prefunc(server, idx) | ||
490 | event.wait() | ||
491 | |||
492 | def do_nothing(signum, frame): | ||
493 | pass | ||
494 | |||
495 | old_signal = signal.signal(signal.SIGTERM, do_nothing) | ||
496 | self.addCleanup(signal.signal, signal.SIGTERM, old_signal) | ||
497 | |||
498 | server = self.start_server(prefunc=prefunc) | ||
499 | server.process.terminate() | ||
500 | time.sleep(30) | ||
501 | event.set() | ||
502 | server.process.join(300) | ||
503 | self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!") | ||
504 | |||
505 | def test_diverging_report_race(self): | ||
506 | # Tests that a reported task will correctly pick up an updated unihash | ||
507 | |||
508 | # This is a baseline report added to the database to ensure that there | ||
509 | # is something to match against as equivalent | ||
510 | outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa' | ||
511 | taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' | ||
512 | unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' | ||
513 | result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1) | ||
514 | |||
515 | # Add a report that is equivalent to Task 1. It should ignore the | ||
516 | # provided unihash and report the unihash from task 1 | ||
517 | taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273' | ||
518 | unihash2 = taskhash2 | ||
519 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2) | ||
520 | self.assertEqual(result['unihash'], unihash1) | ||
521 | |||
522 | # Add another report for Task 2, but with a different outhash (e.g. the | ||
523 | # task is non-deterministic). It should still be marked with the Task 1 | ||
524 | # unihash because it has the Task 2 taskhash, which is equivalent to | ||
525 | # Task 1 | ||
526 | outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c' | ||
527 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2) | ||
528 | self.assertEqual(result['unihash'], unihash1) | ||
529 | |||
530 | |||
531 | def test_diverging_report_reverse_race(self): | ||
532 | # Same idea as the previous test, but Tasks 2 and 3 are reported in | ||
533 | # reverse order the opposite order | ||
534 | |||
535 | outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa' | ||
536 | taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' | ||
537 | unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' | ||
538 | result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1) | ||
539 | |||
540 | taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273' | ||
541 | unihash2 = taskhash2 | ||
542 | |||
543 | # Report Task 3 first. Since there is nothing else in the database it | ||
544 | # will use the client provided unihash | ||
545 | outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c' | ||
546 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2) | ||
547 | self.assertEqual(result['unihash'], unihash2) | ||
548 | |||
549 | # Report Task 2. This is equivalent to Task 1 but there is already a mapping for | ||
550 | # taskhash2 so it will report unihash2 | ||
551 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2) | ||
552 | self.assertEqual(result['unihash'], unihash2) | ||
553 | |||
554 | # The originally reported unihash for Task 3 should be unchanged even if it | ||
555 | # shares a taskhash with Task 2 | ||
556 | self.assertClientGetHash(self.client, taskhash2, unihash2) | ||
557 | |||
558 | |||
559 | def test_client_pool_get_unihashes(self): | ||
560 | TEST_INPUT = ( | ||
561 | # taskhash outhash unihash | ||
562 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | ||
563 | # Duplicated taskhash with multiple output hashes and unihashes. | ||
564 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'), | ||
565 | # Equivalent hash | ||
566 | ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"), | ||
567 | ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"), | ||
568 | ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'), | ||
569 | ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'), | ||
570 | ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'), | ||
571 | ) | ||
572 | EXTRA_QUERIES = ( | ||
573 | "6b6be7a84ab179b4240c4302518dc3f6", | ||
574 | ) | ||
575 | |||
576 | with ClientPool(self.server_address, 10) as client_pool: | ||
577 | for taskhash, outhash, unihash in TEST_INPUT: | ||
578 | self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | ||
579 | |||
580 | query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)} | ||
581 | for idx, taskhash in enumerate(EXTRA_QUERIES): | ||
582 | query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash) | ||
583 | |||
584 | result = client_pool.get_unihashes(query) | ||
585 | |||
586 | self.assertDictEqual(result, { | ||
587 | 0: "218e57509998197d570e2c98512d0105985dffc9", | ||
588 | 1: "218e57509998197d570e2c98512d0105985dffc9", | ||
589 | 2: "218e57509998197d570e2c98512d0105985dffc9", | ||
590 | 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d", | ||
591 | 4: "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
592 | 5: "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
593 | 6: "05d2a63c81e32f0a36542ca677e8ad852365c538", | ||
594 | 7: None, | ||
595 | }) | ||
596 | |||
597 | def test_client_pool_unihash_exists(self): | ||
598 | TEST_INPUT = ( | ||
599 | # taskhash outhash unihash | ||
600 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | ||
601 | # Duplicated taskhash with multiple output hashes and unihashes. | ||
602 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'), | ||
603 | # Equivalent hash | ||
604 | ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"), | ||
605 | ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"), | ||
606 | ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'), | ||
607 | ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'), | ||
608 | ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'), | ||
609 | ) | ||
610 | EXTRA_QUERIES = ( | ||
611 | "6b6be7a84ab179b4240c4302518dc3f6", | ||
612 | ) | ||
613 | |||
614 | result_unihashes = set() | ||
615 | |||
616 | |||
617 | with ClientPool(self.server_address, 10) as client_pool: | ||
618 | for taskhash, outhash, unihash in TEST_INPUT: | ||
619 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | ||
620 | result_unihashes.add(result["unihash"]) | ||
621 | |||
622 | query = {} | ||
623 | expected = {} | ||
624 | |||
625 | for _, _, unihash in TEST_INPUT: | ||
626 | idx = len(query) | ||
627 | query[idx] = unihash | ||
628 | expected[idx] = unihash in result_unihashes | ||
629 | |||
630 | |||
631 | for unihash in EXTRA_QUERIES: | ||
632 | idx = len(query) | ||
633 | query[idx] = unihash | ||
634 | expected[idx] = False | ||
635 | |||
636 | result = client_pool.unihashes_exist(query) | ||
637 | self.assertDictEqual(result, expected) | ||
638 | |||
639 | |||
640 | def test_auth_read_perms(self): | ||
641 | admin_client = self.start_auth_server() | ||
642 | |||
643 | # Create hashes with non-authenticated server | ||
644 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
645 | |||
646 | # Validate hash can be retrieved using authenticated client | ||
647 | with self.auth_perms("@read") as client: | ||
648 | self.assertClientGetHash(client, taskhash, unihash) | ||
649 | |||
650 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
651 | self.assertClientGetHash(client, taskhash, unihash) | ||
652 | |||
    def test_auth_report_perms(self):
        """Reporting hashes requires both @read and @report; either alone is not enough."""
        admin_client = self.start_auth_server()

        # Without read permission, the user is completely denied
        with self.auth_perms() as client, self.assertRaises(InvokeError):
            self.create_test_hash(client)

        # Read permission allows the call to succeed, but it doesn't record
        # anything in the database
        with self.auth_perms("@read") as client:
            taskhash, outhash, unihash = self.create_test_hash(client)
            self.assertClientGetHash(client, taskhash, None)

        # Report permission alone is insufficient
        with self.auth_perms("@report") as client, self.assertRaises(InvokeError):
            self.create_test_hash(client)

        # Read and report permission actually modify the database
        with self.auth_perms("@read", "@report") as client:
            taskhash, outhash, unihash = self.create_test_hash(client)
            self.assertClientGetHash(client, taskhash, unihash)
674 | |||
675 | def test_auth_no_token_refresh_from_anon_user(self): | ||
676 | self.start_auth_server() | ||
677 | |||
678 | with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError): | ||
679 | client.refresh_token() | ||
680 | |||
    def test_auth_self_token_refresh(self):
        """Any user may refresh their own token; each refresh invalidates all prior tokens."""
        admin_client = self.start_auth_server()

        # Create a new user with no permissions
        user = self.create_user("test-user", [])

        with self.auth_client(user) as client:
            new_user = client.refresh_token()

        # Same account, fresh credential; only the newest token authenticates
        self.assertEqual(user["username"], new_user["username"])
        self.assertNotEqual(user["token"], new_user["token"])
        self.assertUserCanAuth(new_user)
        self.assertUserCannotAuth(user)

        # Explicitly specifying with your own username is fine also
        with self.auth_client(new_user) as client:
            new_user2 = client.refresh_token(user["username"])

        self.assertEqual(user["username"], new_user2["username"])
        self.assertNotEqual(user["token"], new_user2["token"])
        self.assertUserCanAuth(new_user2)
        self.assertUserCannotAuth(new_user)
        self.assertUserCannotAuth(user)
704 | |||
    def test_auth_token_refresh(self):
        """Refreshing another user's token requires the @user-admin permission."""
        admin_client = self.start_auth_server()

        user = self.create_user("test-user", [])

        # A user without @user-admin cannot refresh someone else's token
        with self.auth_perms() as client, self.assertRaises(InvokeError):
            client.refresh_token(user["username"])

        with self.auth_perms("@user-admin") as client:
            new_user = client.refresh_token(user["username"])

        # The refreshed token replaces the old one
        self.assertEqual(user["username"], new_user["username"])
        self.assertNotEqual(user["token"], new_user["token"])
        self.assertUserCanAuth(new_user)
        self.assertUserCannotAuth(user)
720 | |||
721 | def test_auth_self_get_user(self): | ||
722 | admin_client = self.start_auth_server() | ||
723 | |||
724 | user = self.create_user("test-user", []) | ||
725 | user_info = user.copy() | ||
726 | del user_info["token"] | ||
727 | |||
728 | with self.auth_client(user) as client: | ||
729 | info = client.get_user() | ||
730 | self.assertEqual(info, user_info) | ||
731 | |||
732 | # Explicitly asking for your own username is fine also | ||
733 | info = client.get_user(user["username"]) | ||
734 | self.assertEqual(info, user_info) | ||
735 | |||
736 | def test_auth_get_user(self): | ||
737 | admin_client = self.start_auth_server() | ||
738 | |||
739 | user = self.create_user("test-user", []) | ||
740 | user_info = user.copy() | ||
741 | del user_info["token"] | ||
742 | |||
743 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
744 | client.get_user(user["username"]) | ||
745 | |||
746 | with self.auth_perms("@user-admin") as client: | ||
747 | info = client.get_user(user["username"]) | ||
748 | self.assertEqual(info, user_info) | ||
749 | |||
750 | info = client.get_user("nonexist-user") | ||
751 | self.assertIsNone(info) | ||
752 | |||
753 | def test_auth_reconnect(self): | ||
754 | admin_client = self.start_auth_server() | ||
755 | |||
756 | user = self.create_user("test-user", []) | ||
757 | user_info = user.copy() | ||
758 | del user_info["token"] | ||
759 | |||
760 | with self.auth_client(user) as client: | ||
761 | info = client.get_user() | ||
762 | self.assertEqual(info, user_info) | ||
763 | |||
764 | client.disconnect() | ||
765 | |||
766 | info = client.get_user() | ||
767 | self.assertEqual(info, user_info) | ||
768 | |||
    def test_auth_delete_user(self):
        """Users may delete themselves; deleting others requires @user-admin."""
        admin_client = self.start_auth_server()

        user = self.create_user("test-user", [])

        # self service
        with self.auth_client(user) as client:
            client.delete_user(user["username"])

        self.assertIsNone(admin_client.get_user(user["username"]))
        # Recreate the user for the permission checks below
        user = self.create_user("test-user", [])

        with self.auth_perms() as client, self.assertRaises(InvokeError):
            client.delete_user(user["username"])

        with self.auth_perms("@user-admin") as client:
            client.delete_user(user["username"])

        # User doesn't exist, so even though the permission is correct, it's an
        # error
        with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
            client.delete_user(user["username"])
791 | |||
    def test_auth_set_user_perms(self):
        """Changing a user's permissions requires @user-admin; no self-service."""
        admin_client = self.start_auth_server()

        user = self.create_user("test-user", [])

        self.assertUserPerms(user, [])

        # No self service to change permissions
        with self.auth_client(user) as client, self.assertRaises(InvokeError):
            client.set_user_perms(user["username"], ["@all"])
        self.assertUserPerms(user, [])

        with self.auth_perms() as client, self.assertRaises(InvokeError):
            client.set_user_perms(user["username"], ["@all"])
        self.assertUserPerms(user, [])

        with self.auth_perms("@user-admin") as client:
            client.set_user_perms(user["username"], ["@all"])
        self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))

        # Bad permissions are rejected and leave the user unchanged
        with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
            client.set_user_perms(user["username"], ["@this-is-not-a-permission"])
        self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
816 | |||
    def test_auth_get_all_users(self):
        """Listing every account requires the @user-admin permission."""
        admin_client = self.start_auth_server()

        user = self.create_user("test-user", [])

        with self.auth_client(user) as client, self.assertRaises(InvokeError):
            client.get_all_users()

        # Give the test user the correct permission
        admin_client.set_user_perms(user["username"], ["@user-admin"])

        with self.auth_client(user) as client:
            all_users = client.get_all_users()

        # Convert to a dictionary for easier comparison
        all_users = {u["username"]: u for u in all_users}

        self.assertEqual(all_users,
            {
                "admin": {
                    "username": "admin",
                    "permissions": sorted(list(ALL_PERMISSIONS)),
                },
                "test-user": {
                    "username": "test-user",
                    "permissions": ["@user-admin"],
                }
            }
        )
846 | |||
847 | def test_auth_new_user(self): | ||
848 | self.start_auth_server() | ||
849 | |||
850 | permissions = ["@read", "@report", "@db-admin", "@user-admin"] | ||
851 | permissions.sort() | ||
852 | |||
853 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
854 | self.create_user("test-user", permissions, client=client) | ||
855 | |||
856 | with self.auth_perms("@user-admin") as client: | ||
857 | user = self.create_user("test-user", permissions, client=client) | ||
858 | self.assertIn("token", user) | ||
859 | self.assertEqual(user["username"], "test-user") | ||
860 | self.assertEqual(user["permissions"], permissions) | ||
861 | |||
    def test_auth_become_user(self):
        """@user-admin may impersonate other users; impersonation persists across reconnects."""
        admin_client = self.start_auth_server()

        user = self.create_user("test-user", ["@read", "@report"])
        user_info = user.copy()
        del user_info["token"]

        with self.auth_perms() as client, self.assertRaises(InvokeError):
            client.become_user(user["username"])

        with self.auth_perms("@user-admin") as client:
            become = client.become_user(user["username"])
            self.assertEqual(become, user_info)

            info = client.get_user()
            self.assertEqual(info, user_info)

            # Verify become user is preserved across disconnect
            client.disconnect()

            info = client.get_user()
            self.assertEqual(info, user_info)

            # test-user doesn't have become_user permissions, so this should
            # not work
            with self.assertRaises(InvokeError):
                client.become_user(user["username"])

        # No self-service of become
        with self.auth_client(user) as client, self.assertRaises(InvokeError):
            client.become_user(user["username"])

        # Give test user permissions to become
        admin_client.set_user_perms(user["username"], ["@user-admin"])

        # It's possible to become yourself (effectively a noop)
        with self.auth_perms("@user-admin") as client:
            become = client.become_user(client.username)
900 | |||
901 | def test_auth_gc(self): | ||
902 | admin_client = self.start_auth_server() | ||
903 | |||
904 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
905 | client.gc_mark("ABC", {"unihash": "123"}) | ||
906 | |||
907 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
908 | client.gc_status() | ||
909 | |||
910 | with self.auth_perms() as client, self.assertRaises(InvokeError): | ||
911 | client.gc_sweep("ABC") | ||
912 | |||
913 | with self.auth_perms("@db-admin") as client: | ||
914 | client.gc_mark("ABC", {"unihash": "123"}) | ||
915 | |||
916 | with self.auth_perms("@db-admin") as client: | ||
917 | client.gc_status() | ||
918 | |||
919 | with self.auth_perms("@db-admin") as client: | ||
920 | client.gc_sweep("ABC") | ||
921 | |||
922 | def test_get_db_usage(self): | ||
923 | usage = self.client.get_db_usage() | ||
924 | |||
925 | self.assertTrue(isinstance(usage, dict)) | ||
926 | for name in usage.keys(): | ||
927 | self.assertTrue(isinstance(usage[name], dict)) | ||
928 | self.assertIn("rows", usage[name]) | ||
929 | self.assertTrue(isinstance(usage[name]["rows"], int)) | ||
930 | |||
931 | def test_get_db_query_columns(self): | ||
932 | columns = self.client.get_db_query_columns() | ||
933 | |||
934 | self.assertTrue(isinstance(columns, list)) | ||
935 | self.assertTrue(len(columns) > 0) | ||
936 | |||
937 | for col in columns: | ||
938 | self.client.remove({col: ""}) | ||
939 | |||
940 | def test_auth_is_owner(self): | ||
941 | admin_client = self.start_auth_server() | ||
942 | |||
943 | user = self.create_user("test-user", ["@read", "@report"]) | ||
944 | with self.auth_client(user) as client: | ||
945 | taskhash, outhash, unihash = self.create_test_hash(client) | ||
946 | data = client.get_taskhash(self.METHOD, taskhash, True) | ||
947 | self.assertEqual(data["owner"], user["username"]) | ||
948 | |||
    def test_gc(self):
        """A full mark-and-sweep cycle removes unmarked hashes and keeps marked ones."""
        taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
        outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
        unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'

        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')

        taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
        outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
        unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'

        result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        # Mark the first unihash to be kept
        ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
        self.assertEqual(ret, {"count": 1})

        ret = self.client.gc_status()
        self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})

        # Second hash is still there; mark doesn't delete hashes
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        ret = self.client.gc_sweep("ABC")
        self.assertEqual(ret, {"count": 1})

        # The unmarked (second) hash is gone; its lookup now misses
        self.assertClientGetHash(self.client, taskhash2, None)
        # First hash is still present
        self.assertClientGetHash(self.client, taskhash, unihash)
981 | |||
    def test_gc_switch_mark(self):
        """Marking under a new label starts a fresh GC cycle, superseding the old mark."""
        taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
        outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
        unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'

        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')

        taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
        outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
        unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'

        result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        # Mark the first unihash to be kept
        ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
        self.assertEqual(ret, {"count": 1})

        ret = self.client.gc_status()
        self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})

        # Second hash is still there; mark doesn't delete hashes
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        # Switch to a different mark and mark the second hash. This will start
        # a new collection cycle
        ret = self.client.gc_mark("DEF", {"unihash": unihash2, "method": self.METHOD})
        self.assertEqual(ret, {"count": 1})

        ret = self.client.gc_status()
        self.assertEqual(ret, {"mark": "DEF", "keep": 1, "remove": 1})

        # Both hashes are still present
        self.assertClientGetHash(self.client, taskhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash, unihash)

        # Sweep with the new mark
        ret = self.client.gc_sweep("DEF")
        self.assertEqual(ret, {"count": 1})

        # First hash is gone, second is kept
        self.assertClientGetHash(self.client, taskhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash, None)
1026 | |||
    def test_gc_switch_sweep_mark(self):
        """Sweeping with a mark other than the active one is an error and deletes nothing."""
        taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
        outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
        unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'

        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')

        taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
        outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
        unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'

        result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        # Mark the first unihash to be kept
        ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
        self.assertEqual(ret, {"count": 1})

        ret = self.client.gc_status()
        self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})

        # Sweeping with a different mark raises an error
        with self.assertRaises(InvokeError):
            self.client.gc_sweep("DEF")

        # Both hashes are present
        self.assertClientGetHash(self.client, taskhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash, unihash)
1056 | |||
    def test_gc_new_hashes(self):
        """Hashes reported during an in-progress GC cycle inherit the current mark."""
        taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
        outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
        unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'

        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')

        # Start a new garbage collection
        ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
        self.assertEqual(ret, {"count": 1})

        ret = self.client.gc_status()
        self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 0})

        # Add second hash. It should inherit the mark from the current garbage
        # collection operation

        taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
        outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
        unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'

        result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash2, unihash2)

        # Sweep should remove nothing
        ret = self.client.gc_sweep("ABC")
        self.assertEqual(ret, {"count": 0})

        # Both hashes are present
        self.assertClientGetHash(self.client, taskhash2, unihash2)
        self.assertClientGetHash(self.client, taskhash, unihash)
1089 | |||
1090 | |||
1091 | class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase): | ||
1092 | def get_server_addr(self, server_idx): | ||
1093 | return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx) | ||
1094 | |||
1095 | def test_get(self): | ||
1096 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1097 | |||
1098 | p = self.run_hashclient(["--address", self.server_address, "get", self.METHOD, taskhash]) | ||
1099 | data = json.loads(p.stdout) | ||
1100 | self.assertEqual(data["unihash"], unihash) | ||
1101 | self.assertEqual(data["outhash"], outhash) | ||
1102 | self.assertEqual(data["taskhash"], taskhash) | ||
1103 | self.assertEqual(data["method"], self.METHOD) | ||
1104 | |||
1105 | def test_get_outhash(self): | ||
1106 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1107 | |||
1108 | p = self.run_hashclient(["--address", self.server_address, "get-outhash", self.METHOD, outhash, taskhash]) | ||
1109 | data = json.loads(p.stdout) | ||
1110 | self.assertEqual(data["unihash"], unihash) | ||
1111 | self.assertEqual(data["outhash"], outhash) | ||
1112 | self.assertEqual(data["taskhash"], taskhash) | ||
1113 | self.assertEqual(data["method"], self.METHOD) | ||
1114 | |||
1115 | def test_stats(self): | ||
1116 | p = self.run_hashclient(["--address", self.server_address, "stats"], check=True) | ||
1117 | json.loads(p.stdout) | ||
1118 | |||
1119 | def test_stress(self): | ||
1120 | self.run_hashclient(["--address", self.server_address, "stress"], check=True) | ||
1121 | |||
1122 | def test_unihash_exsits(self): | ||
1123 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1124 | |||
1125 | p = self.run_hashclient([ | ||
1126 | "--address", self.server_address, | ||
1127 | "unihash-exists", unihash, | ||
1128 | ], check=True) | ||
1129 | self.assertEqual(p.stdout.strip(), "true") | ||
1130 | |||
1131 | p = self.run_hashclient([ | ||
1132 | "--address", self.server_address, | ||
1133 | "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8', | ||
1134 | ], check=True) | ||
1135 | self.assertEqual(p.stdout.strip(), "false") | ||
1136 | |||
1137 | def test_unihash_exsits_quiet(self): | ||
1138 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1139 | |||
1140 | p = self.run_hashclient([ | ||
1141 | "--address", self.server_address, | ||
1142 | "unihash-exists", unihash, | ||
1143 | "--quiet", | ||
1144 | ]) | ||
1145 | self.assertEqual(p.returncode, 0) | ||
1146 | self.assertEqual(p.stdout.strip(), "") | ||
1147 | |||
1148 | p = self.run_hashclient([ | ||
1149 | "--address", self.server_address, | ||
1150 | "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8', | ||
1151 | "--quiet", | ||
1152 | ]) | ||
1153 | self.assertEqual(p.returncode, 1) | ||
1154 | self.assertEqual(p.stdout.strip(), "") | ||
1155 | |||
1156 | def test_remove_taskhash(self): | ||
1157 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1158 | self.run_hashclient([ | ||
1159 | "--address", self.server_address, | ||
1160 | "remove", | ||
1161 | "--where", "taskhash", taskhash, | ||
1162 | ], check=True) | ||
1163 | self.assertClientGetHash(self.client, taskhash, None) | ||
1164 | |||
1165 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
1166 | self.assertIsNone(result_outhash) | ||
1167 | |||
1168 | def test_remove_unihash(self): | ||
1169 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1170 | self.run_hashclient([ | ||
1171 | "--address", self.server_address, | ||
1172 | "remove", | ||
1173 | "--where", "unihash", unihash, | ||
1174 | ], check=True) | ||
1175 | self.assertClientGetHash(self.client, taskhash, None) | ||
1176 | |||
1177 | def test_remove_outhash(self): | ||
1178 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1179 | self.run_hashclient([ | ||
1180 | "--address", self.server_address, | ||
1181 | "remove", | ||
1182 | "--where", "outhash", outhash, | ||
1183 | ], check=True) | ||
1184 | |||
1185 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
1186 | self.assertIsNone(result_outhash) | ||
1187 | |||
1188 | def test_remove_method(self): | ||
1189 | taskhash, outhash, unihash = self.create_test_hash(self.client) | ||
1190 | self.run_hashclient([ | ||
1191 | "--address", self.server_address, | ||
1192 | "remove", | ||
1193 | "--where", "method", self.METHOD, | ||
1194 | ], check=True) | ||
1195 | self.assertClientGetHash(self.client, taskhash, None) | ||
1196 | |||
1197 | result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) | ||
1198 | self.assertIsNone(result_outhash) | ||
1199 | |||
    def test_clean_unused(self):
        """"clean-unused" removes only outhash rows no longer referenced by any unihash."""
        taskhash, outhash, unihash = self.create_test_hash(self.client)

        # Clean the database, which should not remove anything because all hashes are in-use
        self.run_hashclient([
            "--address", self.server_address,
            "clean-unused", "0",
        ], check=True)
        self.assertClientGetHash(self.client, taskhash, unihash)

        # Remove the unihash. The row in the outhash table should still be present
        self.run_hashclient([
            "--address", self.server_address,
            "remove",
            "--where", "unihash", unihash,
        ], check=True)
        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
        self.assertIsNotNone(result_outhash)

        # Now clean with no minimum age which will remove the outhash
        self.run_hashclient([
            "--address", self.server_address,
            "clean-unused", "0",
        ], check=True)
        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
        self.assertIsNone(result_outhash)
1226 | |||
    def test_refresh_token(self):
        """End-to-end token refresh via the command-line client."""
        admin_client = self.start_auth_server()

        user = admin_client.new_user("test-user", ["@read", "@report"])

        p = self.run_hashclient([
            "--address", self.auth_server_address,
            "--login", user["username"],
            "--password", user["token"],
            "refresh-token"
        ], check=True)

        # Scrape the freshly-issued token out of the command output
        new_token = None
        for l in p.stdout.splitlines():
            l = l.rstrip()
            m = re.match(r'Token: +(.*)$', l)
            if m is not None:
                new_token = m.group(1)

        self.assertTrue(new_token)

        print("New token is %r" % new_token)

        # The refreshed token must authenticate successfully
        self.run_hashclient([
            "--address", self.auth_server_address,
            "--login", user["username"],
            "--password", new_token,
            "get-user"
        ], check=True)
1256 | |||
1257 | def test_set_user_perms(self): | ||
1258 | admin_client = self.start_auth_server() | ||
1259 | |||
1260 | user = admin_client.new_user("test-user", ["@read"]) | ||
1261 | |||
1262 | self.run_hashclient([ | ||
1263 | "--address", self.auth_server_address, | ||
1264 | "--login", admin_client.username, | ||
1265 | "--password", admin_client.password, | ||
1266 | "set-user-perms", | ||
1267 | "-u", user["username"], | ||
1268 | "@read", "@report", | ||
1269 | ], check=True) | ||
1270 | |||
1271 | new_user = admin_client.get_user(user["username"]) | ||
1272 | |||
1273 | self.assertEqual(set(new_user["permissions"]), {"@read", "@report"}) | ||
1274 | |||
1275 | def test_get_user(self): | ||
1276 | admin_client = self.start_auth_server() | ||
1277 | |||
1278 | user = admin_client.new_user("test-user", ["@read"]) | ||
1279 | |||
1280 | p = self.run_hashclient([ | ||
1281 | "--address", self.auth_server_address, | ||
1282 | "--login", admin_client.username, | ||
1283 | "--password", admin_client.password, | ||
1284 | "get-user", | ||
1285 | "-u", user["username"], | ||
1286 | ], check=True) | ||
1287 | |||
1288 | self.assertIn("Username:", p.stdout) | ||
1289 | self.assertIn("Permissions:", p.stdout) | ||
1290 | |||
1291 | p = self.run_hashclient([ | ||
1292 | "--address", self.auth_server_address, | ||
1293 | "--login", user["username"], | ||
1294 | "--password", user["token"], | ||
1295 | "get-user", | ||
1296 | ], check=True) | ||
1297 | |||
1298 | self.assertIn("Username:", p.stdout) | ||
1299 | self.assertIn("Permissions:", p.stdout) | ||
1300 | |||
1301 | def test_get_all_users(self): | ||
1302 | admin_client = self.start_auth_server() | ||
1303 | |||
1304 | admin_client.new_user("test-user1", ["@read"]) | ||
1305 | admin_client.new_user("test-user2", ["@read"]) | ||
1306 | |||
1307 | p = self.run_hashclient([ | ||
1308 | "--address", self.auth_server_address, | ||
1309 | "--login", admin_client.username, | ||
1310 | "--password", admin_client.password, | ||
1311 | "get-all-users", | ||
1312 | ], check=True) | ||
1313 | |||
1314 | self.assertIn("admin", p.stdout) | ||
1315 | self.assertIn("test-user1", p.stdout) | ||
1316 | self.assertIn("test-user2", p.stdout) | ||
1317 | |||
1318 | def test_new_user(self): | ||
1319 | admin_client = self.start_auth_server() | ||
1320 | |||
1321 | p = self.run_hashclient([ | ||
1322 | "--address", self.auth_server_address, | ||
1323 | "--login", admin_client.username, | ||
1324 | "--password", admin_client.password, | ||
1325 | "new-user", | ||
1326 | "-u", "test-user", | ||
1327 | "@read", "@report", | ||
1328 | ], check=True) | ||
1329 | |||
1330 | new_token = None | ||
1331 | for l in p.stdout.splitlines(): | ||
1332 | l = l.rstrip() | ||
1333 | m = re.match(r'Token: +(.*)$', l) | ||
1334 | if m is not None: | ||
1335 | new_token = m.group(1) | ||
1336 | |||
1337 | self.assertTrue(new_token) | ||
1338 | |||
1339 | user = { | ||
1340 | "username": "test-user", | ||
1341 | "token": new_token, | ||
1342 | } | ||
1343 | |||
1344 | self.assertUserPerms(user, ["@read", "@report"]) | ||
1345 | |||
1346 | def test_delete_user(self): | ||
1347 | admin_client = self.start_auth_server() | ||
1348 | |||
1349 | user = admin_client.new_user("test-user", ["@read"]) | ||
1350 | |||
1351 | p = self.run_hashclient([ | ||
1352 | "--address", self.auth_server_address, | ||
1353 | "--login", admin_client.username, | ||
1354 | "--password", admin_client.password, | ||
1355 | "delete-user", | ||
1356 | "-u", user["username"], | ||
1357 | ], check=True) | ||
1358 | |||
1359 | self.assertIsNone(admin_client.get_user(user["username"])) | ||
1360 | |||
1361 | def test_get_db_usage(self): | ||
1362 | p = self.run_hashclient([ | ||
1363 | "--address", self.server_address, | ||
1364 | "get-db-usage", | ||
1365 | ], check=True) | ||
1366 | |||
1367 | def test_get_db_query_columns(self): | ||
1368 | p = self.run_hashclient([ | ||
1369 | "--address", self.server_address, | ||
1370 | "get-db-query-columns", | ||
1371 | ], check=True) | ||
1372 | |||
1373 | def test_gc(self): | ||
1374 | taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4' | ||
1375 | outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8' | ||
1376 | unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646' | ||
1377 | |||
1378 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | ||
1379 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') | ||
1380 | |||
1381 | taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4' | ||
1382 | outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4' | ||
1383 | unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b' | ||
1384 | |||
1385 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2) | ||
1386 | self.assertClientGetHash(self.client, taskhash2, unihash2) | ||
1387 | |||
1388 | # Mark the first unihash to be kept | ||
1389 | self.run_hashclient([ | ||
1390 | "--address", self.server_address, | ||
1391 | "gc-mark", "ABC", | ||
1392 | "--where", "unihash", unihash, | ||
1393 | "--where", "method", self.METHOD | ||
1394 | ], check=True) | ||
1395 | |||
1396 | # Second hash is still there; mark doesn't delete hashes | ||
1397 | self.assertClientGetHash(self.client, taskhash2, unihash2) | ||
1398 | |||
1399 | self.run_hashclient([ | ||
1400 | "--address", self.server_address, | ||
1401 | "gc-sweep", "ABC", | ||
1402 | ], check=True) | ||
1403 | |||
1404 | # Hash is gone. Taskhash is returned for second hash | ||
1405 | self.assertClientGetHash(self.client, taskhash2, None) | ||
1406 | # First hash is still present | ||
1407 | self.assertClientGetHash(self.client, taskhash, unihash) | ||
285 | 1408 | ||
286 | 1409 | ||
287 | class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): | 1410 | class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): |
@@ -314,3 +1437,77 @@ class TestHashEquivalenceTCPServer(HashEquivalenceTestSetup, HashEquivalenceComm | |||
314 | # If IPv6 is enabled, it should be safe to use localhost directly, in general | 1437 | # If IPv6 is enabled, it should be safe to use localhost directly, in general |
315 | # case it is more reliable to resolve the IP address explicitly. | 1438 | # case it is more reliable to resolve the IP address explicitly. |
316 | return socket.gethostbyname("localhost") + ":0" | 1439 | return socket.gethostbyname("localhost") + ":0" |
1440 | |||
1441 | |||
1442 | class TestHashEquivalenceWebsocketServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): | ||
1443 | def setUp(self): | ||
1444 | try: | ||
1445 | import websockets | ||
1446 | except ImportError as e: | ||
1447 | self.skipTest(str(e)) | ||
1448 | |||
1449 | super().setUp() | ||
1450 | |||
1451 | def get_server_addr(self, server_idx): | ||
1452 | # Some hosts cause asyncio module to misbehave, when IPv6 is not enabled. | ||
1453 | # If IPv6 is enabled, it should be safe to use localhost directly, in general | ||
1454 | # case it is more reliable to resolve the IP address explicitly. | ||
1455 | host = socket.gethostbyname("localhost") | ||
1456 | return "ws://%s:0" % host | ||
1457 | |||
1458 | |||
1459 | class TestHashEquivalenceWebsocketsSQLAlchemyServer(TestHashEquivalenceWebsocketServer): | ||
1460 | def setUp(self): | ||
1461 | try: | ||
1462 | import sqlalchemy | ||
1463 | import aiosqlite | ||
1464 | except ImportError as e: | ||
1465 | self.skipTest(str(e)) | ||
1466 | |||
1467 | super().setUp() | ||
1468 | |||
1469 | def make_dbpath(self): | ||
1470 | return "sqlite+aiosqlite:///%s" % os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index) | ||
1471 | |||
1472 | |||
1473 | class TestHashEquivalenceExternalServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): | ||
1474 | def get_env(self, name): | ||
1475 | v = os.environ.get(name) | ||
1476 | if not v: | ||
1477 | self.skipTest(f'{name} not defined to test an external server') | ||
1478 | return v | ||
1479 | |||
1480 | def start_test_server(self): | ||
1481 | return self.get_env('BB_TEST_HASHSERV') | ||
1482 | |||
1483 | def start_server(self, *args, **kwargs): | ||
1484 | self.skipTest('Cannot start local server when testing external servers') | ||
1485 | |||
1486 | def start_auth_server(self): | ||
1487 | |||
1488 | self.auth_server_address = self.server_address | ||
1489 | self.admin_client = self.start_client( | ||
1490 | self.server_address, | ||
1491 | username=self.get_env('BB_TEST_HASHSERV_USERNAME'), | ||
1492 | password=self.get_env('BB_TEST_HASHSERV_PASSWORD'), | ||
1493 | ) | ||
1494 | return self.admin_client | ||
1495 | |||
1496 | def setUp(self): | ||
1497 | super().setUp() | ||
1498 | if "BB_TEST_HASHSERV_USERNAME" in os.environ: | ||
1499 | self.client = self.start_client( | ||
1500 | self.server_address, | ||
1501 | username=os.environ["BB_TEST_HASHSERV_USERNAME"], | ||
1502 | password=os.environ["BB_TEST_HASHSERV_PASSWORD"], | ||
1503 | ) | ||
1504 | self.client.remove({"method": self.METHOD}) | ||
1505 | |||
1506 | def tearDown(self): | ||
1507 | self.client.remove({"method": self.METHOD}) | ||
1508 | super().tearDown() | ||
1509 | |||
1510 | |||
1511 | def test_auth_get_all_users(self): | ||
1512 | self.skipTest("Cannot test all users with external server") | ||
1513 | |||
diff --git a/bitbake/lib/layerindexlib/__init__.py b/bitbake/lib/layerindexlib/__init__.py index 9ca127b9df..c3265ddaa1 100644 --- a/bitbake/lib/layerindexlib/__init__.py +++ b/bitbake/lib/layerindexlib/__init__.py | |||
@@ -6,7 +6,6 @@ | |||
6 | import datetime | 6 | import datetime |
7 | 7 | ||
8 | import logging | 8 | import logging |
9 | import imp | ||
10 | import os | 9 | import os |
11 | 10 | ||
12 | from collections import OrderedDict | 11 | from collections import OrderedDict |
@@ -179,9 +178,9 @@ class LayerIndex(): | |||
179 | '''Load the layerindex. | 178 | '''Load the layerindex. |
180 | 179 | ||
181 | indexURI - An index to load. (Use multiple calls to load multiple indexes) | 180 | indexURI - An index to load. (Use multiple calls to load multiple indexes) |
182 | 181 | ||
183 | reload - If reload is True, then any previously loaded indexes will be forgotten. | 182 | reload - If reload is True, then any previously loaded indexes will be forgotten. |
184 | 183 | ||
185 | load - List of elements to load. Default loads all items. | 184 | load - List of elements to load. Default loads all items. |
186 | Note: plugins may ignore this. | 185 | Note: plugins may ignore this. |
187 | 186 | ||
@@ -199,7 +198,7 @@ The format of the indexURI: | |||
199 | 198 | ||
200 | For example: | 199 | For example: |
201 | 200 | ||
202 | http://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index | 201 | https://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index |
203 | cooker:// | 202 | cooker:// |
204 | ''' | 203 | ''' |
205 | if reload: | 204 | if reload: |
@@ -384,7 +383,14 @@ layerBranches set. If not, they are effectively blank.''' | |||
384 | 383 | ||
385 | # Get a list of dependencies and then recursively process them | 384 | # Get a list of dependencies and then recursively process them |
386 | for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: | 385 | for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: |
387 | deplayerbranch = layerdependency.dependency_layerBranch | 386 | try: |
387 | deplayerbranch = layerdependency.dependency_layerBranch | ||
388 | except AttributeError as e: | ||
389 | logger.error('LayerBranch does not exist for dependent layer {}:{}\n' \ | ||
390 | ' Cannot continue successfully.\n' \ | ||
391 | ' You might be able to resolve this by checking out the layer locally.\n' \ | ||
392 | ' Consider reaching out to the layer maintainers or the layerindex admins' \ | ||
393 | .format(layerdependency.dependency.name, layerbranch.branch.name)) | ||
388 | 394 | ||
389 | if ignores and deplayerbranch.layer.name in ignores: | 395 | if ignores and deplayerbranch.layer.name in ignores: |
390 | continue | 396 | continue |
@@ -577,7 +583,7 @@ This function is used to implement debugging and provide the user info. | |||
577 | # index['config'] - configuration data for this index | 583 | # index['config'] - configuration data for this index |
578 | # index['branches'] - dictionary of Branch objects, by id number | 584 | # index['branches'] - dictionary of Branch objects, by id number |
579 | # index['layerItems'] - dictionary of layerItem objects, by id number | 585 | # index['layerItems'] - dictionary of layerItem objects, by id number |
580 | # ...etc... (See: http://layers.openembedded.org/layerindex/api/) | 586 | # ...etc... (See: https://layers.openembedded.org/layerindex/api/) |
581 | # | 587 | # |
582 | # The class needs to manage the 'index' entries and allow easily adding | 588 | # The class needs to manage the 'index' entries and allow easily adding |
583 | # of new items, as well as simply loading of the items. | 589 | # of new items, as well as simply loading of the items. |
@@ -847,7 +853,7 @@ class LayerIndexObj(): | |||
847 | continue | 853 | continue |
848 | 854 | ||
849 | for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: | 855 | for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]: |
850 | deplayerbranch = layerdependency.dependency_layerBranch | 856 | deplayerbranch = layerdependency.dependency_layerBranch or None |
851 | 857 | ||
852 | if ignores and deplayerbranch.layer.name in ignores: | 858 | if ignores and deplayerbranch.layer.name in ignores: |
853 | continue | 859 | continue |
@@ -1279,7 +1285,7 @@ class Recipe(LayerIndexItemObj_LayerBranch): | |||
1279 | filename, filepath, pn, pv, layerbranch, | 1285 | filename, filepath, pn, pv, layerbranch, |
1280 | summary="", description="", section="", license="", | 1286 | summary="", description="", section="", license="", |
1281 | homepage="", bugtracker="", provides="", bbclassextend="", | 1287 | homepage="", bugtracker="", provides="", bbclassextend="", |
1282 | inherits="", blacklisted="", updated=None): | 1288 | inherits="", disallowed="", updated=None): |
1283 | self.id = id | 1289 | self.id = id |
1284 | self.filename = filename | 1290 | self.filename = filename |
1285 | self.filepath = filepath | 1291 | self.filepath = filepath |
@@ -1295,7 +1301,7 @@ class Recipe(LayerIndexItemObj_LayerBranch): | |||
1295 | self.bbclassextend = bbclassextend | 1301 | self.bbclassextend = bbclassextend |
1296 | self.inherits = inherits | 1302 | self.inherits = inherits |
1297 | self.updated = updated or datetime.datetime.today().isoformat() | 1303 | self.updated = updated or datetime.datetime.today().isoformat() |
1298 | self.blacklisted = blacklisted | 1304 | self.disallowed = disallowed |
1299 | if isinstance(layerbranch, LayerBranch): | 1305 | if isinstance(layerbranch, LayerBranch): |
1300 | self.layerbranch = layerbranch | 1306 | self.layerbranch = layerbranch |
1301 | else: | 1307 | else: |
diff --git a/bitbake/lib/layerindexlib/cooker.py b/bitbake/lib/layerindexlib/cooker.py index 2de6e5faa0..ced3e06360 100644 --- a/bitbake/lib/layerindexlib/cooker.py +++ b/bitbake/lib/layerindexlib/cooker.py | |||
@@ -279,7 +279,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin): | |||
279 | summary=pn, description=pn, section='?', | 279 | summary=pn, description=pn, section='?', |
280 | license='?', homepage='?', bugtracker='?', | 280 | license='?', homepage='?', bugtracker='?', |
281 | provides='?', bbclassextend='?', inherits='?', | 281 | provides='?', bbclassextend='?', inherits='?', |
282 | blacklisted='?', layerbranch=depBranchId) | 282 | disallowed='?', layerbranch=depBranchId) |
283 | 283 | ||
284 | index = addElement("recipes", [recipe], index) | 284 | index = addElement("recipes", [recipe], index) |
285 | 285 | ||
diff --git a/bitbake/lib/layerindexlib/restapi.py b/bitbake/lib/layerindexlib/restapi.py index 26a1c9674e..81d99b02ea 100644 --- a/bitbake/lib/layerindexlib/restapi.py +++ b/bitbake/lib/layerindexlib/restapi.py | |||
@@ -31,7 +31,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
31 | The return value is a LayerIndexObj. | 31 | The return value is a LayerIndexObj. |
32 | 32 | ||
33 | url is the url to the rest api of the layer index, such as: | 33 | url is the url to the rest api of the layer index, such as: |
34 | http://layers.openembedded.org/layerindex/api/ | 34 | https://layers.openembedded.org/layerindex/api/ |
35 | 35 | ||
36 | Or a local file... | 36 | Or a local file... |
37 | """ | 37 | """ |
@@ -138,7 +138,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
138 | The return value is a LayerIndexObj. | 138 | The return value is a LayerIndexObj. |
139 | 139 | ||
140 | ud is the parsed url to the rest api of the layer index, such as: | 140 | ud is the parsed url to the rest api of the layer index, such as: |
141 | http://layers.openembedded.org/layerindex/api/ | 141 | https://layers.openembedded.org/layerindex/api/ |
142 | """ | 142 | """ |
143 | 143 | ||
144 | def _get_json_response(apiurl=None, username=None, password=None, retry=True): | 144 | def _get_json_response(apiurl=None, username=None, password=None, retry=True): |
diff --git a/bitbake/lib/layerindexlib/tests/restapi.py b/bitbake/lib/layerindexlib/tests/restapi.py index 33b5c1c4c8..71f0ae8a9d 100644 --- a/bitbake/lib/layerindexlib/tests/restapi.py +++ b/bitbake/lib/layerindexlib/tests/restapi.py | |||
@@ -22,7 +22,7 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
22 | self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway") | 22 | self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway") |
23 | LayersTest.setUp(self) | 23 | LayersTest.setUp(self) |
24 | self.layerindex = layerindexlib.LayerIndex(self.d) | 24 | self.layerindex = layerindexlib.LayerIndex(self.d) |
25 | self.layerindex.load_layerindex('http://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies']) | 25 | self.layerindex.load_layerindex('https://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies']) |
26 | 26 | ||
27 | @skipIfNoNetwork() | 27 | @skipIfNoNetwork() |
28 | def test_layerindex_is_empty(self): | 28 | def test_layerindex_is_empty(self): |
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py index 46e7dc96f6..381b50cf0b 100644 --- a/bitbake/lib/ply/yacc.py +++ b/bitbake/lib/ply/yacc.py | |||
@@ -2797,11 +2797,15 @@ class ParserReflect(object): | |||
2797 | # Compute a signature over the grammar | 2797 | # Compute a signature over the grammar |
2798 | def signature(self): | 2798 | def signature(self): |
2799 | try: | 2799 | try: |
2800 | from hashlib import md5 | 2800 | import hashlib |
2801 | except ImportError: | 2801 | except ImportError: |
2802 | from md5 import md5 | 2802 | raise RuntimeError("Unable to import hashlib") |
2803 | try: | ||
2804 | sig = hashlib.new('MD5', usedforsecurity=False) | ||
2805 | except TypeError: | ||
2806 | # Some configurations don't appear to support two arguments | ||
2807 | sig = hashlib.new('MD5') | ||
2803 | try: | 2808 | try: |
2804 | sig = md5() | ||
2805 | if self.start: | 2809 | if self.start: |
2806 | sig.update(self.start.encode('latin-1')) | 2810 | sig.update(self.start.encode('latin-1')) |
2807 | if self.prec: | 2811 | if self.prec: |
diff --git a/bitbake/lib/progressbar/progressbar.py b/bitbake/lib/progressbar/progressbar.py index e2b6ba1083..d4da10ab75 100644 --- a/bitbake/lib/progressbar/progressbar.py +++ b/bitbake/lib/progressbar/progressbar.py | |||
@@ -253,7 +253,7 @@ class ProgressBar(object): | |||
253 | if (self.maxval is not UnknownLength | 253 | if (self.maxval is not UnknownLength |
254 | and not 0 <= value <= self.maxval): | 254 | and not 0 <= value <= self.maxval): |
255 | 255 | ||
256 | raise ValueError('Value out of range') | 256 | self.maxval = value |
257 | 257 | ||
258 | self.currval = value | 258 | self.currval = value |
259 | 259 | ||
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py index 9961040b58..0e0aa34d0e 100644 --- a/bitbake/lib/prserv/__init__.py +++ b/bitbake/lib/prserv/__init__.py | |||
@@ -1,17 +1,19 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
5 | __version__ = "1.0.0" | 7 | __version__ = "1.0.0" |
6 | 8 | ||
7 | import os, time | 9 | import os, time |
8 | import sys,logging | 10 | import sys, logging |
9 | 11 | ||
10 | def init_logger(logfile, loglevel): | 12 | def init_logger(logfile, loglevel): |
11 | numeric_level = getattr(logging, loglevel.upper(), None) | 13 | numeric_level = getattr(logging, loglevel.upper(), None) |
12 | if not isinstance(numeric_level, int): | 14 | if not isinstance(numeric_level, int): |
13 | raise ValueError('Invalid log level: %s' % loglevel) | 15 | raise ValueError("Invalid log level: %s" % loglevel) |
14 | FORMAT = '%(asctime)-15s %(message)s' | 16 | FORMAT = "%(asctime)-15s %(message)s" |
15 | logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT) | 17 | logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT) |
16 | 18 | ||
17 | class NotFoundError(Exception): | 19 | class NotFoundError(Exception): |
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py new file mode 100644 index 0000000000..8471ee3046 --- /dev/null +++ b/bitbake/lib/prserv/client.py | |||
@@ -0,0 +1,71 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import logging | ||
8 | import bb.asyncrpc | ||
9 | |||
10 | logger = logging.getLogger("BitBake.PRserv") | ||
11 | |||
12 | class PRAsyncClient(bb.asyncrpc.AsyncClient): | ||
13 | def __init__(self): | ||
14 | super().__init__("PRSERVICE", "1.0", logger) | ||
15 | |||
16 | async def getPR(self, version, pkgarch, checksum): | ||
17 | response = await self.invoke( | ||
18 | {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | ||
19 | ) | ||
20 | if response: | ||
21 | return response["value"] | ||
22 | |||
23 | async def test_pr(self, version, pkgarch, checksum): | ||
24 | response = await self.invoke( | ||
25 | {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | ||
26 | ) | ||
27 | if response: | ||
28 | return response["value"] | ||
29 | |||
30 | async def test_package(self, version, pkgarch): | ||
31 | response = await self.invoke( | ||
32 | {"test-package": {"version": version, "pkgarch": pkgarch}} | ||
33 | ) | ||
34 | if response: | ||
35 | return response["value"] | ||
36 | |||
37 | async def max_package_pr(self, version, pkgarch): | ||
38 | response = await self.invoke( | ||
39 | {"max-package-pr": {"version": version, "pkgarch": pkgarch}} | ||
40 | ) | ||
41 | if response: | ||
42 | return response["value"] | ||
43 | |||
44 | async def importone(self, version, pkgarch, checksum, value): | ||
45 | response = await self.invoke( | ||
46 | {"import-one": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "value": value}} | ||
47 | ) | ||
48 | if response: | ||
49 | return response["value"] | ||
50 | |||
51 | async def export(self, version, pkgarch, checksum, colinfo): | ||
52 | response = await self.invoke( | ||
53 | {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}} | ||
54 | ) | ||
55 | if response: | ||
56 | return (response["metainfo"], response["datainfo"]) | ||
57 | |||
58 | async def is_readonly(self): | ||
59 | response = await self.invoke( | ||
60 | {"is-readonly": {}} | ||
61 | ) | ||
62 | if response: | ||
63 | return response["readonly"] | ||
64 | |||
65 | class PRClient(bb.asyncrpc.Client): | ||
66 | def __init__(self): | ||
67 | super().__init__() | ||
68 | self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly") | ||
69 | |||
70 | def _get_async_client(self): | ||
71 | return PRAsyncClient() | ||
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py index cb2a2461e0..eb41508198 100644 --- a/bitbake/lib/prserv/db.py +++ b/bitbake/lib/prserv/db.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
@@ -30,21 +32,29 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | |||
30 | # | 32 | # |
31 | 33 | ||
32 | class PRTable(object): | 34 | class PRTable(object): |
33 | def __init__(self, conn, table, nohist): | 35 | def __init__(self, conn, table, nohist, read_only): |
34 | self.conn = conn | 36 | self.conn = conn |
35 | self.nohist = nohist | 37 | self.nohist = nohist |
38 | self.read_only = read_only | ||
36 | self.dirty = False | 39 | self.dirty = False |
37 | if nohist: | 40 | if nohist: |
38 | self.table = "%s_nohist" % table | 41 | self.table = "%s_nohist" % table |
39 | else: | 42 | else: |
40 | self.table = "%s_hist" % table | 43 | self.table = "%s_hist" % table |
41 | 44 | ||
42 | self._execute("CREATE TABLE IF NOT EXISTS %s \ | 45 | if self.read_only: |
43 | (version TEXT NOT NULL, \ | 46 | table_exists = self._execute( |
44 | pkgarch TEXT NOT NULL, \ | 47 | "SELECT count(*) FROM sqlite_master \ |
45 | checksum TEXT NOT NULL, \ | 48 | WHERE type='table' AND name='%s'" % (self.table)) |
46 | value INTEGER, \ | 49 | if not table_exists: |
47 | PRIMARY KEY (version, pkgarch, checksum));" % self.table) | 50 | raise prserv.NotFoundError |
51 | else: | ||
52 | self._execute("CREATE TABLE IF NOT EXISTS %s \ | ||
53 | (version TEXT NOT NULL, \ | ||
54 | pkgarch TEXT NOT NULL, \ | ||
55 | checksum TEXT NOT NULL, \ | ||
56 | value INTEGER, \ | ||
57 | PRIMARY KEY (version, pkgarch, checksum));" % self.table) | ||
48 | 58 | ||
49 | def _execute(self, *query): | 59 | def _execute(self, *query): |
50 | """Execute a query, waiting to acquire a lock if necessary""" | 60 | """Execute a query, waiting to acquire a lock if necessary""" |
@@ -54,20 +64,67 @@ class PRTable(object): | |||
54 | try: | 64 | try: |
55 | return self.conn.execute(*query) | 65 | return self.conn.execute(*query) |
56 | except sqlite3.OperationalError as exc: | 66 | except sqlite3.OperationalError as exc: |
57 | if 'is locked' in str(exc) and end > time.time(): | 67 | if "is locked" in str(exc) and end > time.time(): |
58 | continue | 68 | continue |
59 | raise exc | 69 | raise exc |
60 | 70 | ||
61 | def sync(self): | 71 | def sync(self): |
62 | self.conn.commit() | 72 | if not self.read_only: |
63 | self._execute("BEGIN EXCLUSIVE TRANSACTION") | 73 | self.conn.commit() |
74 | self._execute("BEGIN EXCLUSIVE TRANSACTION") | ||
64 | 75 | ||
65 | def sync_if_dirty(self): | 76 | def sync_if_dirty(self): |
66 | if self.dirty: | 77 | if self.dirty: |
67 | self.sync() | 78 | self.sync() |
68 | self.dirty = False | 79 | self.dirty = False |
69 | 80 | ||
70 | def _getValueHist(self, version, pkgarch, checksum): | 81 | def test_package(self, version, pkgarch): |
82 | """Returns whether the specified package version is found in the database for the specified architecture""" | ||
83 | |||
84 | # Just returns the value if found or None otherwise | ||
85 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, | ||
86 | (version, pkgarch)) | ||
87 | row=data.fetchone() | ||
88 | if row is not None: | ||
89 | return True | ||
90 | else: | ||
91 | return False | ||
92 | |||
93 | def test_value(self, version, pkgarch, value): | ||
94 | """Returns whether the specified value is found in the database for the specified package and architecture""" | ||
95 | |||
96 | # Just returns the value if found or None otherwise | ||
97 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, | ||
98 | (version, pkgarch, value)) | ||
99 | row=data.fetchone() | ||
100 | if row is not None: | ||
101 | return True | ||
102 | else: | ||
103 | return False | ||
104 | |||
105 | def find_value(self, version, pkgarch, checksum): | ||
106 | """Returns the value for the specified checksum if found or None otherwise.""" | ||
107 | |||
108 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
109 | (version, pkgarch, checksum)) | ||
110 | row=data.fetchone() | ||
111 | if row is not None: | ||
112 | return row[0] | ||
113 | else: | ||
114 | return None | ||
115 | |||
116 | def find_max_value(self, version, pkgarch): | ||
117 | """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value""" | ||
118 | |||
119 | data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
120 | (version, pkgarch)) | ||
121 | row = data.fetchone() | ||
122 | if row is not None: | ||
123 | return row[0] | ||
124 | else: | ||
125 | return None | ||
126 | |||
127 | def _get_value_hist(self, version, pkgarch, checksum): | ||
71 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 128 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, |
72 | (version, pkgarch, checksum)) | 129 | (version, pkgarch, checksum)) |
73 | row=data.fetchone() | 130 | row=data.fetchone() |
@@ -75,10 +132,19 @@ class PRTable(object): | |||
75 | return row[0] | 132 | return row[0] |
76 | else: | 133 | else: |
77 | #no value found, try to insert | 134 | #no value found, try to insert |
135 | if self.read_only: | ||
136 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
137 | (version, pkgarch)) | ||
138 | row = data.fetchone() | ||
139 | if row is not None: | ||
140 | return row[0] | ||
141 | else: | ||
142 | return 0 | ||
143 | |||
78 | try: | 144 | try: |
79 | self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));" | 145 | self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" |
80 | % (self.table,self.table), | 146 | % (self.table, self.table), |
81 | (version,pkgarch, checksum,version, pkgarch)) | 147 | (version, pkgarch, checksum, version, pkgarch)) |
82 | except sqlite3.IntegrityError as exc: | 148 | except sqlite3.IntegrityError as exc: |
83 | logger.error(str(exc)) | 149 | logger.error(str(exc)) |
84 | 150 | ||
@@ -92,10 +158,10 @@ class PRTable(object): | |||
92 | else: | 158 | else: |
93 | raise prserv.NotFoundError | 159 | raise prserv.NotFoundError |
94 | 160 | ||
95 | def _getValueNohist(self, version, pkgarch, checksum): | 161 | def _get_value_no_hist(self, version, pkgarch, checksum): |
96 | data=self._execute("SELECT value FROM %s \ | 162 | data=self._execute("SELECT value FROM %s \ |
97 | WHERE version=? AND pkgarch=? AND checksum=? AND \ | 163 | WHERE version=? AND pkgarch=? AND checksum=? AND \ |
98 | value >= (select max(value) from %s where version=? AND pkgarch=?);" | 164 | value >= (select max(value) from %s where version=? AND pkgarch=?);" |
99 | % (self.table, self.table), | 165 | % (self.table, self.table), |
100 | (version, pkgarch, checksum, version, pkgarch)) | 166 | (version, pkgarch, checksum, version, pkgarch)) |
101 | row=data.fetchone() | 167 | row=data.fetchone() |
@@ -103,9 +169,14 @@ class PRTable(object): | |||
103 | return row[0] | 169 | return row[0] |
104 | else: | 170 | else: |
105 | #no value found, try to insert | 171 | #no value found, try to insert |
172 | if self.read_only: | ||
173 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
174 | (version, pkgarch)) | ||
175 | return data.fetchone()[0] | ||
176 | |||
106 | try: | 177 | try: |
107 | self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));" | 178 | self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" |
108 | % (self.table,self.table), | 179 | % (self.table, self.table), |
109 | (version, pkgarch, checksum, version, pkgarch)) | 180 | (version, pkgarch, checksum, version, pkgarch)) |
110 | except sqlite3.IntegrityError as exc: | 181 | except sqlite3.IntegrityError as exc: |
111 | logger.error(str(exc)) | 182 | logger.error(str(exc)) |
@@ -121,14 +192,17 @@ class PRTable(object): | |||
121 | else: | 192 | else: |
122 | raise prserv.NotFoundError | 193 | raise prserv.NotFoundError |
123 | 194 | ||
124 | def getValue(self, version, pkgarch, checksum): | 195 | def get_value(self, version, pkgarch, checksum): |
125 | if self.nohist: | 196 | if self.nohist: |
126 | return self._getValueNohist(version, pkgarch, checksum) | 197 | return self._get_value_no_hist(version, pkgarch, checksum) |
127 | else: | 198 | else: |
128 | return self._getValueHist(version, pkgarch, checksum) | 199 | return self._get_value_hist(version, pkgarch, checksum) |
129 | 200 | ||
130 | def _importHist(self, version, pkgarch, checksum, value): | 201 | def _import_hist(self, version, pkgarch, checksum, value): |
131 | val = None | 202 | if self.read_only: |
203 | return None | ||
204 | |||
205 | val = None | ||
132 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 206 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, |
133 | (version, pkgarch, checksum)) | 207 | (version, pkgarch, checksum)) |
134 | row = data.fetchone() | 208 | row = data.fetchone() |
@@ -151,24 +225,27 @@ class PRTable(object): | |||
151 | val = row[0] | 225 | val = row[0] |
152 | return val | 226 | return val |
153 | 227 | ||
154 | def _importNohist(self, version, pkgarch, checksum, value): | 228 | def _import_no_hist(self, version, pkgarch, checksum, value): |
229 | if self.read_only: | ||
230 | return None | ||
231 | |||
155 | try: | 232 | try: |
156 | #try to insert | 233 | #try to insert |
157 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | 234 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), |
158 | (version, pkgarch, checksum,value)) | 235 | (version, pkgarch, checksum, value)) |
159 | except sqlite3.IntegrityError as exc: | 236 | except sqlite3.IntegrityError as exc: |
160 | #already have the record, try to update | 237 | #already have the record, try to update |
161 | try: | 238 | try: |
162 | self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" | 239 | self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" |
163 | % (self.table), | 240 | % (self.table), |
164 | (value,version,pkgarch,checksum,value)) | 241 | (value, version, pkgarch, checksum, value)) |
165 | except sqlite3.IntegrityError as exc: | 242 | except sqlite3.IntegrityError as exc: |
166 | logger.error(str(exc)) | 243 | logger.error(str(exc)) |
167 | 244 | ||
168 | self.dirty = True | 245 | self.dirty = True |
169 | 246 | ||
170 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table, | 247 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table, |
171 | (version,pkgarch,checksum,value)) | 248 | (version, pkgarch, checksum, value)) |
172 | row=data.fetchone() | 249 | row=data.fetchone() |
173 | if row is not None: | 250 | if row is not None: |
174 | return row[0] | 251 | return row[0] |
@@ -177,33 +254,33 @@ class PRTable(object): | |||
177 | 254 | ||
178 | def importone(self, version, pkgarch, checksum, value): | 255 | def importone(self, version, pkgarch, checksum, value): |
179 | if self.nohist: | 256 | if self.nohist: |
180 | return self._importNohist(version, pkgarch, checksum, value) | 257 | return self._import_no_hist(version, pkgarch, checksum, value) |
181 | else: | 258 | else: |
182 | return self._importHist(version, pkgarch, checksum, value) | 259 | return self._import_hist(version, pkgarch, checksum, value) |
183 | 260 | ||
184 | def export(self, version, pkgarch, checksum, colinfo): | 261 | def export(self, version, pkgarch, checksum, colinfo): |
185 | metainfo = {} | 262 | metainfo = {} |
186 | #column info | 263 | #column info |
187 | if colinfo: | 264 | if colinfo: |
188 | metainfo['tbl_name'] = self.table | 265 | metainfo["tbl_name"] = self.table |
189 | metainfo['core_ver'] = prserv.__version__ | 266 | metainfo["core_ver"] = prserv.__version__ |
190 | metainfo['col_info'] = [] | 267 | metainfo["col_info"] = [] |
191 | data = self._execute("PRAGMA table_info(%s);" % self.table) | 268 | data = self._execute("PRAGMA table_info(%s);" % self.table) |
192 | for row in data: | 269 | for row in data: |
193 | col = {} | 270 | col = {} |
194 | col['name'] = row['name'] | 271 | col["name"] = row["name"] |
195 | col['type'] = row['type'] | 272 | col["type"] = row["type"] |
196 | col['notnull'] = row['notnull'] | 273 | col["notnull"] = row["notnull"] |
197 | col['dflt_value'] = row['dflt_value'] | 274 | col["dflt_value"] = row["dflt_value"] |
198 | col['pk'] = row['pk'] | 275 | col["pk"] = row["pk"] |
199 | metainfo['col_info'].append(col) | 276 | metainfo["col_info"].append(col) |
200 | 277 | ||
201 | #data info | 278 | #data info |
202 | datainfo = [] | 279 | datainfo = [] |
203 | 280 | ||
204 | if self.nohist: | 281 | if self.nohist: |
205 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ | 282 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ |
206 | (SELECT version,pkgarch,max(value) as maxvalue FROM %s GROUP BY version,pkgarch) as T2 \ | 283 | (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \ |
207 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) | 284 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) |
208 | else: | 285 | else: |
209 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table | 286 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table |
@@ -226,12 +303,12 @@ class PRTable(object): | |||
226 | else: | 303 | else: |
227 | data = self._execute(sqlstmt) | 304 | data = self._execute(sqlstmt) |
228 | for row in data: | 305 | for row in data: |
229 | if row['version']: | 306 | if row["version"]: |
230 | col = {} | 307 | col = {} |
231 | col['version'] = row['version'] | 308 | col["version"] = row["version"] |
232 | col['pkgarch'] = row['pkgarch'] | 309 | col["pkgarch"] = row["pkgarch"] |
233 | col['checksum'] = row['checksum'] | 310 | col["checksum"] = row["checksum"] |
234 | col['value'] = row['value'] | 311 | col["value"] = row["value"] |
235 | datainfo.append(col) | 312 | datainfo.append(col) |
236 | return (metainfo, datainfo) | 313 | return (metainfo, datainfo) |
237 | 314 | ||
@@ -240,41 +317,45 @@ class PRTable(object): | |||
240 | for line in self.conn.iterdump(): | 317 | for line in self.conn.iterdump(): |
241 | writeCount = writeCount + len(line) + 1 | 318 | writeCount = writeCount + len(line) + 1 |
242 | fd.write(line) | 319 | fd.write(line) |
243 | fd.write('\n') | 320 | fd.write("\n") |
244 | return writeCount | 321 | return writeCount |
245 | 322 | ||
246 | class PRData(object): | 323 | class PRData(object): |
247 | """Object representing the PR database""" | 324 | """Object representing the PR database""" |
248 | def __init__(self, filename, nohist=True): | 325 | def __init__(self, filename, nohist=True, read_only=False): |
249 | self.filename=os.path.abspath(filename) | 326 | self.filename=os.path.abspath(filename) |
250 | self.nohist=nohist | 327 | self.nohist=nohist |
328 | self.read_only = read_only | ||
251 | #build directory hierarchy | 329 | #build directory hierarchy |
252 | try: | 330 | try: |
253 | os.makedirs(os.path.dirname(self.filename)) | 331 | os.makedirs(os.path.dirname(self.filename)) |
254 | except OSError as e: | 332 | except OSError as e: |
255 | if e.errno != errno.EEXIST: | 333 | if e.errno != errno.EEXIST: |
256 | raise e | 334 | raise e |
257 | self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False) | 335 | uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") |
336 | logger.debug("Opening PRServ database '%s'" % (uri)) | ||
337 | self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread = False) | ||
258 | self.connection.row_factory=sqlite3.Row | 338 | self.connection.row_factory=sqlite3.Row |
259 | self.connection.execute("pragma synchronous = off;") | 339 | if not self.read_only: |
260 | self.connection.execute("PRAGMA journal_mode = MEMORY;") | 340 | self.connection.execute("pragma synchronous = off;") |
341 | self.connection.execute("PRAGMA journal_mode = MEMORY;") | ||
261 | self._tables={} | 342 | self._tables={} |
262 | 343 | ||
263 | def disconnect(self): | 344 | def disconnect(self): |
264 | self.connection.close() | 345 | self.connection.close() |
265 | 346 | ||
266 | def __getitem__(self,tblname): | 347 | def __getitem__(self, tblname): |
267 | if not isinstance(tblname, str): | 348 | if not isinstance(tblname, str): |
268 | raise TypeError("tblname argument must be a string, not '%s'" % | 349 | raise TypeError("tblname argument must be a string, not '%s'" % |
269 | type(tblname)) | 350 | type(tblname)) |
270 | if tblname in self._tables: | 351 | if tblname in self._tables: |
271 | return self._tables[tblname] | 352 | return self._tables[tblname] |
272 | else: | 353 | else: |
273 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist) | 354 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only) |
274 | return tableobj | 355 | return tableobj |
275 | 356 | ||
276 | def __delitem__(self, tblname): | 357 | def __delitem__(self, tblname): |
277 | if tblname in self._tables: | 358 | if tblname in self._tables: |
278 | del self._tables[tblname] | 359 | del self._tables[tblname] |
279 | logger.info("drop table %s" % (tblname)) | 360 | logger.info("drop table %s" % (tblname)) |
280 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) | 361 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) |
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py index 25dcf8a0ee..dc4be5b620 100644 --- a/bitbake/lib/prserv/serv.py +++ b/bitbake/lib/prserv/serv.py | |||
@@ -1,354 +1,239 @@ | |||
1 | # | 1 | # |
2 | # Copyright BitBake Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
5 | import os,sys,logging | 7 | import os,sys,logging |
6 | import signal, time | 8 | import signal, time |
7 | from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | ||
8 | import threading | ||
9 | import queue | ||
10 | import socket | 9 | import socket |
11 | import io | 10 | import io |
12 | import sqlite3 | 11 | import sqlite3 |
13 | import bb.server.xmlrpcclient | ||
14 | import prserv | 12 | import prserv |
15 | import prserv.db | 13 | import prserv.db |
16 | import errno | 14 | import errno |
17 | import select | 15 | import bb.asyncrpc |
18 | 16 | ||
19 | logger = logging.getLogger("BitBake.PRserv") | 17 | logger = logging.getLogger("BitBake.PRserv") |
20 | 18 | ||
21 | if sys.hexversion < 0x020600F0: | 19 | PIDPREFIX = "/tmp/PRServer_%s_%s.pid" |
22 | print("Sorry, python 2.6 or later is required.") | 20 | singleton = None |
23 | sys.exit(1) | ||
24 | 21 | ||
25 | class Handler(SimpleXMLRPCRequestHandler): | 22 | class PRServerClient(bb.asyncrpc.AsyncServerConnection): |
26 | def _dispatch(self,method,params): | 23 | def __init__(self, socket, server): |
24 | super().__init__(socket, "PRSERVICE", server.logger) | ||
25 | self.server = server | ||
26 | |||
27 | self.handlers.update({ | ||
28 | "get-pr": self.handle_get_pr, | ||
29 | "test-pr": self.handle_test_pr, | ||
30 | "test-package": self.handle_test_package, | ||
31 | "max-package-pr": self.handle_max_package_pr, | ||
32 | "import-one": self.handle_import_one, | ||
33 | "export": self.handle_export, | ||
34 | "is-readonly": self.handle_is_readonly, | ||
35 | }) | ||
36 | |||
37 | def validate_proto_version(self): | ||
38 | return (self.proto_version == (1, 0)) | ||
39 | |||
40 | async def dispatch_message(self, msg): | ||
27 | try: | 41 | try: |
28 | value=self.server.funcs[method](*params) | 42 | return await super().dispatch_message(msg) |
29 | except: | 43 | except: |
30 | import traceback | 44 | self.server.table.sync() |
31 | traceback.print_exc() | ||
32 | raise | 45 | raise |
33 | return value | 46 | else: |
47 | self.server.table.sync_if_dirty() | ||
34 | 48 | ||
35 | PIDPREFIX = "/tmp/PRServer_%s_%s.pid" | 49 | async def handle_test_pr(self, request): |
36 | singleton = None | 50 | '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' |
51 | version = request["version"] | ||
52 | pkgarch = request["pkgarch"] | ||
53 | checksum = request["checksum"] | ||
37 | 54 | ||
55 | value = self.server.table.find_value(version, pkgarch, checksum) | ||
56 | return {"value": value} | ||
38 | 57 | ||
39 | class PRServer(SimpleXMLRPCServer): | 58 | async def handle_test_package(self, request): |
40 | def __init__(self, dbfile, logfile, interface, daemon=True): | 59 | '''Tells whether there are entries for (version, pkgarch) in the db. Returns True or False''' |
41 | ''' constructor ''' | 60 | version = request["version"] |
42 | try: | 61 | pkgarch = request["pkgarch"] |
43 | SimpleXMLRPCServer.__init__(self, interface, | ||
44 | logRequests=False, allow_none=True) | ||
45 | except socket.error: | ||
46 | ip=socket.gethostbyname(interface[0]) | ||
47 | port=interface[1] | ||
48 | msg="PR Server unable to bind to %s:%s\n" % (ip, port) | ||
49 | sys.stderr.write(msg) | ||
50 | raise PRServiceConfigError | ||
51 | 62 | ||
52 | self.dbfile=dbfile | 63 | value = self.server.table.test_package(version, pkgarch) |
53 | self.daemon=daemon | 64 | return {"value": value} |
54 | self.logfile=logfile | ||
55 | self.working_thread=None | ||
56 | self.host, self.port = self.socket.getsockname() | ||
57 | self.pidfile=PIDPREFIX % (self.host, self.port) | ||
58 | |||
59 | self.register_function(self.getPR, "getPR") | ||
60 | self.register_function(self.quit, "quit") | ||
61 | self.register_function(self.ping, "ping") | ||
62 | self.register_function(self.export, "export") | ||
63 | self.register_function(self.dump_db, "dump_db") | ||
64 | self.register_function(self.importone, "importone") | ||
65 | self.register_introspection_functions() | ||
66 | |||
67 | self.quitpipein, self.quitpipeout = os.pipe() | ||
68 | |||
69 | self.requestqueue = queue.Queue() | ||
70 | self.handlerthread = threading.Thread(target = self.process_request_thread) | ||
71 | self.handlerthread.daemon = False | ||
72 | |||
73 | def process_request_thread(self): | ||
74 | """Same as in BaseServer but as a thread. | ||
75 | |||
76 | In addition, exception handling is done here. | ||
77 | |||
78 | """ | ||
79 | iter_count = 1 | ||
80 | # 60 iterations between syncs or sync if dirty every ~30 seconds | ||
81 | iterations_between_sync = 60 | ||
82 | |||
83 | bb.utils.set_process_name("PRServ Handler") | ||
84 | |||
85 | while not self.quitflag: | ||
86 | try: | ||
87 | (request, client_address) = self.requestqueue.get(True, 30) | ||
88 | except queue.Empty: | ||
89 | self.table.sync_if_dirty() | ||
90 | continue | ||
91 | if request is None: | ||
92 | continue | ||
93 | try: | ||
94 | self.finish_request(request, client_address) | ||
95 | self.shutdown_request(request) | ||
96 | iter_count = (iter_count + 1) % iterations_between_sync | ||
97 | if iter_count == 0: | ||
98 | self.table.sync_if_dirty() | ||
99 | except: | ||
100 | self.handle_error(request, client_address) | ||
101 | self.shutdown_request(request) | ||
102 | self.table.sync() | ||
103 | self.table.sync_if_dirty() | ||
104 | |||
105 | def sigint_handler(self, signum, stack): | ||
106 | if self.table: | ||
107 | self.table.sync() | ||
108 | 65 | ||
109 | def sigterm_handler(self, signum, stack): | 66 | async def handle_max_package_pr(self, request): |
110 | if self.table: | 67 | '''Finds the greatest PR value for (version, pkgarch) in the db. Returns None if no entry was found''' |
111 | self.table.sync() | 68 | version = request["version"] |
112 | self.quit() | 69 | pkgarch = request["pkgarch"] |
113 | self.requestqueue.put((None, None)) | ||
114 | 70 | ||
115 | def process_request(self, request, client_address): | 71 | value = self.server.table.find_max_value(version, pkgarch) |
116 | self.requestqueue.put((request, client_address)) | 72 | return {"value": value} |
117 | 73 | ||
118 | def export(self, version=None, pkgarch=None, checksum=None, colinfo=True): | 74 | async def handle_get_pr(self, request): |
119 | try: | 75 | version = request["version"] |
120 | return self.table.export(version, pkgarch, checksum, colinfo) | 76 | pkgarch = request["pkgarch"] |
121 | except sqlite3.Error as exc: | 77 | checksum = request["checksum"] |
122 | logger.error(str(exc)) | 78 | |
123 | return None | 79 | response = None |
124 | |||
125 | def dump_db(self): | ||
126 | """ | ||
127 | Returns a script (string) that reconstructs the state of the | ||
128 | entire database at the time this function is called. The script | ||
129 | language is defined by the backing database engine, which is a | ||
130 | function of server configuration. | ||
131 | Returns None if the database engine does not support dumping to | ||
132 | script or if some other error is encountered in processing. | ||
133 | """ | ||
134 | buff = io.StringIO() | ||
135 | try: | 80 | try: |
136 | self.table.sync() | 81 | value = self.server.table.get_value(version, pkgarch, checksum) |
137 | self.table.dump_db(buff) | 82 | response = {"value": value} |
138 | return buff.getvalue() | 83 | except prserv.NotFoundError: |
139 | except Exception as exc: | 84 | self.logger.error("failure storing value in database for (%s, %s)",version, checksum) |
140 | logger.error(str(exc)) | 85 | |
141 | return None | 86 | return response |
142 | finally: | ||
143 | buff.close() | ||
144 | 87 | ||
145 | def importone(self, version, pkgarch, checksum, value): | 88 | async def handle_import_one(self, request): |
146 | return self.table.importone(version, pkgarch, checksum, value) | 89 | response = None |
90 | if not self.server.read_only: | ||
91 | version = request["version"] | ||
92 | pkgarch = request["pkgarch"] | ||
93 | checksum = request["checksum"] | ||
94 | value = request["value"] | ||
147 | 95 | ||
148 | def ping(self): | 96 | value = self.server.table.importone(version, pkgarch, checksum, value) |
149 | return not self.quitflag | 97 | if value is not None: |
98 | response = {"value": value} | ||
150 | 99 | ||
151 | def getinfo(self): | 100 | return response |
152 | return (self.host, self.port) | 101 | |
102 | async def handle_export(self, request): | ||
103 | version = request["version"] | ||
104 | pkgarch = request["pkgarch"] | ||
105 | checksum = request["checksum"] | ||
106 | colinfo = request["colinfo"] | ||
153 | 107 | ||
154 | def getPR(self, version, pkgarch, checksum): | ||
155 | try: | 108 | try: |
156 | return self.table.getValue(version, pkgarch, checksum) | 109 | (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo) |
157 | except prserv.NotFoundError: | ||
158 | logger.error("can not find value for (%s, %s)",version, checksum) | ||
159 | return None | ||
160 | except sqlite3.Error as exc: | 110 | except sqlite3.Error as exc: |
161 | logger.error(str(exc)) | 111 | self.logger.error(str(exc)) |
162 | return None | 112 | metainfo = datainfo = None |
163 | |||
164 | def quit(self): | ||
165 | self.quitflag=True | ||
166 | os.write(self.quitpipeout, b"q") | ||
167 | os.close(self.quitpipeout) | ||
168 | return | ||
169 | |||
170 | def work_forever(self,): | ||
171 | self.quitflag = False | ||
172 | # This timeout applies to the poll in TCPServer, we need the select | ||
173 | # below to wake on our quit pipe closing. We only ever call into handle_request | ||
174 | # if there is data there. | ||
175 | self.timeout = 0.01 | ||
176 | |||
177 | bb.utils.set_process_name("PRServ") | ||
178 | |||
179 | # DB connection must be created after all forks | ||
180 | self.db = prserv.db.PRData(self.dbfile) | ||
181 | self.table = self.db["PRMAIN"] | ||
182 | 113 | ||
183 | logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" % | 114 | return {"metainfo": metainfo, "datainfo": datainfo} |
184 | (self.dbfile, self.host, self.port, str(os.getpid()))) | ||
185 | |||
186 | self.handlerthread.start() | ||
187 | while not self.quitflag: | ||
188 | ready = select.select([self.fileno(), self.quitpipein], [], [], 30) | ||
189 | if self.quitflag: | ||
190 | break | ||
191 | if self.fileno() in ready[0]: | ||
192 | self.handle_request() | ||
193 | self.handlerthread.join() | ||
194 | self.db.disconnect() | ||
195 | logger.info("PRServer: stopping...") | ||
196 | self.server_close() | ||
197 | os.close(self.quitpipein) | ||
198 | return | ||
199 | 115 | ||
200 | def start(self): | 116 | async def handle_is_readonly(self, request): |
201 | if self.daemon: | 117 | return {"readonly": self.server.read_only} |
202 | pid = self.daemonize() | ||
203 | else: | ||
204 | pid = self.fork() | ||
205 | self.pid = pid | ||
206 | 118 | ||
207 | # Ensure both the parent sees this and the child from the work_forever log entry above | 119 | class PRServer(bb.asyncrpc.AsyncServer): |
208 | logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" % | 120 | def __init__(self, dbfile, read_only=False): |
209 | (self.dbfile, self.host, self.port, str(pid))) | 121 | super().__init__(logger) |
122 | self.dbfile = dbfile | ||
123 | self.table = None | ||
124 | self.read_only = read_only | ||
210 | 125 | ||
211 | def delpid(self): | 126 | def accept_client(self, socket): |
212 | os.remove(self.pidfile) | 127 | return PRServerClient(socket, self) |
213 | 128 | ||
214 | def daemonize(self): | 129 | def start(self): |
215 | """ | 130 | tasks = super().start() |
216 | See Advanced Programming in the UNIX, Sec 13.3 | 131 | self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only) |
217 | """ | 132 | self.table = self.db["PRMAIN"] |
218 | try: | ||
219 | pid = os.fork() | ||
220 | if pid > 0: | ||
221 | os.waitpid(pid, 0) | ||
222 | #parent return instead of exit to give control | ||
223 | return pid | ||
224 | except OSError as e: | ||
225 | raise Exception("%s [%d]" % (e.strerror, e.errno)) | ||
226 | |||
227 | os.setsid() | ||
228 | """ | ||
229 | fork again to make sure the daemon is not session leader, | ||
230 | which prevents it from acquiring controlling terminal | ||
231 | """ | ||
232 | try: | ||
233 | pid = os.fork() | ||
234 | if pid > 0: #parent | ||
235 | os._exit(0) | ||
236 | except OSError as e: | ||
237 | raise Exception("%s [%d]" % (e.strerror, e.errno)) | ||
238 | 133 | ||
239 | self.cleanup_handles() | 134 | self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % |
240 | os._exit(0) | 135 | (self.dbfile, self.address, str(os.getpid()))) |
241 | 136 | ||
242 | def fork(self): | 137 | return tasks |
243 | try: | 138 | |
244 | pid = os.fork() | 139 | async def stop(self): |
245 | if pid > 0: | 140 | self.table.sync_if_dirty() |
246 | self.socket.close() # avoid ResourceWarning in parent | 141 | self.db.disconnect() |
247 | return pid | 142 | await super().stop() |
248 | except OSError as e: | 143 | |
249 | raise Exception("%s [%d]" % (e.strerror, e.errno)) | 144 | def signal_handler(self): |
250 | 145 | super().signal_handler() | |
251 | bb.utils.signal_on_parent_exit("SIGTERM") | 146 | if self.table: |
252 | self.cleanup_handles() | 147 | self.table.sync() |
253 | os._exit(0) | ||
254 | |||
255 | def cleanup_handles(self): | ||
256 | signal.signal(signal.SIGINT, self.sigint_handler) | ||
257 | signal.signal(signal.SIGTERM, self.sigterm_handler) | ||
258 | os.chdir("/") | ||
259 | |||
260 | sys.stdout.flush() | ||
261 | sys.stderr.flush() | ||
262 | |||
263 | # We could be called from a python thread with io.StringIO as | ||
264 | # stdout/stderr or it could be 'real' unix fd forking where we need | ||
265 | # to physically close the fds to prevent the program launching us from | ||
266 | # potentially hanging on a pipe. Handle both cases. | ||
267 | si = open('/dev/null', 'r') | ||
268 | try: | ||
269 | os.dup2(si.fileno(),sys.stdin.fileno()) | ||
270 | except (AttributeError, io.UnsupportedOperation): | ||
271 | sys.stdin = si | ||
272 | so = open(self.logfile, 'a+') | ||
273 | try: | ||
274 | os.dup2(so.fileno(),sys.stdout.fileno()) | ||
275 | except (AttributeError, io.UnsupportedOperation): | ||
276 | sys.stdout = so | ||
277 | try: | ||
278 | os.dup2(so.fileno(),sys.stderr.fileno()) | ||
279 | except (AttributeError, io.UnsupportedOperation): | ||
280 | sys.stderr = so | ||
281 | |||
282 | # Clear out all log handlers prior to the fork() to avoid calling | ||
283 | # event handlers not part of the PRserver | ||
284 | for logger_iter in logging.Logger.manager.loggerDict.keys(): | ||
285 | logging.getLogger(logger_iter).handlers = [] | ||
286 | |||
287 | # Ensure logging makes it to the logfile | ||
288 | streamhandler = logging.StreamHandler() | ||
289 | streamhandler.setLevel(logging.DEBUG) | ||
290 | formatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") | ||
291 | streamhandler.setFormatter(formatter) | ||
292 | logger.addHandler(streamhandler) | ||
293 | |||
294 | # write pidfile | ||
295 | pid = str(os.getpid()) | ||
296 | with open(self.pidfile, 'w') as pf: | ||
297 | pf.write("%s\n" % pid) | ||
298 | |||
299 | self.work_forever() | ||
300 | self.delpid() | ||
301 | 148 | ||
302 | class PRServSingleton(object): | 149 | class PRServSingleton(object): |
303 | def __init__(self, dbfile, logfile, interface): | 150 | def __init__(self, dbfile, logfile, host, port): |
304 | self.dbfile = dbfile | 151 | self.dbfile = dbfile |
305 | self.logfile = logfile | 152 | self.logfile = logfile |
306 | self.interface = interface | ||
307 | self.host = None | ||
308 | self.port = None | ||
309 | |||
310 | def start(self): | ||
311 | self.prserv = PRServer(self.dbfile, self.logfile, self.interface, daemon=False) | ||
312 | self.prserv.start() | ||
313 | self.host, self.port = self.prserv.getinfo() | ||
314 | |||
315 | def getinfo(self): | ||
316 | return (self.host, self.port) | ||
317 | |||
318 | class PRServerConnection(object): | ||
319 | def __init__(self, host, port): | ||
320 | if is_local_special(host, port): | ||
321 | host, port = singleton.getinfo() | ||
322 | self.host = host | 153 | self.host = host |
323 | self.port = port | 154 | self.port = port |
324 | self.connection, self.transport = bb.server.xmlrpcclient._create_server(self.host, self.port) | ||
325 | |||
326 | def terminate(self): | ||
327 | try: | ||
328 | logger.info("Terminating PRServer...") | ||
329 | self.connection.quit() | ||
330 | except Exception as exc: | ||
331 | sys.stderr.write("%s\n" % str(exc)) | ||
332 | 155 | ||
333 | def getPR(self, version, pkgarch, checksum): | 156 | def start(self): |
334 | return self.connection.getPR(version, pkgarch, checksum) | 157 | self.prserv = PRServer(self.dbfile) |
158 | self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) | ||
159 | self.process = self.prserv.serve_as_process(log_level=logging.WARNING) | ||
335 | 160 | ||
336 | def ping(self): | 161 | if not self.prserv.address: |
337 | return self.connection.ping() | 162 | raise PRServiceConfigError |
163 | if not self.port: | ||
164 | self.port = int(self.prserv.address.rsplit(":", 1)[1]) | ||
338 | 165 | ||
339 | def export(self,version=None, pkgarch=None, checksum=None, colinfo=True): | 166 | def run_as_daemon(func, pidfile, logfile): |
340 | return self.connection.export(version, pkgarch, checksum, colinfo) | 167 | """ |
168 | See Advanced Programming in the UNIX, Sec 13.3 | ||
169 | """ | ||
170 | try: | ||
171 | pid = os.fork() | ||
172 | if pid > 0: | ||
173 | os.waitpid(pid, 0) | ||
174 | #parent return instead of exit to give control | ||
175 | return pid | ||
176 | except OSError as e: | ||
177 | raise Exception("%s [%d]" % (e.strerror, e.errno)) | ||
341 | 178 | ||
342 | def dump_db(self): | 179 | os.setsid() |
343 | return self.connection.dump_db() | 180 | """ |
181 | fork again to make sure the daemon is not session leader, | ||
182 | which prevents it from acquiring controlling terminal | ||
183 | """ | ||
184 | try: | ||
185 | pid = os.fork() | ||
186 | if pid > 0: #parent | ||
187 | os._exit(0) | ||
188 | except OSError as e: | ||
189 | raise Exception("%s [%d]" % (e.strerror, e.errno)) | ||
344 | 190 | ||
345 | def importone(self, version, pkgarch, checksum, value): | 191 | os.chdir("/") |
346 | return self.connection.importone(version, pkgarch, checksum, value) | ||
347 | 192 | ||
348 | def getinfo(self): | 193 | sys.stdout.flush() |
349 | return self.host, self.port | 194 | sys.stderr.flush() |
350 | 195 | ||
351 | def start_daemon(dbfile, host, port, logfile): | 196 | # We could be called from a python thread with io.StringIO as |
197 | # stdout/stderr or it could be 'real' unix fd forking where we need | ||
198 | # to physically close the fds to prevent the program launching us from | ||
199 | # potentially hanging on a pipe. Handle both cases. | ||
200 | si = open("/dev/null", "r") | ||
201 | try: | ||
202 | os.dup2(si.fileno(), sys.stdin.fileno()) | ||
203 | except (AttributeError, io.UnsupportedOperation): | ||
204 | sys.stdin = si | ||
205 | so = open(logfile, "a+") | ||
206 | try: | ||
207 | os.dup2(so.fileno(), sys.stdout.fileno()) | ||
208 | except (AttributeError, io.UnsupportedOperation): | ||
209 | sys.stdout = so | ||
210 | try: | ||
211 | os.dup2(so.fileno(), sys.stderr.fileno()) | ||
212 | except (AttributeError, io.UnsupportedOperation): | ||
213 | sys.stderr = so | ||
214 | |||
215 | # Clear out all log handlers prior to the fork() to avoid calling | ||
216 | # event handlers not part of the PRserver | ||
217 | for logger_iter in logging.Logger.manager.loggerDict.keys(): | ||
218 | logging.getLogger(logger_iter).handlers = [] | ||
219 | |||
220 | # Ensure logging makes it to the logfile | ||
221 | streamhandler = logging.StreamHandler() | ||
222 | streamhandler.setLevel(logging.DEBUG) | ||
223 | formatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") | ||
224 | streamhandler.setFormatter(formatter) | ||
225 | logger.addHandler(streamhandler) | ||
226 | |||
227 | # write pidfile | ||
228 | pid = str(os.getpid()) | ||
229 | with open(pidfile, "w") as pf: | ||
230 | pf.write("%s\n" % pid) | ||
231 | |||
232 | func() | ||
233 | os.remove(pidfile) | ||
234 | os._exit(0) | ||
235 | |||
236 | def start_daemon(dbfile, host, port, logfile, read_only=False): | ||
352 | ip = socket.gethostbyname(host) | 237 | ip = socket.gethostbyname(host) |
353 | pidfile = PIDPREFIX % (ip, port) | 238 | pidfile = PIDPREFIX % (ip, port) |
354 | try: | 239 | try: |
@@ -362,15 +247,13 @@ def start_daemon(dbfile, host, port, logfile): | |||
362 | % pidfile) | 247 | % pidfile) |
363 | return 1 | 248 | return 1 |
364 | 249 | ||
365 | server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (ip,port)) | 250 | dbfile = os.path.abspath(dbfile) |
366 | server.start() | 251 | def daemon_main(): |
252 | server = PRServer(dbfile, read_only=read_only) | ||
253 | server.start_tcp_server(ip, port) | ||
254 | server.serve_forever() | ||
367 | 255 | ||
368 | # Sometimes, the port (i.e. localhost:0) indicated by the user does not match with | 256 | run_as_daemon(daemon_main, pidfile, os.path.abspath(logfile)) |
369 | # the one the server actually is listening, so at least warn the user about it | ||
370 | _,rport = server.getinfo() | ||
371 | if port != rport: | ||
372 | sys.stdout.write("Server is listening at port %s instead of %s\n" | ||
373 | % (rport,port)) | ||
374 | return 0 | 257 | return 0 |
375 | 258 | ||
376 | def stop_daemon(host, port): | 259 | def stop_daemon(host, port): |
@@ -388,37 +271,28 @@ def stop_daemon(host, port): | |||
388 | # so at least advise the user which ports the corresponding server is listening | 271 | # so at least advise the user which ports the corresponding server is listening |
389 | ports = [] | 272 | ports = [] |
390 | portstr = "" | 273 | portstr = "" |
391 | for pf in glob.glob(PIDPREFIX % (ip,'*')): | 274 | for pf in glob.glob(PIDPREFIX % (ip, "*")): |
392 | bn = os.path.basename(pf) | 275 | bn = os.path.basename(pf) |
393 | root, _ = os.path.splitext(bn) | 276 | root, _ = os.path.splitext(bn) |
394 | ports.append(root.split('_')[-1]) | 277 | ports.append(root.split("_")[-1]) |
395 | if len(ports): | 278 | if len(ports): |
396 | portstr = "Wrong port? Other ports listening at %s: %s" % (host, ' '.join(ports)) | 279 | portstr = "Wrong port? Other ports listening at %s: %s" % (host, " ".join(ports)) |
397 | 280 | ||
398 | sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n" | 281 | sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n" |
399 | % (pidfile,portstr)) | 282 | % (pidfile, portstr)) |
400 | return 1 | 283 | return 1 |
401 | 284 | ||
402 | try: | 285 | try: |
403 | PRServerConnection(ip, port).terminate() | 286 | if is_running(pid): |
404 | except: | 287 | print("Sending SIGTERM to pr-server.") |
405 | logger.critical("Stop PRService %s:%d failed" % (host,port)) | 288 | os.kill(pid, signal.SIGTERM) |
406 | 289 | time.sleep(0.1) | |
407 | try: | ||
408 | if pid: | ||
409 | wait_timeout = 0 | ||
410 | print("Waiting for pr-server to exit.") | ||
411 | while is_running(pid) and wait_timeout < 50: | ||
412 | time.sleep(0.1) | ||
413 | wait_timeout += 1 | ||
414 | |||
415 | if is_running(pid): | ||
416 | print("Sending SIGTERM to pr-server.") | ||
417 | os.kill(pid,signal.SIGTERM) | ||
418 | time.sleep(0.1) | ||
419 | 290 | ||
420 | if os.path.exists(pidfile): | 291 | try: |
421 | os.remove(pidfile) | 292 | os.remove(pidfile) |
293 | except FileNotFoundError: | ||
294 | # The PID file might have been removed by the exiting process | ||
295 | pass | ||
422 | 296 | ||
423 | except OSError as e: | 297 | except OSError as e: |
424 | err = str(e) | 298 | err = str(e) |
@@ -436,7 +310,7 @@ def is_running(pid): | |||
436 | return True | 310 | return True |
437 | 311 | ||
438 | def is_local_special(host, port): | 312 | def is_local_special(host, port): |
439 | if host.strip().upper() == 'localhost'.upper() and (not port): | 313 | if (host == "localhost" or host == "127.0.0.1") and not port: |
440 | return True | 314 | return True |
441 | else: | 315 | else: |
442 | return False | 316 | return False |
@@ -447,7 +321,7 @@ class PRServiceConfigError(Exception): | |||
447 | def auto_start(d): | 321 | def auto_start(d): |
448 | global singleton | 322 | global singleton |
449 | 323 | ||
450 | host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':'))) | 324 | host_params = list(filter(None, (d.getVar("PRSERV_HOST") or "").split(":"))) |
451 | if not host_params: | 325 | if not host_params: |
452 | # Shutdown any existing PR Server | 326 | # Shutdown any existing PR Server |
453 | auto_shutdown() | 327 | auto_shutdown() |
@@ -456,11 +330,13 @@ def auto_start(d): | |||
456 | if len(host_params) != 2: | 330 | if len(host_params) != 2: |
457 | # Shutdown any existing PR Server | 331 | # Shutdown any existing PR Server |
458 | auto_shutdown() | 332 | auto_shutdown() |
459 | logger.critical('\n'.join(['PRSERV_HOST: incorrect format', | 333 | logger.critical("\n".join(["PRSERV_HOST: incorrect format", |
460 | 'Usage: PRSERV_HOST = "<hostname>:<port>"'])) | 334 | 'Usage: PRSERV_HOST = "<hostname>:<port>"'])) |
461 | raise PRServiceConfigError | 335 | raise PRServiceConfigError |
462 | 336 | ||
463 | if is_local_special(host_params[0], int(host_params[1])): | 337 | host = host_params[0].strip().lower() |
338 | port = int(host_params[1]) | ||
339 | if is_local_special(host, port): | ||
464 | import bb.utils | 340 | import bb.utils |
465 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) | 341 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) |
466 | if not cachedir: | 342 | if not cachedir: |
@@ -474,39 +350,43 @@ def auto_start(d): | |||
474 | auto_shutdown() | 350 | auto_shutdown() |
475 | if not singleton: | 351 | if not singleton: |
476 | bb.utils.mkdirhier(cachedir) | 352 | bb.utils.mkdirhier(cachedir) |
477 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0)) | 353 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port) |
478 | singleton.start() | 354 | singleton.start() |
479 | if singleton: | 355 | if singleton: |
480 | host, port = singleton.getinfo() | 356 | host = singleton.host |
481 | else: | 357 | port = singleton.port |
482 | host = host_params[0] | ||
483 | port = int(host_params[1]) | ||
484 | 358 | ||
485 | try: | 359 | try: |
486 | connection = PRServerConnection(host,port) | 360 | ping(host, port) |
487 | connection.ping() | 361 | return str(host) + ":" + str(port) |
488 | realhost, realport = connection.getinfo() | 362 | |
489 | return str(realhost) + ":" + str(realport) | ||
490 | |||
491 | except Exception: | 363 | except Exception: |
492 | logger.critical("PRservice %s:%d not available" % (host, port)) | 364 | logger.critical("PRservice %s:%d not available" % (host, port)) |
493 | raise PRServiceConfigError | 365 | raise PRServiceConfigError |
494 | 366 | ||
495 | def auto_shutdown(): | 367 | def auto_shutdown(): |
496 | global singleton | 368 | global singleton |
497 | if singleton: | 369 | if singleton and singleton.process: |
498 | host, port = singleton.getinfo() | 370 | singleton.process.terminate() |
499 | try: | 371 | singleton.process.join() |
500 | PRServerConnection(host, port).terminate() | ||
501 | except: | ||
502 | logger.critical("Stop PRService %s:%d failed" % (host,port)) | ||
503 | |||
504 | try: | ||
505 | os.waitpid(singleton.prserv.pid, 0) | ||
506 | except ChildProcessError: | ||
507 | pass | ||
508 | singleton = None | 372 | singleton = None |
509 | 373 | ||
510 | def ping(host, port): | 374 | def ping(host, port): |
511 | conn=PRServerConnection(host, port) | 375 | from . import client |
512 | return conn.ping() | 376 | |
377 | with client.PRClient() as conn: | ||
378 | conn.connect_tcp(host, port) | ||
379 | return conn.ping() | ||
380 | |||
381 | def connect(host, port): | ||
382 | from . import client | ||
383 | |||
384 | global singleton | ||
385 | |||
386 | if host.strip().lower() == "localhost" and not port: | ||
387 | host = "localhost" | ||
388 | port = singleton.port | ||
389 | |||
390 | conn = client.PRClient() | ||
391 | conn.connect_tcp(host, port) | ||
392 | return conn | ||
diff --git a/bitbake/lib/pyinotify.py b/bitbake/lib/pyinotify.py index 6ae40a2d76..3c5dab0312 100644 --- a/bitbake/lib/pyinotify.py +++ b/bitbake/lib/pyinotify.py | |||
@@ -52,7 +52,6 @@ from collections import deque | |||
52 | from datetime import datetime, timedelta | 52 | from datetime import datetime, timedelta |
53 | import time | 53 | import time |
54 | import re | 54 | import re |
55 | import asyncore | ||
56 | import glob | 55 | import glob |
57 | import locale | 56 | import locale |
58 | import subprocess | 57 | import subprocess |
@@ -596,14 +595,24 @@ class _ProcessEvent: | |||
596 | @type event: Event object | 595 | @type event: Event object |
597 | @return: By convention when used from the ProcessEvent class: | 596 | @return: By convention when used from the ProcessEvent class: |
598 | - Returning False or None (default value) means keep on | 597 | - Returning False or None (default value) means keep on |
599 | executing next chained functors (see chain.py example). | 598 | executing next chained functors (see chain.py example). |
600 | - Returning True instead means do not execute next | 599 | - Returning True instead means do not execute next |
601 | processing functions. | 600 | processing functions. |
602 | @rtype: bool | 601 | @rtype: bool |
603 | @raise ProcessEventError: Event object undispatchable, | 602 | @raise ProcessEventError: Event object undispatchable, |
604 | unknown event. | 603 | unknown event. |
605 | """ | 604 | """ |
606 | stripped_mask = event.mask - (event.mask & IN_ISDIR) | 605 | stripped_mask = event.mask & ~IN_ISDIR |
606 | # Bitbake hack - we see event masks of 0x6, i.e., IN_MODIFY | IN_ATTRIB. | ||
607 | # The kernel inotify code can set more than one of the bits in the mask, | ||
608 | # fsnotify_change() in linux/fsnotify.h is quite clear that IN_ATTRIB, | ||
609 | # IN_MODIFY and IN_ACCESS can arrive together. | ||
610 | # This breaks the code below which assume only one mask bit is ever | ||
611 | # set in an event. We don't care about attrib or access in bitbake so | ||
612 | # drop those. | ||
613 | if stripped_mask & IN_MODIFY: | ||
614 | stripped_mask &= ~(IN_ATTRIB | IN_ACCESS) | ||
615 | |||
607 | maskname = EventsCodes.ALL_VALUES.get(stripped_mask) | 616 | maskname = EventsCodes.ALL_VALUES.get(stripped_mask) |
608 | if maskname is None: | 617 | if maskname is None: |
609 | raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask) | 618 | raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask) |
@@ -1475,35 +1484,6 @@ class ThreadedNotifier(threading.Thread, Notifier): | |||
1475 | self.loop() | 1484 | self.loop() |
1476 | 1485 | ||
1477 | 1486 | ||
1478 | class AsyncNotifier(asyncore.file_dispatcher, Notifier): | ||
1479 | """ | ||
1480 | This notifier inherits from asyncore.file_dispatcher in order to be able to | ||
1481 | use pyinotify along with the asyncore framework. | ||
1482 | |||
1483 | """ | ||
1484 | def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, | ||
1485 | threshold=0, timeout=None, channel_map=None): | ||
1486 | """ | ||
1487 | Initializes the async notifier. The only additional parameter is | ||
1488 | 'channel_map' which is the optional asyncore private map. See | ||
1489 | Notifier class for the meaning of the others parameters. | ||
1490 | |||
1491 | """ | ||
1492 | Notifier.__init__(self, watch_manager, default_proc_fun, read_freq, | ||
1493 | threshold, timeout) | ||
1494 | asyncore.file_dispatcher.__init__(self, self._fd, channel_map) | ||
1495 | |||
1496 | def handle_read(self): | ||
1497 | """ | ||
1498 | When asyncore tells us we can read from the fd, we proceed processing | ||
1499 | events. This method can be overridden for handling a notification | ||
1500 | differently. | ||
1501 | |||
1502 | """ | ||
1503 | self.read_events() | ||
1504 | self.process_events() | ||
1505 | |||
1506 | |||
1507 | class TornadoAsyncNotifier(Notifier): | 1487 | class TornadoAsyncNotifier(Notifier): |
1508 | """ | 1488 | """ |
1509 | Tornado ioloop adapter. | 1489 | Tornado ioloop adapter. |
diff --git a/bitbake/lib/toaster/bldcollector/urls.py b/bitbake/lib/toaster/bldcollector/urls.py index efd67a81a5..3c34070351 100644 --- a/bitbake/lib/toaster/bldcollector/urls.py +++ b/bitbake/lib/toaster/bldcollector/urls.py | |||
@@ -6,7 +6,7 @@ | |||
6 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
7 | # | 7 | # |
8 | 8 | ||
9 | from django.conf.urls import url | 9 | from django.urls import re_path as url |
10 | 10 | ||
11 | import bldcollector.views | 11 | import bldcollector.views |
12 | 12 | ||
diff --git a/bitbake/lib/toaster/bldcollector/views.py b/bitbake/lib/toaster/bldcollector/views.py index 04cd8b3dd4..bdf38ae6e8 100644 --- a/bitbake/lib/toaster/bldcollector/views.py +++ b/bitbake/lib/toaster/bldcollector/views.py | |||
@@ -14,8 +14,11 @@ import subprocess | |||
14 | import toastermain | 14 | import toastermain |
15 | from django.views.decorators.csrf import csrf_exempt | 15 | from django.views.decorators.csrf import csrf_exempt |
16 | 16 | ||
17 | from toastermain.logs import log_view_mixin | ||
18 | |||
17 | 19 | ||
18 | @csrf_exempt | 20 | @csrf_exempt |
21 | @log_view_mixin | ||
19 | def eventfile(request): | 22 | def eventfile(request): |
20 | """ Receives a file by POST, and runs toaster-eventreply on this file """ | 23 | """ Receives a file by POST, and runs toaster-eventreply on this file """ |
21 | if request.method != "POST": | 24 | if request.method != "POST": |
diff --git a/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py index 75674ccbf1..577e765f11 100644 --- a/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py +++ b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py | |||
@@ -200,7 +200,7 @@ class LocalhostBEController(BuildEnvironmentController): | |||
200 | localdirpath = os.path.join(localdirname, dirpath) | 200 | localdirpath = os.path.join(localdirname, dirpath) |
201 | logger.debug("localhostbecontroller: localdirpath expects '%s'" % localdirpath) | 201 | logger.debug("localhostbecontroller: localdirpath expects '%s'" % localdirpath) |
202 | if not os.path.exists(localdirpath): | 202 | if not os.path.exists(localdirpath): |
203 | raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit)) | 203 | raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Exiting." % (localdirpath, giturl, commit)) |
204 | 204 | ||
205 | if name != "bitbake": | 205 | if name != "bitbake": |
206 | layerlist.append("%03d:%s" % (index,localdirpath.rstrip("/"))) | 206 | layerlist.append("%03d:%s" % (index,localdirpath.rstrip("/"))) |
@@ -467,7 +467,7 @@ class LocalhostBEController(BuildEnvironmentController): | |||
467 | logger.debug("localhostbecontroller: waiting for bblock content to appear") | 467 | logger.debug("localhostbecontroller: waiting for bblock content to appear") |
468 | time.sleep(1) | 468 | time.sleep(1) |
469 | else: | 469 | else: |
470 | raise BuildSetupException("Cannot find bitbake server lock file '%s'. Aborting." % bblock) | 470 | raise BuildSetupException("Cannot find bitbake server lock file '%s'. Exiting." % bblock) |
471 | 471 | ||
472 | with open(bblock) as fplock: | 472 | with open(bblock) as fplock: |
473 | for line in fplock: | 473 | for line in fplock: |
diff --git a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py index 19f659ec41..834e32b36f 100644 --- a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py +++ b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py | |||
@@ -180,6 +180,77 @@ class Command(BaseCommand): | |||
180 | except Exception as e: | 180 | except Exception as e: |
181 | logger.warning("runbuilds: schedule exception %s" % str(e)) | 181 | logger.warning("runbuilds: schedule exception %s" % str(e)) |
182 | 182 | ||
183 | # Test to see if a build prematurely died due to a bitbake crash | ||
184 | def check_dead_builds(self): | ||
185 | do_cleanup = False | ||
186 | try: | ||
187 | for br in BuildRequest.objects.filter(state=BuildRequest.REQ_INPROGRESS): | ||
188 | # Get the build directory | ||
189 | if br.project.builddir: | ||
190 | builddir = br.project.builddir | ||
191 | else: | ||
192 | builddir = '%s-toaster-%d' % (br.environment.builddir,br.project.id) | ||
193 | # Check log to see if there is a recent traceback | ||
194 | toaster_ui_log = os.path.join(builddir, 'toaster_ui.log') | ||
195 | test_file = os.path.join(builddir, '._toaster_check.txt') | ||
196 | os.system("tail -n 50 %s > %s" % (os.path.join(builddir, 'toaster_ui.log'),test_file)) | ||
197 | traceback_text = '' | ||
198 | is_traceback = False | ||
199 | with open(test_file,'r') as test_file_fd: | ||
200 | test_file_tail = test_file_fd.readlines() | ||
201 | for line in test_file_tail: | ||
202 | if line.startswith('Traceback (most recent call last):'): | ||
203 | traceback_text = line | ||
204 | is_traceback = True | ||
205 | elif line.startswith('NOTE: ToasterUI waiting for events'): | ||
206 | # Ignore any traceback before new build start | ||
207 | traceback_text = '' | ||
208 | is_traceback = False | ||
209 | elif line.startswith('Note: Toaster traceback auto-stop'): | ||
210 | # Ignore any traceback before this previous traceback catch | ||
211 | traceback_text = '' | ||
212 | is_traceback = False | ||
213 | elif is_traceback: | ||
214 | traceback_text += line | ||
215 | # Test the results | ||
216 | is_stop = False | ||
217 | if is_traceback: | ||
218 | # Found a traceback | ||
219 | errtype = 'Bitbake crash' | ||
220 | errmsg = 'Bitbake crash\n' + traceback_text | ||
221 | state = BuildRequest.REQ_FAILED | ||
222 | # Clean up bitbake files | ||
223 | bitbake_lock = os.path.join(builddir, 'bitbake.lock') | ||
224 | if os.path.isfile(bitbake_lock): | ||
225 | os.remove(bitbake_lock) | ||
226 | bitbake_sock = os.path.join(builddir, 'bitbake.sock') | ||
227 | if os.path.isfile(bitbake_sock): | ||
228 | os.remove(bitbake_sock) | ||
229 | if os.path.isfile(test_file): | ||
230 | os.remove(test_file) | ||
231 | # Add note to ignore this traceback on next check | ||
232 | os.system('echo "Note: Toaster traceback auto-stop" >> %s' % toaster_ui_log) | ||
233 | is_stop = True | ||
234 | # Add more tests here | ||
235 | #elif ... | ||
236 | # Stop the build request? | ||
237 | if is_stop: | ||
238 | brerror = BRError( | ||
239 | req = br, | ||
240 | errtype = errtype, | ||
241 | errmsg = errmsg, | ||
242 | traceback = traceback_text, | ||
243 | ) | ||
244 | brerror.save() | ||
245 | br.state = state | ||
246 | br.save() | ||
247 | do_cleanup = True | ||
248 | # Do cleanup | ||
249 | if do_cleanup: | ||
250 | self.cleanup() | ||
251 | except Exception as e: | ||
252 | logger.error("runbuilds: Error in check_dead_builds %s" % e) | ||
253 | |||
183 | def handle(self, **options): | 254 | def handle(self, **options): |
184 | pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), | 255 | pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), |
185 | ".runbuilds.pid") | 256 | ".runbuilds.pid") |
@@ -187,10 +258,18 @@ class Command(BaseCommand): | |||
187 | with open(pidfile_path, 'w') as pidfile: | 258 | with open(pidfile_path, 'w') as pidfile: |
188 | pidfile.write("%s" % os.getpid()) | 259 | pidfile.write("%s" % os.getpid()) |
189 | 260 | ||
261 | # Clean up any stale/failed builds from previous Toaster run | ||
190 | self.runbuild() | 262 | self.runbuild() |
191 | 263 | ||
192 | signal.signal(signal.SIGUSR1, lambda sig, frame: None) | 264 | signal.signal(signal.SIGUSR1, lambda sig, frame: None) |
193 | 265 | ||
194 | while True: | 266 | while True: |
195 | signal.pause() | 267 | sigset = signal.sigtimedwait([signal.SIGUSR1], 5) |
196 | self.runbuild() | 268 | if sigset: |
269 | for sig in sigset: | ||
270 | # Consume each captured pending event | ||
271 | self.runbuild() | ||
272 | else: | ||
273 | # Check for build exceptions | ||
274 | self.check_dead_builds() | ||
275 | |||
diff --git a/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py b/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py new file mode 100644 index 0000000000..45b477d02c --- /dev/null +++ b/bitbake/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py | |||
@@ -0,0 +1,48 @@ | |||
1 | # Generated by Django 3.2.12 on 2022-03-06 03:28 | ||
2 | |||
3 | from django.db import migrations, models | ||
4 | |||
5 | |||
6 | class Migration(migrations.Migration): | ||
7 | |||
8 | dependencies = [ | ||
9 | ('bldcontrol', '0007_brlayers_optional_gitinfo'), | ||
10 | ] | ||
11 | |||
12 | operations = [ | ||
13 | migrations.AlterField( | ||
14 | model_name='brbitbake', | ||
15 | name='id', | ||
16 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
17 | ), | ||
18 | migrations.AlterField( | ||
19 | model_name='brerror', | ||
20 | name='id', | ||
21 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
22 | ), | ||
23 | migrations.AlterField( | ||
24 | model_name='brlayer', | ||
25 | name='id', | ||
26 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
27 | ), | ||
28 | migrations.AlterField( | ||
29 | model_name='brtarget', | ||
30 | name='id', | ||
31 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
32 | ), | ||
33 | migrations.AlterField( | ||
34 | model_name='brvariable', | ||
35 | name='id', | ||
36 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
37 | ), | ||
38 | migrations.AlterField( | ||
39 | model_name='buildenvironment', | ||
40 | name='id', | ||
41 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
42 | ), | ||
43 | migrations.AlterField( | ||
44 | model_name='buildrequest', | ||
45 | name='id', | ||
46 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
47 | ), | ||
48 | ] | ||
diff --git a/bitbake/lib/toaster/bldcontrol/models.py b/bitbake/lib/toaster/bldcontrol/models.py index c2f302da24..42750e7180 100644 --- a/bitbake/lib/toaster/bldcontrol/models.py +++ b/bitbake/lib/toaster/bldcontrol/models.py | |||
@@ -4,7 +4,7 @@ | |||
4 | 4 | ||
5 | from __future__ import unicode_literals | 5 | from __future__ import unicode_literals |
6 | from django.db import models | 6 | from django.db import models |
7 | from django.utils.encoding import force_text | 7 | from django.utils.encoding import force_str |
8 | from orm.models import Project, Build, Layer_Version | 8 | from orm.models import Project, Build, Layer_Version |
9 | 9 | ||
10 | import logging | 10 | import logging |
@@ -124,7 +124,7 @@ class BuildRequest(models.Model): | |||
124 | return self.brvariable_set.get(name="MACHINE").value | 124 | return self.brvariable_set.get(name="MACHINE").value |
125 | 125 | ||
126 | def __str__(self): | 126 | def __str__(self): |
127 | return force_text('%s %s' % (self.project, self.get_state_display())) | 127 | return force_str('%s %s' % (self.project, self.get_state_display())) |
128 | 128 | ||
129 | # These tables specify the settings for running an actual build. | 129 | # These tables specify the settings for running an actual build. |
130 | # They MUST be kept in sync with the tables in orm.models.Project* | 130 | # They MUST be kept in sync with the tables in orm.models.Project* |
diff --git a/bitbake/lib/toaster/logs/.gitignore b/bitbake/lib/toaster/logs/.gitignore new file mode 100644 index 0000000000..e5ebf25a49 --- /dev/null +++ b/bitbake/lib/toaster/logs/.gitignore | |||
@@ -0,0 +1 @@ | |||
*.log* | |||
diff --git a/bitbake/lib/toaster/manage.py b/bitbake/lib/toaster/manage.py index ae32619d12..f8de49c264 100755 --- a/bitbake/lib/toaster/manage.py +++ b/bitbake/lib/toaster/manage.py | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/usr/bin/env python3 | 1 | #!/usr/bin/env python3 |
2 | # | 2 | # |
3 | # Copyright BitBake Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | 7 | ||
diff --git a/bitbake/lib/toaster/orm/fixtures/README b/bitbake/lib/toaster/orm/fixtures/README index 1b1c660aac..7cd745e26b 100644 --- a/bitbake/lib/toaster/orm/fixtures/README +++ b/bitbake/lib/toaster/orm/fixtures/README | |||
@@ -27,4 +27,4 @@ Data can be provided in XML, JSON and if installed YAML formats. | |||
27 | 27 | ||
28 | Use the django management command manage.py loaddata <your fixture file> | 28 | Use the django management command manage.py loaddata <your fixture file> |
29 | For further information see the Django command documentation at: | 29 | For further information see the Django command documentation at: |
30 | https://docs.djangoproject.com/en/1.8/ref/django-admin/#django-admin-loaddata | 30 | https://docs.djangoproject.com/en/3.2/ref/django-admin/#django-admin-loaddata |
diff --git a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py new file mode 100755 index 0000000000..71afe3914e --- /dev/null +++ b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py | |||
@@ -0,0 +1,447 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | # | ||
5 | # Generate Toaster Fixtures for 'poky.xml' and 'oe-core.xml' | ||
6 | # | ||
7 | # Copyright (C) 2022 Wind River Systems | ||
8 | # SPDX-License-Identifier: GPL-2.0-only | ||
9 | # | ||
10 | # Edit the 'current_releases' table for each new release cycle | ||
11 | # | ||
12 | # Usage: ./gen_fixtures.py all | ||
13 | # | ||
14 | |||
15 | import os | ||
16 | import sys | ||
17 | import argparse | ||
18 | |||
19 | verbose = False | ||
20 | |||
21 | #################################### | ||
22 | # Releases | ||
23 | # | ||
24 | # https://wiki.yoctoproject.org/wiki/Releases | ||
25 | # | ||
26 | # NOTE: for the current releases table, it helps to keep continuing releases | ||
27 | # in the same table positions since this minimizes the patch diff for review. | ||
28 | # The order of the table does not matter since Toaster presents them sorted. | ||
29 | # | ||
30 | # Traditionally, the two most current releases are included in addition to the | ||
31 | # 'master' branch and the local installation's 'HEAD'. | ||
32 | # It is also policy to include all active LTS releases. | ||
33 | # | ||
34 | |||
35 | # [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch] | ||
36 | current_releases = [ | ||
37 | # Release slot #1 | ||
38 | ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'], | ||
39 | # Release slot #2 'local' | ||
40 | ['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'], | ||
41 | # Release slot #3 'master' | ||
42 | ['Master','master','','Yocto Project master','master','','master'], | ||
43 | # Release slot #4 | ||
44 | ['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'], | ||
45 | # ['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'], | ||
46 | # ['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'], | ||
47 | # ['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'], | ||
48 | # ['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'], | ||
49 | # Optional Release slot #5 | ||
50 | ['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'], | ||
51 | ] | ||
52 | |||
53 | default_poky_layers = [ | ||
54 | 'openembedded-core', | ||
55 | 'meta-poky', | ||
56 | 'meta-yocto-bsp', | ||
57 | ] | ||
58 | |||
59 | default_oe_core_layers = [ | ||
60 | 'openembedded-core', | ||
61 | ] | ||
62 | |||
63 | #################################### | ||
64 | # Templates | ||
65 | |||
66 | prolog_template = '''\ | ||
67 | <?xml version="1.0" encoding="utf-8"?> | ||
68 | <django-objects version="1.0"> | ||
69 | <!-- Set the project default value for DISTRO --> | ||
70 | <object model="orm.toastersetting" pk="1"> | ||
71 | <field type="CharField" name="name">DEFCONF_DISTRO</field> | ||
72 | <field type="CharField" name="value">{{distro}}</field> | ||
73 | </object> | ||
74 | ''' | ||
75 | |||
76 | #<!-- Bitbake versions which correspond to the metadata release -->') | ||
77 | bitbakeversion_poky_template = '''\ | ||
78 | <object model="orm.bitbakeversion" pk="{{bitbake_id}}"> | ||
79 | <field type="CharField" name="name">{{name}}</field> | ||
80 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | ||
81 | <field type="CharField" name="branch">{{branch}}</field> | ||
82 | <field type="CharField" name="dirpath">bitbake</field> | ||
83 | </object> | ||
84 | ''' | ||
85 | bitbakeversion_oecore_template = '''\ | ||
86 | <object model="orm.bitbakeversion" pk="{{bitbake_id}}"> | ||
87 | <field type="CharField" name="name">{{name}}</field> | ||
88 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | ||
89 | <field type="CharField" name="branch">{{bitbakeversion}}</field> | ||
90 | </object> | ||
91 | ''' | ||
92 | |||
93 | # <!-- Releases available --> | ||
94 | releases_available_template = '''\ | ||
95 | <object model="orm.release" pk="{{ra_count}}"> | ||
96 | <field type="CharField" name="name">{{name}}</field> | ||
97 | <field type="CharField" name="description">{{description}}</field> | ||
98 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">{{ra_count}}</field> | ||
99 | <field type="CharField" name="branch_name">{{release}}</field> | ||
100 | <field type="TextField" name="helptext">Toaster will run your builds {{help_source}}.</field> | ||
101 | </object> | ||
102 | ''' | ||
103 | |||
104 | # <!-- Default project layers for each release --> | ||
105 | default_layers_template = '''\ | ||
106 | <object model="orm.releasedefaultlayer" pk="{{rdl_count}}"> | ||
107 | <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field> | ||
108 | <field type="CharField" name="layer_name">{{layer}}</field> | ||
109 | </object> | ||
110 | ''' | ||
111 | |||
112 | default_layers_preface = '''\ | ||
113 | <!-- Default layers provided by poky | ||
114 | openembedded-core | ||
115 | meta-poky | ||
116 | meta-yocto-bsp | ||
117 | --> | ||
118 | ''' | ||
119 | |||
120 | layer_poky_template = '''\ | ||
121 | <object model="orm.layer" pk="{{layer_id}}"> | ||
122 | <field type="CharField" name="name">{{layer}}</field> | ||
123 | <field type="CharField" name="layer_index_url"></field> | ||
124 | <field type="CharField" name="vcs_url">{{vcs_url}}</field> | ||
125 | <field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field> | ||
126 | <field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field> | ||
127 | <field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field> | ||
128 | </object> | ||
129 | ''' | ||
130 | |||
# Layer record emitted into oe-core.xml; the {{...}} placeholders are
# expanded from a params dict by print_template().
layer_oe_core_template = '''\
<object model="orm.layer" pk="{{layer_id}}">
<field type="CharField" name="name">{{layer}}</field>
<field type="CharField" name="vcs_url">{{vcs_url}}</field>
<field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field>
<field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field>
<field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field>
</object>
'''

# layer_version record for a layer pinned to a named release branch.
layer_version_template = '''\
<object model="orm.layer_version" pk="{{lv_count}}">
<field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
<field type="CharField" name="branch">{{branch}}</field>
<field type="CharField" name="dirpath">{{dirpath}}</field>
</object>
'''

# layer_version record for the HEAD (local clone) release: identical to
# layer_version_template but additionally records a commit.
layer_version_HEAD_template = '''\
<object model="orm.layer_version" pk="{{lv_count}}">
<field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
<field type="CharField" name="branch">{{branch}}</field>
<field type="CharField" name="commit">{{commit}}</field>
<field type="CharField" name="dirpath">{{dirpath}}</field>
</object>
'''

# layer_version record for the local oe-core checkout; every value is
# fixed (no placeholders), so it is emitted verbatim exactly once.
layer_version_oe_core_template = '''\
<object model="orm.layer_version" pk="1">
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
<field type="CharField" name="local_path">OE-CORE-LAYER-DIR</field>
<field type="CharField" name="branch">HEAD</field>
<field type="CharField" name="dirpath">meta</field>
<field type="IntegerField" name="layer_source">0</field>
</object>
'''

# Closing tag for the generated fixture document.
epilog_template = '''\
</django-objects>
'''
176 | |||
177 | ################################# | ||
178 | # Helper Routines | ||
179 | # | ||
180 | |||
def print_str(str,fd):
    """Write *str* to the file object *fd*.

    A single trailing newline is stripped first so that print()'s own
    newline does not produce a blank line in the output.
    """
    text = str[:-1] if str.endswith('\n') else str
    print(text, file=fd)
186 | |||
def print_template(template,params,fd):
    """Expand '{{key}}' placeholders in *template* from the *params* dict
    and write the result to the file object *fd*.

    Unknown keys are rendered as '?key?' so a missing parameter is visible
    in the generated fixture instead of failing silently.  Substituted
    values may themselves contain placeholders (e.g. 'h_release' expands
    to '?h={{release}}'), so scanning restarts from the beginning of the
    line after every substitution.  Empty lines are suppressed.
    """
    for line in template.split('\n'):
        p = line.find('{{')
        # find() returns -1 when absent; a placeholder may legally start
        # at column 0, so test >= 0 (the previous 'p > 0' skipped those).
        while p >= 0:
            q = line.find('}}', p)  # search after '{{' so the pair matches
            key = line[p+2:q]
            if key in params:
                line = line[0:p] + params[key] + line[q+2:]
            else:
                line = line[0:p] + '?' + key + '?' + line[q+2:]
            p = line.find('{{')
        if line:
            print(line,file=fd)
200 | |||
201 | ################################# | ||
202 | # Generate poky.xml | ||
203 | # | ||
204 | |||
def generate_poky():
    """Generate 'poky.xml', the Toaster fixture for the poky distribution.

    For every entry in the global 'current_releases' table this emits the
    bitbake version, the release record, the default project layers, and
    one layer_version record per (layer, release) pair.
    """
    # 'with' guarantees the file is closed even if template expansion fails
    with open('poky.xml','w') as fd:
        params = {}
        params['distro'] = 'poky'
        print_template(prolog_template,params,fd)
        print_str('',fd)

        print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
        for i,release in enumerate(current_releases):
            params = {}
            params['release'] = release[0]
            params['Release'] = release[0]
            params['release_version'] = release[1]
            # Membership must be tested against a tuple: the previous
            # "in ('HEAD')" was a substring test against the string 'HEAD'.
            if not (params['release'] in ('HEAD',)): # 'master',
                params['release'] = params['release'][0].lower() + params['release'][1:]
            params['name'] = params['release']
            params['bitbake_id'] = str(i+1)
            params['branch'] = params['release']
            print_template(bitbakeversion_poky_template,params,fd)
            print_str('',fd)

        print_str('',fd)
        print_str(' <!-- Releases available -->',fd)
        for i,release in enumerate(current_releases):
            params = {}
            params['release'] = release[0]
            params['Release'] = release[0]
            params['release_version'] = release[1]
            if not (params['release'] in ('HEAD',)): #'master',
                params['release'] = params['release'][0].lower() + params['release'][1:]
            params['h_release'] = '?h={{release}}'
            params['name'] = params['release']
            params['ra_count'] = str(i+1)
            params['branch'] = params['release']

            if 'HEAD' == params['release']:
                params['help_source'] = 'with the version of the Yocto Project you have cloned or downloaded to your computer'
                params['description'] = 'Local Yocto Project'
                params['name'] = 'local'
            else:
                params['help_source'] = 'using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/{{h_release}}">Yocto Project {{Release}} branch</a>'
                params['description'] = 'Yocto Project {{release_version}} "{{Release}}"'
                if 'master' == params['release']:
                    params['h_release'] = ''
                    params['description'] = 'Yocto Project master'

            print_template(releases_available_template,params,fd)
        print_str('',fd)

        print_str(' <!-- Default project layers for each release -->',fd)
        rdl_count = 1
        for i,release in enumerate(current_releases):
            for j,layer in enumerate(default_poky_layers):
                params = {}
                params['layer'] = layer
                params['release'] = release[0]
                params['Release'] = release[0]
                params['release_version'] = release[1]
                if not (params['release'] in ('master','HEAD')):
                    params['release'] = params['release'][0].lower() + params['release'][1:]
                params['release_id'] = str(i+1)
                params['rdl_count'] = str(rdl_count)
                params['branch'] = params['release']
                print_template(default_layers_template,params,fd)
                rdl_count += 1
        print_str('',fd)

        print_str(default_layers_preface,fd)
        lv_count = 1
        for i,layer in enumerate(default_poky_layers):
            params = {}
            params['layer'] = layer
            params['layer_id'] = str(i+1)
            params['vcs_url'] = 'git://git.yoctoproject.org/poky'
            params['vcs_web_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky'
            params['vcs_web_tree_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'
            params['vcs_web_file_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'

            if i:
                print_str('',fd)
            print_template(layer_poky_template,params,fd)
            for j,release in enumerate(current_releases):
                params['release'] = release[0]
                params['Release'] = release[0]
                params['release_version'] = release[1]
                if not (params['release'] in ('master','HEAD')):
                    params['release'] = params['release'][0].lower() + params['release'][1:]
                params['release_id'] = str(j+1)
                params['lv_count'] = str(lv_count)
                params['branch'] = params['release']
                params['commit'] = params['release']

                # openembedded-core lives in poky's 'meta' directory.
                # Tuple form again: "in ('openembedded-core')" was a
                # substring test against a plain string.
                params['dirpath'] = params['layer']
                if params['layer'] in ('openembedded-core',):
                    params['dirpath'] = 'meta'

                if 'HEAD' == params['release']:
                    print_template(layer_version_HEAD_template,params,fd)
                else:
                    print_template(layer_version_template,params,fd)
                lv_count += 1

        print_str(epilog_template,fd)
310 | |||
311 | ################################# | ||
312 | # Generate oe-core.xml | ||
313 | # | ||
314 | |||
def generate_oe_core():
    """Generate 'oe-core.xml', the Toaster fixture for plain OE-core
    ('nodistro') builds, driven by the global 'current_releases' table."""
    # 'with' guarantees the file is closed even if template expansion fails
    with open('oe-core.xml','w') as fd:
        params = {}
        params['distro'] = 'nodistro'
        print_template(prolog_template,params,fd)
        print_str('',fd)

        print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
        for i,release in enumerate(current_releases):
            params = {}
            params['release'] = release[0]
            params['Release'] = release[0]
            params['bitbakeversion'] = release[6]
            params['release_version'] = release[1]
            # Tuple, not bare string: "in ('HEAD')" was a substring test.
            if not (params['release'] in ('HEAD',)): # 'master',
                params['release'] = params['release'][0].lower() + params['release'][1:]
            params['name'] = params['release']
            params['bitbake_id'] = str(i+1)
            params['branch'] = params['release']
            print_template(bitbakeversion_oecore_template,params,fd)
            print_str('',fd)

        print_str(' <!-- Releases available -->',fd)
        for i,release in enumerate(current_releases):
            params = {}
            params['release'] = release[0]
            params['Release'] = release[0]
            params['release_version'] = release[1]
            if not (params['release'] in ('HEAD',)): #'master',
                params['release'] = params['release'][0].lower() + params['release'][1:]
            params['h_release'] = '?h={{release}}'
            params['name'] = params['release']
            params['ra_count'] = str(i+1)
            params['branch'] = params['release']

            if 'HEAD' == params['release']:
                params['help_source'] = 'with the version of OpenEmbedded that you have cloned or downloaded to your computer'
                params['description'] = 'Local Openembedded'
                params['name'] = 'local'
            else:
                params['help_source'] = 'using the tip of the <a href=\\"https://cgit.openembedded.org/openembedded-core/log/{{h_release}}\\">OpenEmbedded {{Release}}</a> branch'
                params['description'] = 'Openembedded {{Release}}'
                if 'master' == params['release']:
                    params['h_release'] = ''
                    params['description'] = 'OpenEmbedded core master'
                    params['Release'] = params['release']

            print_template(releases_available_template,params,fd)
        print_str('',fd)

        print_str(' <!-- Default layers for each release -->',fd)
        rdl_count = 1
        for i,release in enumerate(current_releases):
            for j,layer in enumerate(default_oe_core_layers):
                params = {}
                params['layer'] = layer
                params['release'] = release[0]
                params['Release'] = release[0]
                params['release_version'] = release[1]
                if not (params['release'] in ('master','HEAD')):
                    params['release'] = params['release'][0].lower() + params['release'][1:]
                params['release_id'] = str(i+1)
                params['rdl_count'] = str(rdl_count)
                params['branch'] = params['release']
                print_template(default_layers_template,params,fd)
                rdl_count += 1
        print_str('',fd)

        print_str('',fd)
        print_str(' <!-- Layer for the Local release -->',fd)
        for i,layer in enumerate(default_oe_core_layers):
            params = {}
            params['layer'] = layer
            params['layer_id'] = str(i+1)
            params['vcs_url'] = 'git://git.openembedded.org/openembedded-core'
            params['vcs_web_url'] = 'https://cgit.openembedded.org/openembedded-core'
            params['vcs_web_tree_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
            params['vcs_web_file_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
            if i:
                print_str('',fd)
            print_template(layer_oe_core_template,params,fd)

        # The local layer_version record is fully hard-coded (pk=1), so it
        # is emitted once; no per-layer counter is needed here.
        print_template(layer_version_oe_core_template,params,fd)
        print_str('',fd)

        print_str(epilog_template,fd)
404 | |||
405 | ################################# | ||
406 | # Help | ||
407 | # | ||
408 | |||
def list_releases():
    """Print a table of every known release: name, release version,
    bitbake version and support level, from the global release table."""
    print("Release ReleaseVer BitbakeVer Support Level")
    print("========== =========== ========== ==============================================")
    for rel in current_releases:
        name, version, bitbake_ver, support = rel[0], rel[1], rel[6], rel[4]
        print("%10s %10s %11s %s" % (name, version, bitbake_ver, support))
414 | |||
415 | ################################# | ||
416 | # main | ||
417 | # | ||
418 | |||
def main(argv):
    """Command-line entry point: parse *argv* and run the selected action.

    :param argv: argument list, without the program name (sys.argv[1:])
    """
    global verbose

    parser = argparse.ArgumentParser(description='gen_fixtures.py: table generate the fixture files')
    parser.add_argument('--poky', '-p', action='store_const', const='poky', dest='command', help='Generate the poky.xml file')
    parser.add_argument('--oe-core', '-o', action='store_const', const='oe_core', dest='command', help='Generate the oe-core.xml file')
    parser.add_argument('--all', '-a', action='store_const', const='all', dest='command', help='Generate all fixture files')
    parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List the release table')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Enable verbose debugging output')
    # Parse the argv the caller handed us; the previous parse_args() call
    # silently ignored the 'argv' parameter and re-read sys.argv itself.
    args = parser.parse_args(argv)

    verbose = args.verbose
    if 'poky' == args.command:
        generate_poky()
    elif 'oe_core' == args.command:
        generate_oe_core()
    elif 'all' == args.command:
        # Removed a duplicate, unreachable "elif 'all' == ..." branch that
        # shadowed this one and could never run.
        generate_poky()
        generate_oe_core()
    elif 'list' == args.command:
        list_releases()
    else:
        print("No command for 'gen_fixtures.py' selected")
# Script entry point: dispatch to main() with the command-line arguments
# (sys.argv[0] is the program name, so it is stripped off here).
if __name__ == '__main__':
    main(sys.argv[1:])
diff --git a/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/bitbake/lib/toaster/orm/fixtures/oe-core.xml index 026d94869a..950f2a98af 100644 --- a/bitbake/lib/toaster/orm/fixtures/oe-core.xml +++ b/bitbake/lib/toaster/orm/fixtures/oe-core.xml | |||
@@ -8,9 +8,9 @@ | |||
8 | 8 | ||
9 | <!-- Bitbake versions which correspond to the metadata release --> | 9 | <!-- Bitbake versions which correspond to the metadata release --> |
10 | <object model="orm.bitbakeversion" pk="1"> | 10 | <object model="orm.bitbakeversion" pk="1"> |
11 | <field type="CharField" name="name">dunfell</field> | 11 | <field type="CharField" name="name">kirkstone</field> |
12 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | 12 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> |
13 | <field type="CharField" name="branch">1.46</field> | 13 | <field type="CharField" name="branch">2.0</field> |
14 | </object> | 14 | </object> |
15 | <object model="orm.bitbakeversion" pk="2"> | 15 | <object model="orm.bitbakeversion" pk="2"> |
16 | <field type="CharField" name="name">HEAD</field> | 16 | <field type="CharField" name="name">HEAD</field> |
@@ -23,18 +23,23 @@ | |||
23 | <field type="CharField" name="branch">master</field> | 23 | <field type="CharField" name="branch">master</field> |
24 | </object> | 24 | </object> |
25 | <object model="orm.bitbakeversion" pk="4"> | 25 | <object model="orm.bitbakeversion" pk="4"> |
26 | <field type="CharField" name="name">gatesgarth</field> | 26 | <field type="CharField" name="name">mickledore</field> |
27 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | ||
28 | <field type="CharField" name="branch">2.4</field> | ||
29 | </object> | ||
30 | <object model="orm.bitbakeversion" pk="5"> | ||
31 | <field type="CharField" name="name">dunfell</field> | ||
27 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | 32 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> |
28 | <field type="CharField" name="branch">1.48</field> | 33 | <field type="CharField" name="branch">1.46</field> |
29 | </object> | 34 | </object> |
30 | 35 | ||
31 | <!-- Releases available --> | 36 | <!-- Releases available --> |
32 | <object model="orm.release" pk="1"> | 37 | <object model="orm.release" pk="1"> |
33 | <field type="CharField" name="name">dunfell</field> | 38 | <field type="CharField" name="name">kirkstone</field> |
34 | <field type="CharField" name="description">Openembedded Dunfell</field> | 39 | <field type="CharField" name="description">Openembedded Kirkstone</field> |
35 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> | 40 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> |
36 | <field type="CharField" name="branch_name">dunfell</field> | 41 | <field type="CharField" name="branch_name">kirkstone</field> |
37 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=dunfell\">OpenEmbedded Dunfell</a> branch.</field> | 42 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=kirkstone\">OpenEmbedded Kirkstone</a> branch.</field> |
38 | </object> | 43 | </object> |
39 | <object model="orm.release" pk="2"> | 44 | <object model="orm.release" pk="2"> |
40 | <field type="CharField" name="name">local</field> | 45 | <field type="CharField" name="name">local</field> |
@@ -48,14 +53,21 @@ | |||
48 | <field type="CharField" name="description">OpenEmbedded core master</field> | 53 | <field type="CharField" name="description">OpenEmbedded core master</field> |
49 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> | 54 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> |
50 | <field type="CharField" name="branch_name">master</field> | 55 | <field type="CharField" name="branch_name">master</field> |
51 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> | 56 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> |
52 | </object> | 57 | </object> |
53 | <object model="orm.release" pk="4"> | 58 | <object model="orm.release" pk="4"> |
54 | <field type="CharField" name="name">gatesgarth</field> | 59 | <field type="CharField" name="name">mickledore</field> |
55 | <field type="CharField" name="description">Openembedded Gatesgarth</field> | 60 | <field type="CharField" name="description">Openembedded Mickledore</field> |
56 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> | 61 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> |
57 | <field type="CharField" name="branch_name">gatesgarth</field> | 62 | <field type="CharField" name="branch_name">mickledore</field> |
58 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\">OpenEmbedded Gatesgarth</a> branch.</field> | 63 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=mickledore\">OpenEmbedded Mickledore</a> branch.</field> |
64 | </object> | ||
65 | <object model="orm.release" pk="5"> | ||
66 | <field type="CharField" name="name">dunfell</field> | ||
67 | <field type="CharField" name="description">Openembedded Dunfell</field> | ||
68 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> | ||
69 | <field type="CharField" name="branch_name">dunfell</field> | ||
70 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\">OpenEmbedded Dunfell</a> branch.</field> | ||
59 | </object> | 71 | </object> |
60 | 72 | ||
61 | <!-- Default layers for each release --> | 73 | <!-- Default layers for each release --> |
@@ -75,15 +87,19 @@ | |||
75 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 87 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
76 | <field type="CharField" name="layer_name">openembedded-core</field> | 88 | <field type="CharField" name="layer_name">openembedded-core</field> |
77 | </object> | 89 | </object> |
90 | <object model="orm.releasedefaultlayer" pk="5"> | ||
91 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
92 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
93 | </object> | ||
78 | 94 | ||
79 | 95 | ||
80 | <!-- Layer for the Local release --> | 96 | <!-- Layer for the Local release --> |
81 | <object model="orm.layer" pk="1"> | 97 | <object model="orm.layer" pk="1"> |
82 | <field type="CharField" name="name">openembedded-core</field> | 98 | <field type="CharField" name="name">openembedded-core</field> |
83 | <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field> | 99 | <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field> |
84 | <field type="CharField" name="vcs_web_url">http://cgit.openembedded.org/openembedded-core</field> | 100 | <field type="CharField" name="vcs_web_url">https://cgit.openembedded.org/openembedded-core</field> |
85 | <field type="CharField" name="vcs_web_tree_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> | 101 | <field type="CharField" name="vcs_web_tree_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> |
86 | <field type="CharField" name="vcs_web_file_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> | 102 | <field type="CharField" name="vcs_web_file_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> |
87 | </object> | 103 | </object> |
88 | <object model="orm.layer_version" pk="1"> | 104 | <object model="orm.layer_version" pk="1"> |
89 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 105 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
diff --git a/bitbake/lib/toaster/orm/fixtures/poky.xml b/bitbake/lib/toaster/orm/fixtures/poky.xml index a468a54c49..121e52fd45 100644 --- a/bitbake/lib/toaster/orm/fixtures/poky.xml +++ b/bitbake/lib/toaster/orm/fixtures/poky.xml | |||
@@ -8,9 +8,9 @@ | |||
8 | 8 | ||
9 | <!-- Bitbake versions which correspond to the metadata release --> | 9 | <!-- Bitbake versions which correspond to the metadata release --> |
10 | <object model="orm.bitbakeversion" pk="1"> | 10 | <object model="orm.bitbakeversion" pk="1"> |
11 | <field type="CharField" name="name">dunfell</field> | 11 | <field type="CharField" name="name">kirkstone</field> |
12 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | 12 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> |
13 | <field type="CharField" name="branch">dunfell</field> | 13 | <field type="CharField" name="branch">kirkstone</field> |
14 | <field type="CharField" name="dirpath">bitbake</field> | 14 | <field type="CharField" name="dirpath">bitbake</field> |
15 | </object> | 15 | </object> |
16 | <object model="orm.bitbakeversion" pk="2"> | 16 | <object model="orm.bitbakeversion" pk="2"> |
@@ -26,20 +26,26 @@ | |||
26 | <field type="CharField" name="dirpath">bitbake</field> | 26 | <field type="CharField" name="dirpath">bitbake</field> |
27 | </object> | 27 | </object> |
28 | <object model="orm.bitbakeversion" pk="4"> | 28 | <object model="orm.bitbakeversion" pk="4"> |
29 | <field type="CharField" name="name">gatesgarth</field> | 29 | <field type="CharField" name="name">mickledore</field> |
30 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | 30 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> |
31 | <field type="CharField" name="branch">gatesgarth</field> | 31 | <field type="CharField" name="branch">mickledore</field> |
32 | <field type="CharField" name="dirpath">bitbake</field> | ||
33 | </object> | ||
34 | <object model="orm.bitbakeversion" pk="5"> | ||
35 | <field type="CharField" name="name">dunfell</field> | ||
36 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | ||
37 | <field type="CharField" name="branch">dunfell</field> | ||
32 | <field type="CharField" name="dirpath">bitbake</field> | 38 | <field type="CharField" name="dirpath">bitbake</field> |
33 | </object> | 39 | </object> |
34 | 40 | ||
35 | 41 | ||
36 | <!-- Releases available --> | 42 | <!-- Releases available --> |
37 | <object model="orm.release" pk="1"> | 43 | <object model="orm.release" pk="1"> |
38 | <field type="CharField" name="name">dunfell</field> | 44 | <field type="CharField" name="name">kirkstone</field> |
39 | <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field> | 45 | <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field> |
40 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> | 46 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> |
41 | <field type="CharField" name="branch_name">dunfell</field> | 47 | <field type="CharField" name="branch_name">kirkstone</field> |
42 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell">Yocto Project Dunfell branch</a>.</field> | 48 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone">Yocto Project Kirkstone branch</a>.</field> |
43 | </object> | 49 | </object> |
44 | <object model="orm.release" pk="2"> | 50 | <object model="orm.release" pk="2"> |
45 | <field type="CharField" name="name">local</field> | 51 | <field type="CharField" name="name">local</field> |
@@ -53,14 +59,21 @@ | |||
53 | <field type="CharField" name="description">Yocto Project master</field> | 59 | <field type="CharField" name="description">Yocto Project master</field> |
54 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> | 60 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> |
55 | <field type="CharField" name="branch_name">master</field> | 61 | <field type="CharField" name="branch_name">master</field> |
56 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> | 62 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> |
57 | </object> | 63 | </object> |
58 | <object model="orm.release" pk="4"> | 64 | <object model="orm.release" pk="4"> |
59 | <field type="CharField" name="name">gatesgarth</field> | 65 | <field type="CharField" name="name">mickledore</field> |
60 | <field type="CharField" name="description">Yocto Project 3.2 "Gatesgarth"</field> | 66 | <field type="CharField" name="description">Yocto Project 4.2 "Mickledore"</field> |
61 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> | 67 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> |
62 | <field type="CharField" name="branch_name">gatesgarth</field> | 68 | <field type="CharField" name="branch_name">mickledore</field> |
63 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth">Yocto Project Gatesgarth branch</a>.</field> | 69 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=mickledore">Yocto Project Mickledore branch</a>.</field> |
70 | </object> | ||
71 | <object model="orm.release" pk="5"> | ||
72 | <field type="CharField" name="name">dunfell</field> | ||
73 | <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field> | ||
74 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> | ||
75 | <field type="CharField" name="branch_name">dunfell</field> | ||
76 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell">Yocto Project Dunfell branch</a>.</field> | ||
64 | </object> | 77 | </object> |
65 | 78 | ||
66 | <!-- Default project layers for each release --> | 79 | <!-- Default project layers for each release --> |
@@ -112,6 +125,18 @@ | |||
112 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 125 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
113 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> | 126 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> |
114 | </object> | 127 | </object> |
128 | <object model="orm.releasedefaultlayer" pk="13"> | ||
129 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
130 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
131 | </object> | ||
132 | <object model="orm.releasedefaultlayer" pk="14"> | ||
133 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
134 | <field type="CharField" name="layer_name">meta-poky</field> | ||
135 | </object> | ||
136 | <object model="orm.releasedefaultlayer" pk="15"> | ||
137 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
138 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> | ||
139 | </object> | ||
115 | 140 | ||
116 | <!-- Default layers provided by poky | 141 | <!-- Default layers provided by poky |
117 | openembedded-core | 142 | openembedded-core |
@@ -122,15 +147,15 @@ | |||
122 | <field type="CharField" name="name">openembedded-core</field> | 147 | <field type="CharField" name="name">openembedded-core</field> |
123 | <field type="CharField" name="layer_index_url"></field> | 148 | <field type="CharField" name="layer_index_url"></field> |
124 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> | 149 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> |
125 | <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> | 150 | <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> |
126 | <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 151 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
127 | <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 152 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
128 | </object> | 153 | </object> |
129 | <object model="orm.layer_version" pk="1"> | 154 | <object model="orm.layer_version" pk="1"> |
130 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 155 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
131 | <field type="IntegerField" name="layer_source">0</field> | 156 | <field type="IntegerField" name="layer_source">0</field> |
132 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 157 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
133 | <field type="CharField" name="branch">dunfell</field> | 158 | <field type="CharField" name="branch">kirkstone</field> |
134 | <field type="CharField" name="dirpath">meta</field> | 159 | <field type="CharField" name="dirpath">meta</field> |
135 | </object> | 160 | </object> |
136 | <object model="orm.layer_version" pk="2"> | 161 | <object model="orm.layer_version" pk="2"> |
@@ -152,7 +177,14 @@ | |||
152 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 177 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
153 | <field type="IntegerField" name="layer_source">0</field> | 178 | <field type="IntegerField" name="layer_source">0</field> |
154 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 179 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
155 | <field type="CharField" name="branch">gatesgarth</field> | 180 | <field type="CharField" name="branch">mickledore</field> |
181 | <field type="CharField" name="dirpath">meta</field> | ||
182 | </object> | ||
183 | <object model="orm.layer_version" pk="5"> | ||
184 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | ||
185 | <field type="IntegerField" name="layer_source">0</field> | ||
186 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
187 | <field type="CharField" name="branch">dunfell</field> | ||
156 | <field type="CharField" name="dirpath">meta</field> | 188 | <field type="CharField" name="dirpath">meta</field> |
157 | </object> | 189 | </object> |
158 | 190 | ||
@@ -160,18 +192,18 @@ | |||
160 | <field type="CharField" name="name">meta-poky</field> | 192 | <field type="CharField" name="name">meta-poky</field> |
161 | <field type="CharField" name="layer_index_url"></field> | 193 | <field type="CharField" name="layer_index_url"></field> |
162 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> | 194 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> |
163 | <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> | 195 | <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> |
164 | <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 196 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
165 | <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 197 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
166 | </object> | 198 | </object> |
167 | <object model="orm.layer_version" pk="5"> | 199 | <object model="orm.layer_version" pk="6"> |
168 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 200 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
169 | <field type="IntegerField" name="layer_source">0</field> | 201 | <field type="IntegerField" name="layer_source">0</field> |
170 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 202 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
171 | <field type="CharField" name="branch">dunfell</field> | 203 | <field type="CharField" name="branch">kirkstone</field> |
172 | <field type="CharField" name="dirpath">meta-poky</field> | 204 | <field type="CharField" name="dirpath">meta-poky</field> |
173 | </object> | 205 | </object> |
174 | <object model="orm.layer_version" pk="6"> | 206 | <object model="orm.layer_version" pk="7"> |
175 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 207 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
176 | <field type="IntegerField" name="layer_source">0</field> | 208 | <field type="IntegerField" name="layer_source">0</field> |
177 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> | 209 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> |
@@ -179,18 +211,25 @@ | |||
179 | <field type="CharField" name="commit">HEAD</field> | 211 | <field type="CharField" name="commit">HEAD</field> |
180 | <field type="CharField" name="dirpath">meta-poky</field> | 212 | <field type="CharField" name="dirpath">meta-poky</field> |
181 | </object> | 213 | </object> |
182 | <object model="orm.layer_version" pk="7"> | 214 | <object model="orm.layer_version" pk="8"> |
183 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 215 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
184 | <field type="IntegerField" name="layer_source">0</field> | 216 | <field type="IntegerField" name="layer_source">0</field> |
185 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> | 217 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> |
186 | <field type="CharField" name="branch">master</field> | 218 | <field type="CharField" name="branch">master</field> |
187 | <field type="CharField" name="dirpath">meta-poky</field> | 219 | <field type="CharField" name="dirpath">meta-poky</field> |
188 | </object> | 220 | </object> |
189 | <object model="orm.layer_version" pk="8"> | 221 | <object model="orm.layer_version" pk="9"> |
190 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 222 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
191 | <field type="IntegerField" name="layer_source">0</field> | 223 | <field type="IntegerField" name="layer_source">0</field> |
192 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 224 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
193 | <field type="CharField" name="branch">gatesgarth</field> | 225 | <field type="CharField" name="branch">mickledore</field> |
226 | <field type="CharField" name="dirpath">meta-poky</field> | ||
227 | </object> | ||
228 | <object model="orm.layer_version" pk="10"> | ||
229 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | ||
230 | <field type="IntegerField" name="layer_source">0</field> | ||
231 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
232 | <field type="CharField" name="branch">dunfell</field> | ||
194 | <field type="CharField" name="dirpath">meta-poky</field> | 233 | <field type="CharField" name="dirpath">meta-poky</field> |
195 | </object> | 234 | </object> |
196 | 235 | ||
@@ -198,18 +237,18 @@ | |||
198 | <field type="CharField" name="name">meta-yocto-bsp</field> | 237 | <field type="CharField" name="name">meta-yocto-bsp</field> |
199 | <field type="CharField" name="layer_index_url"></field> | 238 | <field type="CharField" name="layer_index_url"></field> |
200 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> | 239 | <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> |
201 | <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> | 240 | <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> |
202 | <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 241 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
203 | <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 242 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
204 | </object> | 243 | </object> |
205 | <object model="orm.layer_version" pk="9"> | 244 | <object model="orm.layer_version" pk="11"> |
206 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 245 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
207 | <field type="IntegerField" name="layer_source">0</field> | 246 | <field type="IntegerField" name="layer_source">0</field> |
208 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 247 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
209 | <field type="CharField" name="branch">dunfell</field> | 248 | <field type="CharField" name="branch">kirkstone</field> |
210 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 249 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
211 | </object> | 250 | </object> |
212 | <object model="orm.layer_version" pk="10"> | 251 | <object model="orm.layer_version" pk="12"> |
213 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 252 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
214 | <field type="IntegerField" name="layer_source">0</field> | 253 | <field type="IntegerField" name="layer_source">0</field> |
215 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> | 254 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> |
@@ -217,18 +256,25 @@ | |||
217 | <field type="CharField" name="commit">HEAD</field> | 256 | <field type="CharField" name="commit">HEAD</field> |
218 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 257 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
219 | </object> | 258 | </object> |
220 | <object model="orm.layer_version" pk="11"> | 259 | <object model="orm.layer_version" pk="13"> |
221 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 260 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
222 | <field type="IntegerField" name="layer_source">0</field> | 261 | <field type="IntegerField" name="layer_source">0</field> |
223 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> | 262 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> |
224 | <field type="CharField" name="branch">master</field> | 263 | <field type="CharField" name="branch">master</field> |
225 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 264 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
226 | </object> | 265 | </object> |
227 | <object model="orm.layer_version" pk="12"> | 266 | <object model="orm.layer_version" pk="14"> |
228 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 267 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
229 | <field type="IntegerField" name="layer_source">0</field> | 268 | <field type="IntegerField" name="layer_source">0</field> |
230 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 269 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
231 | <field type="CharField" name="branch">gatesgarth</field> | 270 | <field type="CharField" name="branch">mickledore</field> |
271 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | ||
272 | </object> | ||
273 | <object model="orm.layer_version" pk="15"> | ||
274 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | ||
275 | <field type="IntegerField" name="layer_source">0</field> | ||
276 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | ||
277 | <field type="CharField" name="branch">dunfell</field> | ||
232 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 278 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
233 | </object> | 279 | </object> |
234 | </django-objects> | 280 | </django-objects> |
diff --git a/bitbake/lib/toaster/orm/fixtures/settings.xml b/bitbake/lib/toaster/orm/fixtures/settings.xml index 78c0fdca7f..02c26a6974 100644 --- a/bitbake/lib/toaster/orm/fixtures/settings.xml +++ b/bitbake/lib/toaster/orm/fixtures/settings.xml | |||
@@ -12,14 +12,14 @@ | |||
12 | </object> | 12 | </object> |
13 | <object model="orm.toastersetting" pk="4"> | 13 | <object model="orm.toastersetting" pk="4"> |
14 | <field type="CharField" name="name">DEFCONF_MACHINE</field> | 14 | <field type="CharField" name="name">DEFCONF_MACHINE</field> |
15 | <field type="CharField" name="value">qemux86</field> | 15 | <field type="CharField" name="value">qemux86-64</field> |
16 | </object> | 16 | </object> |
17 | <object model="orm.toastersetting" pk="5"> | 17 | <object model="orm.toastersetting" pk="5"> |
18 | <field type="CharField" name="name">DEFCONF_SSTATE_DIR</field> | 18 | <field type="CharField" name="name">DEFCONF_SSTATE_DIR</field> |
19 | <field type="CharField" name="value">${TOPDIR}/../sstate-cache</field> | 19 | <field type="CharField" name="value">${TOPDIR}/../sstate-cache</field> |
20 | </object> | 20 | </object> |
21 | <object model="orm.toastersetting" pk="6"> | 21 | <object model="orm.toastersetting" pk="6"> |
22 | <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL_append</field> | 22 | <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL:append</field> |
23 | <field type="CharField" name="value"></field> | 23 | <field type="CharField" name="value"></field> |
24 | </object> | 24 | </object> |
25 | <object model="orm.toastersetting" pk="7"> | 25 | <object model="orm.toastersetting" pk="7"> |
diff --git a/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/bitbake/lib/toaster/orm/management/commands/lsupdates.py index 2fbd7be3d2..6d64830ebd 100644 --- a/bitbake/lib/toaster/orm/management/commands/lsupdates.py +++ b/bitbake/lib/toaster/orm/management/commands/lsupdates.py | |||
@@ -21,7 +21,7 @@ import threading | |||
21 | import time | 21 | import time |
22 | logger = logging.getLogger("toaster") | 22 | logger = logging.getLogger("toaster") |
23 | 23 | ||
24 | DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/" | 24 | DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/" |
25 | 25 | ||
26 | # Add path to bitbake modules for layerindexlib | 26 | # Add path to bitbake modules for layerindexlib |
27 | # lib/toaster/orm/management/commands/lsupdates.py (abspath) | 27 | # lib/toaster/orm/management/commands/lsupdates.py (abspath) |
@@ -40,7 +40,7 @@ class Spinner(threading.Thread): | |||
40 | """ A simple progress spinner to indicate download/parsing is happening""" | 40 | """ A simple progress spinner to indicate download/parsing is happening""" |
41 | def __init__(self, *args, **kwargs): | 41 | def __init__(self, *args, **kwargs): |
42 | super(Spinner, self).__init__(*args, **kwargs) | 42 | super(Spinner, self).__init__(*args, **kwargs) |
43 | self.setDaemon(True) | 43 | self.daemon = True |
44 | self.signal = True | 44 | self.signal = True |
45 | 45 | ||
46 | def run(self): | 46 | def run(self): |
@@ -87,13 +87,13 @@ class Command(BaseCommand): | |||
87 | 87 | ||
88 | # update branches; only those that we already have names listed in the | 88 | # update branches; only those that we already have names listed in the |
89 | # Releases table | 89 | # Releases table |
90 | whitelist_branch_names = [rel.branch_name | 90 | allowed_branch_names = [rel.branch_name |
91 | for rel in Release.objects.all()] | 91 | for rel in Release.objects.all()] |
92 | if len(whitelist_branch_names) == 0: | 92 | if len(allowed_branch_names) == 0: |
93 | raise Exception("Failed to make list of branches to fetch") | 93 | raise Exception("Failed to make list of branches to fetch") |
94 | 94 | ||
95 | logger.info("Fetching metadata for %s", | 95 | logger.info("Fetching metadata for %s", |
96 | " ".join(whitelist_branch_names)) | 96 | " ".join(allowed_branch_names)) |
97 | 97 | ||
98 | # We require a non-empty bb.data, but we can fake it with a dictionary | 98 | # We require a non-empty bb.data, but we can fake it with a dictionary |
99 | layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"}) | 99 | layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"}) |
@@ -101,8 +101,8 @@ class Command(BaseCommand): | |||
101 | http_progress = Spinner() | 101 | http_progress = Spinner() |
102 | http_progress.start() | 102 | http_progress.start() |
103 | 103 | ||
104 | if whitelist_branch_names: | 104 | if allowed_branch_names: |
105 | url_branches = ";branch=%s" % ','.join(whitelist_branch_names) | 105 | url_branches = ";branch=%s" % ','.join(allowed_branch_names) |
106 | else: | 106 | else: |
107 | url_branches = "" | 107 | url_branches = "" |
108 | layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches)) | 108 | layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches)) |
diff --git a/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py b/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py new file mode 100644 index 0000000000..f19b5dddbc --- /dev/null +++ b/bitbake/lib/toaster/orm/migrations/0020_models_bigautofield.py | |||
@@ -0,0 +1,173 @@ | |||
1 | # Generated by Django 3.2.12 on 2022-03-06 03:28 | ||
2 | |||
3 | from django.db import migrations, models | ||
4 | |||
5 | |||
6 | class Migration(migrations.Migration): | ||
7 | |||
8 | dependencies = [ | ||
9 | ('orm', '0019_django_2_2'), | ||
10 | ] | ||
11 | |||
12 | operations = [ | ||
13 | migrations.AlterField( | ||
14 | model_name='bitbakeversion', | ||
15 | name='id', | ||
16 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
17 | ), | ||
18 | migrations.AlterField( | ||
19 | model_name='build', | ||
20 | name='id', | ||
21 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
22 | ), | ||
23 | migrations.AlterField( | ||
24 | model_name='distro', | ||
25 | name='id', | ||
26 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
27 | ), | ||
28 | migrations.AlterField( | ||
29 | model_name='helptext', | ||
30 | name='id', | ||
31 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
32 | ), | ||
33 | migrations.AlterField( | ||
34 | model_name='layer', | ||
35 | name='id', | ||
36 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
37 | ), | ||
38 | migrations.AlterField( | ||
39 | model_name='layer_version', | ||
40 | name='id', | ||
41 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
42 | ), | ||
43 | migrations.AlterField( | ||
44 | model_name='layerversiondependency', | ||
45 | name='id', | ||
46 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
47 | ), | ||
48 | migrations.AlterField( | ||
49 | model_name='logmessage', | ||
50 | name='id', | ||
51 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
52 | ), | ||
53 | migrations.AlterField( | ||
54 | model_name='machine', | ||
55 | name='id', | ||
56 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
57 | ), | ||
58 | migrations.AlterField( | ||
59 | model_name='package', | ||
60 | name='id', | ||
61 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
62 | ), | ||
63 | migrations.AlterField( | ||
64 | model_name='package_dependency', | ||
65 | name='id', | ||
66 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
67 | ), | ||
68 | migrations.AlterField( | ||
69 | model_name='package_file', | ||
70 | name='id', | ||
71 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
72 | ), | ||
73 | migrations.AlterField( | ||
74 | model_name='project', | ||
75 | name='id', | ||
76 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
77 | ), | ||
78 | migrations.AlterField( | ||
79 | model_name='projectlayer', | ||
80 | name='id', | ||
81 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
82 | ), | ||
83 | migrations.AlterField( | ||
84 | model_name='projecttarget', | ||
85 | name='id', | ||
86 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
87 | ), | ||
88 | migrations.AlterField( | ||
89 | model_name='projectvariable', | ||
90 | name='id', | ||
91 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
92 | ), | ||
93 | migrations.AlterField( | ||
94 | model_name='provides', | ||
95 | name='id', | ||
96 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
97 | ), | ||
98 | migrations.AlterField( | ||
99 | model_name='recipe', | ||
100 | name='id', | ||
101 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
102 | ), | ||
103 | migrations.AlterField( | ||
104 | model_name='recipe_dependency', | ||
105 | name='id', | ||
106 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
107 | ), | ||
108 | migrations.AlterField( | ||
109 | model_name='release', | ||
110 | name='id', | ||
111 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
112 | ), | ||
113 | migrations.AlterField( | ||
114 | model_name='releasedefaultlayer', | ||
115 | name='id', | ||
116 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
117 | ), | ||
118 | migrations.AlterField( | ||
119 | model_name='target', | ||
120 | name='id', | ||
121 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
122 | ), | ||
123 | migrations.AlterField( | ||
124 | model_name='target_file', | ||
125 | name='id', | ||
126 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
127 | ), | ||
128 | migrations.AlterField( | ||
129 | model_name='target_image_file', | ||
130 | name='id', | ||
131 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
132 | ), | ||
133 | migrations.AlterField( | ||
134 | model_name='target_installed_package', | ||
135 | name='id', | ||
136 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
137 | ), | ||
138 | migrations.AlterField( | ||
139 | model_name='targetkernelfile', | ||
140 | name='id', | ||
141 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
142 | ), | ||
143 | migrations.AlterField( | ||
144 | model_name='targetsdkfile', | ||
145 | name='id', | ||
146 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
147 | ), | ||
148 | migrations.AlterField( | ||
149 | model_name='task', | ||
150 | name='id', | ||
151 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
152 | ), | ||
153 | migrations.AlterField( | ||
154 | model_name='task_dependency', | ||
155 | name='id', | ||
156 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
157 | ), | ||
158 | migrations.AlterField( | ||
159 | model_name='toastersetting', | ||
160 | name='id', | ||
161 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
162 | ), | ||
163 | migrations.AlterField( | ||
164 | model_name='variable', | ||
165 | name='id', | ||
166 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
167 | ), | ||
168 | migrations.AlterField( | ||
169 | model_name='variablehistory', | ||
170 | name='id', | ||
171 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), | ||
172 | ), | ||
173 | ] | ||
diff --git a/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py new file mode 100644 index 0000000000..328eb5753c --- /dev/null +++ b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py | |||
@@ -0,0 +1,22 @@ | |||
1 | # Generated by Django 4.2.5 on 2023-11-23 18:44 | ||
2 | |||
3 | from django.db import migrations, models | ||
4 | |||
5 | |||
6 | class Migration(migrations.Migration): | ||
7 | |||
8 | dependencies = [ | ||
9 | ('orm', '0020_models_bigautofield'), | ||
10 | ] | ||
11 | |||
12 | operations = [ | ||
13 | migrations.CreateModel( | ||
14 | name='EventLogsImports', | ||
15 | fields=[ | ||
16 | ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), | ||
17 | ('name', models.CharField(max_length=255)), | ||
18 | ('imported', models.BooleanField(default=False)), | ||
19 | ('build_id', models.IntegerField(blank=True, null=True)), | ||
20 | ], | ||
21 | ), | ||
22 | ] | ||
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py index 7f7e922ade..19c9686206 100644 --- a/bitbake/lib/toaster/orm/models.py +++ b/bitbake/lib/toaster/orm/models.py | |||
@@ -58,7 +58,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']: | |||
58 | return _base_insert(self, *args, **kwargs) | 58 | return _base_insert(self, *args, **kwargs) |
59 | QuerySet._insert = _insert | 59 | QuerySet._insert = _insert |
60 | 60 | ||
61 | from django.utils import six | ||
62 | def _create_object_from_params(self, lookup, params): | 61 | def _create_object_from_params(self, lookup, params): |
63 | """ | 62 | """ |
64 | Tries to create an object using passed params. | 63 | Tries to create an object using passed params. |
@@ -108,7 +107,7 @@ class ToasterSetting(models.Model): | |||
108 | 107 | ||
109 | 108 | ||
110 | class ProjectManager(models.Manager): | 109 | class ProjectManager(models.Manager): |
111 | def create_project(self, name, release, existing_project=None): | 110 | def create_project(self, name, release, existing_project=None, imported=False): |
112 | if existing_project and (release is not None): | 111 | if existing_project and (release is not None): |
113 | prj = existing_project | 112 | prj = existing_project |
114 | prj.bitbake_version = release.bitbake_version | 113 | prj.bitbake_version = release.bitbake_version |
@@ -135,19 +134,19 @@ class ProjectManager(models.Manager): | |||
135 | 134 | ||
136 | if release is None: | 135 | if release is None: |
137 | return prj | 136 | return prj |
138 | 137 | if not imported: | |
139 | for rdl in release.releasedefaultlayer_set.all(): | 138 | for rdl in release.releasedefaultlayer_set.all(): |
140 | lv = Layer_Version.objects.filter( | 139 | lv = Layer_Version.objects.filter( |
141 | layer__name=rdl.layer_name, | 140 | layer__name=rdl.layer_name, |
142 | release=release).first() | 141 | release=release).first() |
143 | 142 | ||
144 | if lv: | 143 | if lv: |
145 | ProjectLayer.objects.create(project=prj, | 144 | ProjectLayer.objects.create(project=prj, |
146 | layercommit=lv, | 145 | layercommit=lv, |
147 | optional=False) | 146 | optional=False) |
148 | else: | 147 | else: |
149 | logger.warning("Default project layer %s not found" % | 148 | logger.warning("Default project layer %s not found" % |
150 | rdl.layer_name) | 149 | rdl.layer_name) |
151 | 150 | ||
152 | return prj | 151 | return prj |
153 | 152 | ||
@@ -1390,9 +1389,6 @@ class Machine(models.Model): | |||
1390 | return "Machine " + self.name + "(" + self.description + ")" | 1389 | return "Machine " + self.name + "(" + self.description + ")" |
1391 | 1390 | ||
1392 | 1391 | ||
1393 | |||
1394 | |||
1395 | |||
1396 | class BitbakeVersion(models.Model): | 1392 | class BitbakeVersion(models.Model): |
1397 | 1393 | ||
1398 | name = models.CharField(max_length=32, unique = True) | 1394 | name = models.CharField(max_length=32, unique = True) |
@@ -1717,9 +1713,9 @@ class CustomImageRecipe(Recipe): | |||
1717 | 1713 | ||
1718 | def generate_recipe_file_contents(self): | 1714 | def generate_recipe_file_contents(self): |
1719 | """Generate the contents for the recipe file.""" | 1715 | """Generate the contents for the recipe file.""" |
1720 | # If we have no excluded packages we only need to _append | 1716 | # If we have no excluded packages we only need to :append |
1721 | if self.excludes_set.count() == 0: | 1717 | if self.excludes_set.count() == 0: |
1722 | packages_conf = "IMAGE_INSTALL_append = \" " | 1718 | packages_conf = "IMAGE_INSTALL:append = \" " |
1723 | 1719 | ||
1724 | for pkg in self.appends_set.all(): | 1720 | for pkg in self.appends_set.all(): |
1725 | packages_conf += pkg.name+' ' | 1721 | packages_conf += pkg.name+' ' |
@@ -1734,7 +1730,7 @@ class CustomImageRecipe(Recipe): | |||
1734 | packages_conf += "\"" | 1730 | packages_conf += "\"" |
1735 | 1731 | ||
1736 | base_recipe_path = self.get_base_recipe_file() | 1732 | base_recipe_path = self.get_base_recipe_file() |
1737 | if base_recipe_path: | 1733 | if base_recipe_path and os.path.isfile(base_recipe_path): |
1738 | base_recipe = open(base_recipe_path, 'r').read() | 1734 | base_recipe = open(base_recipe_path, 'r').read() |
1739 | else: | 1735 | else: |
1740 | # Pass back None to trigger error message to user | 1736 | # Pass back None to trigger error message to user |
@@ -1854,6 +1850,8 @@ def signal_runbuilds(): | |||
1854 | os.kill(int(pidf.read()), SIGUSR1) | 1850 | os.kill(int(pidf.read()), SIGUSR1) |
1855 | except FileNotFoundError: | 1851 | except FileNotFoundError: |
1856 | logger.info("Stopping existing runbuilds: no current process found") | 1852 | logger.info("Stopping existing runbuilds: no current process found") |
1853 | except ProcessLookupError: | ||
1854 | logger.warning("Stopping existing runbuilds: process lookup not found") | ||
1857 | 1855 | ||
1858 | class Distro(models.Model): | 1856 | class Distro(models.Model): |
1859 | search_allowed_fields = ["name", "description", "layer_version__layer__name"] | 1857 | search_allowed_fields = ["name", "description", "layer_version__layer__name"] |
@@ -1870,6 +1868,15 @@ class Distro(models.Model): | |||
1870 | def __unicode__(self): | 1868 | def __unicode__(self): |
1871 | return "Distro " + self.name + "(" + self.description + ")" | 1869 | return "Distro " + self.name + "(" + self.description + ")" |
1872 | 1870 | ||
1871 | class EventLogsImports(models.Model): | ||
1872 | name = models.CharField(max_length=255) | ||
1873 | imported = models.BooleanField(default=False) | ||
1874 | build_id = models.IntegerField(blank=True, null=True) | ||
1875 | |||
1876 | def __str__(self): | ||
1877 | return self.name | ||
1878 | |||
1879 | |||
1873 | django.db.models.signals.post_save.connect(invalidate_cache) | 1880 | django.db.models.signals.post_save.connect(invalidate_cache) |
1874 | django.db.models.signals.post_delete.connect(invalidate_cache) | 1881 | django.db.models.signals.post_delete.connect(invalidate_cache) |
1875 | django.db.models.signals.m2m_changed.connect(invalidate_cache) | 1882 | django.db.models.signals.m2m_changed.connect(invalidate_cache) |
diff --git a/bitbake/lib/toaster/pytest.ini b/bitbake/lib/toaster/pytest.ini new file mode 100644 index 0000000000..071c65fcd5 --- /dev/null +++ b/bitbake/lib/toaster/pytest.ini | |||
@@ -0,0 +1,16 @@ | |||
1 | # -- FILE: pytest.ini (or tox.ini) | ||
2 | [pytest] | ||
3 | # --create-db - force re creation of the test database | ||
4 | # https://pytest-django.readthedocs.io/en/latest/database.html#create-db-force-re-creation-of-the-test-database | ||
5 | |||
6 | # --html=report.html --self-contained-html | ||
7 | # https://docs.pytest.org/en/latest/usage.html#creating-html-reports | ||
8 | # https://pytest-html.readthedocs.io/en/latest/user_guide.html#creating-a-self-contained-report | ||
9 | addopts = --create-db --html="Toaster Tests Report.html" --self-contained-html | ||
10 | |||
11 | # Define environment variables using pytest-env | ||
12 | # A pytest plugin that enables you to set environment variables in the pytest.ini file. | ||
13 | # https://pypi.org/project/pytest-env/ | ||
14 | env = | ||
15 | TOASTER_BUILDSERVER=1 | ||
16 | DJANGO_SETTINGS_MODULE=toastermain.settings_test | ||
diff --git a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py index 644d45fe58..393be75496 100644 --- a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py +++ b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py | |||
@@ -19,11 +19,15 @@ import os | |||
19 | import time | 19 | import time |
20 | import unittest | 20 | import unittest |
21 | 21 | ||
22 | import pytest | ||
22 | from selenium import webdriver | 23 | from selenium import webdriver |
24 | from selenium.webdriver.support import expected_conditions as EC | ||
23 | from selenium.webdriver.support.ui import WebDriverWait | 25 | from selenium.webdriver.support.ui import WebDriverWait |
26 | from selenium.webdriver.common.by import By | ||
24 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities | 27 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities |
25 | from selenium.common.exceptions import NoSuchElementException, \ | 28 | from selenium.common.exceptions import NoSuchElementException, \ |
26 | StaleElementReferenceException, TimeoutException | 29 | StaleElementReferenceException, TimeoutException, \ |
30 | SessionNotCreatedException | ||
27 | 31 | ||
28 | def create_selenium_driver(cls,browser='chrome'): | 32 | def create_selenium_driver(cls,browser='chrome'): |
29 | # set default browser string based on env (if available) | 33 | # set default browser string based on env (if available) |
@@ -32,9 +36,32 @@ def create_selenium_driver(cls,browser='chrome'): | |||
32 | browser = env_browser | 36 | browser = env_browser |
33 | 37 | ||
34 | if browser == 'chrome': | 38 | if browser == 'chrome': |
35 | return webdriver.Chrome( | 39 | options = webdriver.ChromeOptions() |
36 | service_args=["--verbose", "--log-path=selenium.log"] | 40 | options.add_argument('--headless') |
37 | ) | 41 | options.add_argument('--disable-infobars') |
42 | options.add_argument('--disable-dev-shm-usage') | ||
43 | options.add_argument('--no-sandbox') | ||
44 | options.add_argument('--remote-debugging-port=9222') | ||
45 | try: | ||
46 | return webdriver.Chrome(options=options) | ||
47 | except SessionNotCreatedException as e: | ||
48 | exit_message = "Halting tests prematurely to avoid cascading errors." | ||
49 | # check if chrome / chromedriver exists | ||
50 | chrome_path = os.popen("find ~/.cache/selenium/chrome/ -name 'chrome' -type f -print -quit").read().strip() | ||
51 | if not chrome_path: | ||
52 | pytest.exit(f"Failed to install/find chrome.\n{exit_message}") | ||
53 | chromedriver_path = os.popen("find ~/.cache/selenium/chromedriver/ -name 'chromedriver' -type f -print -quit").read().strip() | ||
54 | if not chromedriver_path: | ||
55 | pytest.exit(f"Failed to install/find chromedriver.\n{exit_message}") | ||
56 | # check if the dependencies of each are fulfilled | ||
57 | depends_chrome = os.popen(f"ldd {chrome_path} | grep 'not found'").read().strip() | ||
58 | if depends_chrome: | ||
59 | pytest.exit(f"Missing chrome dependencies.\n{depends_chrome}\n{exit_message}") | ||
60 | depends_chromedriver = os.popen(f"ldd {chromedriver_path} | grep 'not found'").read().strip() | ||
61 | if depends_chromedriver: | ||
62 | pytest.exit(f"Missing chromedriver dependencies.\n{depends_chromedriver}\n{exit_message}") | ||
63 | # print original error otherwise | ||
64 | pytest.exit(f"Failed to start chromedriver.\n{e}\n{exit_message}") | ||
38 | elif browser == 'firefox': | 65 | elif browser == 'firefox': |
39 | return webdriver.Firefox() | 66 | return webdriver.Firefox() |
40 | elif browser == 'marionette': | 67 | elif browser == 'marionette': |
@@ -66,7 +93,9 @@ class Wait(WebDriverWait): | |||
66 | _TIMEOUT = 10 | 93 | _TIMEOUT = 10 |
67 | _POLL_FREQUENCY = 0.5 | 94 | _POLL_FREQUENCY = 0.5 |
68 | 95 | ||
69 | def __init__(self, driver): | 96 | def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY): |
97 | self._TIMEOUT = timeout | ||
98 | self._POLL_FREQUENCY = poll | ||
70 | super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY) | 99 | super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY) |
71 | 100 | ||
72 | def until(self, method, message=''): | 101 | def until(self, method, message=''): |
@@ -138,6 +167,8 @@ class SeleniumTestCaseBase(unittest.TestCase): | |||
138 | """ Clean up webdriver driver """ | 167 | """ Clean up webdriver driver """ |
139 | 168 | ||
140 | cls.driver.quit() | 169 | cls.driver.quit() |
170 | # Allow driver resources to be properly freed before proceeding with further tests | ||
171 | time.sleep(5) | ||
141 | super(SeleniumTestCaseBase, cls).tearDownClass() | 172 | super(SeleniumTestCaseBase, cls).tearDownClass() |
142 | 173 | ||
143 | def get(self, url): | 174 | def get(self, url): |
@@ -151,13 +182,20 @@ class SeleniumTestCaseBase(unittest.TestCase): | |||
151 | abs_url = '%s%s' % (self.live_server_url, url) | 182 | abs_url = '%s%s' % (self.live_server_url, url) |
152 | self.driver.get(abs_url) | 183 | self.driver.get(abs_url) |
153 | 184 | ||
185 | try: # Ensure page is loaded before proceeding | ||
186 | self.wait_until_visible("#global-nav", poll=3) | ||
187 | except NoSuchElementException: | ||
188 | self.driver.implicitly_wait(3) | ||
189 | except TimeoutException: | ||
190 | self.driver.implicitly_wait(3) | ||
191 | |||
154 | def find(self, selector): | 192 | def find(self, selector): |
155 | """ Find single element by CSS selector """ | 193 | """ Find single element by CSS selector """ |
156 | return self.driver.find_element_by_css_selector(selector) | 194 | return self.driver.find_element(By.CSS_SELECTOR, selector) |
157 | 195 | ||
158 | def find_all(self, selector): | 196 | def find_all(self, selector): |
159 | """ Find all elements matching CSS selector """ | 197 | """ Find all elements matching CSS selector """ |
160 | return self.driver.find_elements_by_css_selector(selector) | 198 | return self.driver.find_elements(By.CSS_SELECTOR, selector) |
161 | 199 | ||
162 | def element_exists(self, selector): | 200 | def element_exists(self, selector): |
163 | """ | 201 | """ |
@@ -170,18 +208,34 @@ class SeleniumTestCaseBase(unittest.TestCase): | |||
170 | """ Return the element which currently has focus on the page """ | 208 | """ Return the element which currently has focus on the page """ |
171 | return self.driver.switch_to.active_element | 209 | return self.driver.switch_to.active_element |
172 | 210 | ||
173 | def wait_until_present(self, selector): | 211 | def wait_until_present(self, selector, poll=0.5): |
174 | """ Wait until element matching CSS selector is on the page """ | 212 | """ Wait until element matching CSS selector is on the page """ |
175 | is_present = lambda driver: self.find(selector) | 213 | is_present = lambda driver: self.find(selector) |
176 | msg = 'An element matching "%s" should be on the page' % selector | 214 | msg = 'An element matching "%s" should be on the page' % selector |
177 | element = Wait(self.driver).until(is_present, msg) | 215 | element = Wait(self.driver, poll=poll).until(is_present, msg) |
216 | if poll > 2: | ||
217 | time.sleep(poll) # element may need more time to become present | ||
178 | return element | 218 | return element |
179 | 219 | ||
180 | def wait_until_visible(self, selector): | 220 | def wait_until_visible(self, selector, poll=1): |
181 | """ Wait until element matching CSS selector is visible on the page """ | 221 | """ Wait until element matching CSS selector is visible on the page """ |
182 | is_visible = lambda driver: self.find(selector).is_displayed() | 222 | is_visible = lambda driver: self.find(selector).is_displayed() |
183 | msg = 'An element matching "%s" should be visible' % selector | 223 | msg = 'An element matching "%s" should be visible' % selector |
184 | Wait(self.driver).until(is_visible, msg) | 224 | Wait(self.driver, poll=poll).until(is_visible, msg) |
225 | time.sleep(poll) # wait for visibility to settle | ||
226 | return self.find(selector) | ||
227 | |||
228 | def wait_until_clickable(self, selector, poll=1): | ||
229 | """ Wait until element matching CSS selector is clickable on the page """ | ||
230 | WebDriverWait( | ||
231 | self.driver, | ||
232 | Wait._TIMEOUT, | ||
233 | poll_frequency=poll | ||
234 | ).until( | ||
235 | EC.element_to_be_clickable((By.ID, selector.removeprefix('#') | ||
236 | ) | ||
237 | ) | ||
238 | ) | ||
185 | return self.find(selector) | 239 | return self.find(selector) |
186 | 240 | ||
187 | def wait_until_focused(self, selector): | 241 | def wait_until_focused(self, selector): |
diff --git a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py index 8423d3dab2..b9356a0344 100644 --- a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py +++ b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py | |||
@@ -7,13 +7,18 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | import re | 11 | import re |
11 | 12 | ||
12 | from django.urls import reverse | 13 | from django.urls import reverse |
14 | from selenium.webdriver.support.select import Select | ||
13 | from django.utils import timezone | 15 | from django.utils import timezone |
16 | from bldcontrol.models import BuildRequest | ||
14 | from tests.browser.selenium_helpers import SeleniumTestCase | 17 | from tests.browser.selenium_helpers import SeleniumTestCase |
15 | 18 | ||
16 | from orm.models import BitbakeVersion, Release, Project, Build, Target | 19 | from orm.models import BitbakeVersion, Layer, Layer_Version, Recipe, Release, Project, Build, Target, Task |
20 | |||
21 | from selenium.webdriver.common.by import By | ||
17 | 22 | ||
18 | 23 | ||
19 | class TestAllBuildsPage(SeleniumTestCase): | 24 | class TestAllBuildsPage(SeleniumTestCase): |
@@ -23,7 +28,8 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
23 | CLI_BUILDS_PROJECT_NAME = 'command line builds' | 28 | CLI_BUILDS_PROJECT_NAME = 'command line builds' |
24 | 29 | ||
25 | def setUp(self): | 30 | def setUp(self): |
26 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', | 31 | builldir = os.environ.get('BUILDDIR', './') |
32 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/', | ||
27 | branch='master', dirpath='') | 33 | branch='master', dirpath='') |
28 | release = Release.objects.create(name='release1', | 34 | release = Release.objects.create(name='release1', |
29 | bitbake_version=bbv) | 35 | bitbake_version=bbv) |
@@ -69,7 +75,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
69 | '[data-role="data-recent-build-buildtime-field"]' % build.id | 75 | '[data-role="data-recent-build-buildtime-field"]' % build.id |
70 | 76 | ||
71 | # because this loads via Ajax, wait for it to be visible | 77 | # because this loads via Ajax, wait for it to be visible |
72 | self.wait_until_present(selector) | 78 | self.wait_until_visible(selector) |
73 | 79 | ||
74 | build_time_spans = self.find_all(selector) | 80 | build_time_spans = self.find_all(selector) |
75 | 81 | ||
@@ -79,7 +85,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
79 | 85 | ||
80 | def _get_row_for_build(self, build): | 86 | def _get_row_for_build(self, build): |
81 | """ Get the table row for the build from the all builds table """ | 87 | """ Get the table row for the build from the all builds table """ |
82 | self.wait_until_present('#allbuildstable') | 88 | self.wait_until_visible('#allbuildstable') |
83 | 89 | ||
84 | rows = self.find_all('#allbuildstable tr') | 90 | rows = self.find_all('#allbuildstable tr') |
85 | 91 | ||
@@ -91,7 +97,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
91 | found_row = None | 97 | found_row = None |
92 | for row in rows: | 98 | for row in rows: |
93 | 99 | ||
94 | outcome_links = row.find_elements_by_css_selector(selector) | 100 | outcome_links = row.find_elements(By.CSS_SELECTOR, selector) |
95 | if len(outcome_links) == 1: | 101 | if len(outcome_links) == 1: |
96 | found_row = row | 102 | found_row = row |
97 | break | 103 | break |
@@ -100,6 +106,66 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
100 | 106 | ||
101 | return found_row | 107 | return found_row |
102 | 108 | ||
109 | def _get_create_builds(self, **kwargs): | ||
110 | """ Create a build and return the build object """ | ||
111 | build1 = Build.objects.create(**self.project1_build_success) | ||
112 | build2 = Build.objects.create(**self.project1_build_failure) | ||
113 | |||
114 | # add some targets to these builds so they have recipe links | ||
115 | # (and so we can find the row in the ToasterTable corresponding to | ||
116 | # a particular build) | ||
117 | Target.objects.create(build=build1, target='foo') | ||
118 | Target.objects.create(build=build2, target='bar') | ||
119 | |||
120 | if kwargs: | ||
121 | # Create kwargs.get('success') builds with success status with target | ||
122 | # and kwargs.get('failure') builds with failure status with target | ||
123 | for i in range(kwargs.get('success', 0)): | ||
124 | now = timezone.now() | ||
125 | self.project1_build_success['started_on'] = now | ||
126 | self.project1_build_success[ | ||
127 | 'completed_on'] = now - timezone.timedelta(days=i) | ||
128 | build = Build.objects.create(**self.project1_build_success) | ||
129 | Target.objects.create(build=build, | ||
130 | target=f'{i}_success_recipe', | ||
131 | task=f'{i}_success_task') | ||
132 | |||
133 | self._set_buildRequest_and_task_on_build(build) | ||
134 | for i in range(kwargs.get('failure', 0)): | ||
135 | now = timezone.now() | ||
136 | self.project1_build_failure['started_on'] = now | ||
137 | self.project1_build_failure[ | ||
138 | 'completed_on'] = now - timezone.timedelta(days=i) | ||
139 | build = Build.objects.create(**self.project1_build_failure) | ||
140 | Target.objects.create(build=build, | ||
141 | target=f'{i}_fail_recipe', | ||
142 | task=f'{i}_fail_task') | ||
143 | self._set_buildRequest_and_task_on_build(build) | ||
144 | return build1, build2 | ||
145 | |||
146 | def _create_recipe(self): | ||
147 | """ Add a recipe to the database and return it """ | ||
148 | layer = Layer.objects.create() | ||
149 | layer_version = Layer_Version.objects.create(layer=layer) | ||
150 | return Recipe.objects.create(name='recipe_foo', layer_version=layer_version) | ||
151 | |||
152 | def _set_buildRequest_and_task_on_build(self, build): | ||
153 | """ Set buildRequest and task on build """ | ||
154 | build.recipes_parsed = 1 | ||
155 | build.save() | ||
156 | buildRequest = BuildRequest.objects.create( | ||
157 | build=build, | ||
158 | project=self.project1, | ||
159 | state=BuildRequest.REQ_COMPLETED) | ||
160 | build.build_request = buildRequest | ||
161 | recipe = self._create_recipe() | ||
162 | task = Task.objects.create(build=build, | ||
163 | recipe=recipe, | ||
164 | task_name='task', | ||
165 | outcome=Task.OUTCOME_SUCCESS) | ||
166 | task.save() | ||
167 | build.save() | ||
168 | |||
103 | def test_show_tasks_with_suffix(self): | 169 | def test_show_tasks_with_suffix(self): |
104 | """ Task should be shown as suffix on build name """ | 170 | """ Task should be shown as suffix on build name """ |
105 | build = Build.objects.create(**self.project1_build_success) | 171 | build = Build.objects.create(**self.project1_build_success) |
@@ -109,7 +175,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
109 | 175 | ||
110 | url = reverse('all-builds') | 176 | url = reverse('all-builds') |
111 | self.get(url) | 177 | self.get(url) |
112 | self.wait_until_present('td[class="target"]') | 178 | self.wait_until_visible('td[class="target"]') |
113 | 179 | ||
114 | cell = self.find('td[class="target"]') | 180 | cell = self.find('td[class="target"]') |
115 | content = cell.get_attribute('innerHTML') | 181 | content = cell.get_attribute('innerHTML') |
@@ -126,23 +192,25 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
126 | but should be shown for other builds | 192 | but should be shown for other builds |
127 | """ | 193 | """ |
128 | build1 = Build.objects.create(**self.project1_build_success) | 194 | build1 = Build.objects.create(**self.project1_build_success) |
129 | default_build = Build.objects.create(**self.default_project_build_success) | 195 | default_build = Build.objects.create( |
196 | **self.default_project_build_success) | ||
130 | 197 | ||
131 | url = reverse('all-builds') | 198 | url = reverse('all-builds') |
132 | self.get(url) | 199 | self.get(url) |
133 | 200 | ||
134 | # shouldn't see a rebuild button for command-line builds | ||
135 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id | ||
136 | run_again_button = self.find_all(selector) | ||
137 | self.assertEqual(len(run_again_button), 0, | ||
138 | 'should not see a rebuild button for cli builds') | ||
139 | |||
140 | # should see a rebuild button for non-command-line builds | 201 | # should see a rebuild button for non-command-line builds |
202 | self.wait_until_visible('#allbuildstable tbody tr') | ||
141 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id | 203 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id |
142 | run_again_button = self.find_all(selector) | 204 | run_again_button = self.find_all(selector) |
143 | self.assertEqual(len(run_again_button), 1, | 205 | self.assertEqual(len(run_again_button), 1, |
144 | 'should see a rebuild button for non-cli builds') | 206 | 'should see a rebuild button for non-cli builds') |
145 | 207 | ||
208 | # shouldn't see a rebuild button for command-line builds | ||
209 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id | ||
210 | run_again_button = self.find_all(selector) | ||
211 | self.assertEqual(len(run_again_button), 0, | ||
212 | 'should not see a rebuild button for cli builds') | ||
213 | |||
146 | def test_tooltips_on_project_name(self): | 214 | def test_tooltips_on_project_name(self): |
147 | """ | 215 | """ |
148 | Test tooltips shown next to project name in the main table | 216 | Test tooltips shown next to project name in the main table |
@@ -156,6 +224,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
156 | 224 | ||
157 | url = reverse('all-builds') | 225 | url = reverse('all-builds') |
158 | self.get(url) | 226 | self.get(url) |
227 | self.wait_until_visible('#allbuildstable', poll=3) | ||
159 | 228 | ||
160 | # get the project name cells from the table | 229 | # get the project name cells from the table |
161 | cells = self.find_all('#allbuildstable td[class="project"]') | 230 | cells = self.find_all('#allbuildstable td[class="project"]') |
@@ -164,7 +233,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
164 | 233 | ||
165 | for cell in cells: | 234 | for cell in cells: |
166 | content = cell.get_attribute('innerHTML') | 235 | content = cell.get_attribute('innerHTML') |
167 | help_icons = cell.find_elements_by_css_selector(selector) | 236 | help_icons = cell.find_elements(By.CSS_SELECTOR, selector) |
168 | 237 | ||
169 | if re.search(self.PROJECT_NAME, content): | 238 | if re.search(self.PROJECT_NAME, content): |
170 | # no help icon next to non-cli project name | 239 | # no help icon next to non-cli project name |
@@ -184,38 +253,224 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
184 | recent builds area; failed builds should not have links on the time column, | 253 | recent builds area; failed builds should not have links on the time column, |
185 | or in the recent builds area | 254 | or in the recent builds area |
186 | """ | 255 | """ |
187 | build1 = Build.objects.create(**self.project1_build_success) | 256 | build1, build2 = self._get_create_builds() |
188 | build2 = Build.objects.create(**self.project1_build_failure) | ||
189 | |||
190 | # add some targets to these builds so they have recipe links | ||
191 | # (and so we can find the row in the ToasterTable corresponding to | ||
192 | # a particular build) | ||
193 | Target.objects.create(build=build1, target='foo') | ||
194 | Target.objects.create(build=build2, target='bar') | ||
195 | 257 | ||
196 | url = reverse('all-builds') | 258 | url = reverse('all-builds') |
197 | self.get(url) | 259 | self.get(url) |
260 | self.wait_until_visible('#allbuildstable', poll=3) | ||
198 | 261 | ||
199 | # test recent builds area for successful build | 262 | # test recent builds area for successful build |
200 | element = self._get_build_time_element(build1) | 263 | element = self._get_build_time_element(build1) |
201 | links = element.find_elements_by_css_selector('a') | 264 | links = element.find_elements(By.CSS_SELECTOR, 'a') |
202 | msg = 'should be a link on the build time for a successful recent build' | 265 | msg = 'should be a link on the build time for a successful recent build' |
203 | self.assertEquals(len(links), 1, msg) | 266 | self.assertEqual(len(links), 1, msg) |
204 | 267 | ||
205 | # test recent builds area for failed build | 268 | # test recent builds area for failed build |
206 | element = self._get_build_time_element(build2) | 269 | element = self._get_build_time_element(build2) |
207 | links = element.find_elements_by_css_selector('a') | 270 | links = element.find_elements(By.CSS_SELECTOR, 'a') |
208 | msg = 'should not be a link on the build time for a failed recent build' | 271 | msg = 'should not be a link on the build time for a failed recent build' |
209 | self.assertEquals(len(links), 0, msg) | 272 | self.assertEqual(len(links), 0, msg) |
210 | 273 | ||
211 | # test the time column for successful build | 274 | # test the time column for successful build |
212 | build1_row = self._get_row_for_build(build1) | 275 | build1_row = self._get_row_for_build(build1) |
213 | links = build1_row.find_elements_by_css_selector('td.time a') | 276 | links = build1_row.find_elements(By.CSS_SELECTOR, 'td.time a') |
214 | msg = 'should be a link on the build time for a successful build' | 277 | msg = 'should be a link on the build time for a successful build' |
215 | self.assertEquals(len(links), 1, msg) | 278 | self.assertEqual(len(links), 1, msg) |
216 | 279 | ||
217 | # test the time column for failed build | 280 | # test the time column for failed build |
218 | build2_row = self._get_row_for_build(build2) | 281 | build2_row = self._get_row_for_build(build2) |
219 | links = build2_row.find_elements_by_css_selector('td.time a') | 282 | links = build2_row.find_elements(By.CSS_SELECTOR, 'td.time a') |
220 | msg = 'should not be a link on the build time for a failed build' | 283 | msg = 'should not be a link on the build time for a failed build' |
221 | self.assertEquals(len(links), 0, msg) | 284 | self.assertEqual(len(links), 0, msg) |
285 | |||
286 | def test_builds_table_search_box(self): | ||
287 | """ Test the search box in the builds table on the all builds page """ | ||
288 | self._get_create_builds() | ||
289 | |||
290 | url = reverse('all-builds') | ||
291 | self.get(url) | ||
292 | |||
293 | # Check search box is present and works | ||
294 | self.wait_until_visible('#allbuildstable tbody tr') | ||
295 | search_box = self.find('#search-input-allbuildstable') | ||
296 | self.assertTrue(search_box.is_displayed()) | ||
297 | |||
298 | # Check that we can search for a build by recipe name | ||
299 | search_box.send_keys('foo') | ||
300 | search_btn = self.find('#search-submit-allbuildstable') | ||
301 | search_btn.click() | ||
302 | self.wait_until_visible('#allbuildstable tbody tr') | ||
303 | rows = self.find_all('#allbuildstable tbody tr') | ||
304 | self.assertTrue(len(rows) >= 1) | ||
305 | |||
306 | def test_filtering_on_failure_tasks_column(self): | ||
307 | """ Test the filtering on failure tasks column in the builds table on the all builds page """ | ||
308 | def _check_if_filter_failed_tasks_column_is_visible(): | ||
309 | # check if failed tasks filter column is visible, if not click on it | ||
310 | # Check edit column | ||
311 | edit_column = self.find('#edit-columns-button') | ||
312 | self.assertTrue(edit_column.is_displayed()) | ||
313 | edit_column.click() | ||
314 | # Check dropdown is visible | ||
315 | self.wait_until_visible('ul.dropdown-menu.editcol') | ||
316 | filter_fails_task_checkbox = self.find('#checkbox-failed_tasks') | ||
317 | if not filter_fails_task_checkbox.is_selected(): | ||
318 | filter_fails_task_checkbox.click() | ||
319 | edit_column.click() | ||
320 | |||
321 | self._get_create_builds(success=10, failure=10) | ||
322 | |||
323 | url = reverse('all-builds') | ||
324 | self.get(url) | ||
325 | |||
326 | # Check filtering on failure tasks column | ||
327 | self.wait_until_visible('#allbuildstable tbody tr') | ||
328 | _check_if_filter_failed_tasks_column_is_visible() | ||
329 | failed_tasks_filter = self.find('#failed_tasks_filter') | ||
330 | failed_tasks_filter.click() | ||
331 | # Check popup is visible | ||
332 | self.wait_until_visible('#filter-modal-allbuildstable') | ||
333 | self.assertTrue( | ||
334 | self.find('#filter-modal-allbuildstable').is_displayed()) | ||
335 | # Check that we can filter by failure tasks | ||
336 | build_without_failure_tasks = self.find( | ||
337 | '#failed_tasks_filter\\:without_failed_tasks') | ||
338 | build_without_failure_tasks.click() | ||
339 | # click on apply button | ||
340 | self.find('#filter-modal-allbuildstable .btn-primary').click() | ||
341 | self.wait_until_visible('#allbuildstable tbody tr') | ||
342 | # Check if filter is applied, by checking if failed_tasks_filter has btn-primary class | ||
343 | self.assertTrue(self.find('#failed_tasks_filter').get_attribute( | ||
344 | 'class').find('btn-primary') != -1) | ||
345 | |||
346 | def test_filtering_on_completedOn_column(self): | ||
347 | """ Test the filtering on completed_on column in the builds table on the all builds page """ | ||
348 | self._get_create_builds(success=10, failure=10) | ||
349 | |||
350 | url = reverse('all-builds') | ||
351 | self.get(url) | ||
352 | |||
353 | # Check filtering on completed_on column | ||
354 | self.wait_until_visible('#allbuildstable tbody tr') | ||
355 | completed_on_filter = self.find('#completed_on_filter') | ||
356 | completed_on_filter.click() | ||
357 | # Check popup is visible | ||
358 | self.wait_until_visible('#filter-modal-allbuildstable') | ||
359 | self.assertTrue( | ||
360 | self.find('#filter-modal-allbuildstable').is_displayed()) | ||
361 | # Check that we can filter by completed_on date range | ||
362 | build_without_failure_tasks = self.find( | ||
363 | '#completed_on_filter\\:date_range') | ||
364 | build_without_failure_tasks.click() | ||
365 | # click on apply button | ||
366 | self.find('#filter-modal-allbuildstable .btn-primary').click() | ||
367 | self.wait_until_visible('#allbuildstable tbody tr') | ||
368 | # Check if filter is applied, by checking if completed_on_filter has btn-primary class | ||
369 | self.assertTrue(self.find('#completed_on_filter').get_attribute( | ||
370 | 'class').find('btn-primary') != -1) | ||
371 | |||
372 | # Filter by date range | ||
373 | self.find('#completed_on_filter').click() | ||
374 | self.wait_until_visible('#filter-modal-allbuildstable') | ||
375 | date_ranges = self.driver.find_elements( | ||
376 | By.XPATH, '//input[@class="form-control hasDatepicker"]') | ||
377 | today = timezone.now() | ||
378 | yestersday = today - timezone.timedelta(days=1) | ||
379 | date_ranges[0].send_keys(yestersday.strftime('%Y-%m-%d')) | ||
380 | date_ranges[1].send_keys(today.strftime('%Y-%m-%d')) | ||
381 | self.find('#filter-modal-allbuildstable .btn-primary').click() | ||
382 | self.wait_until_visible('#allbuildstable tbody tr') | ||
383 | self.assertTrue(self.find('#completed_on_filter').get_attribute( | ||
384 | 'class').find('btn-primary') != -1) | ||
385 | # Check if filter is applied, number of builds displayed should be 6 | ||
386 | self.assertTrue(len(self.find_all('#allbuildstable tbody tr')) >= 4) | ||
387 | |||
388 | def test_builds_table_editColumn(self): | ||
389 | """ Test the edit column feature in the builds table on the all builds page """ | ||
390 | self._get_create_builds(success=10, failure=10) | ||
391 | |||
392 | def test_edit_column(check_box_id): | ||
393 | # Check that we can hide/show table column | ||
394 | check_box = self.find(f'#{check_box_id}') | ||
395 | th_class = str(check_box_id).replace('checkbox-', '') | ||
396 | if check_box.is_selected(): | ||
397 | # check if column is visible in table | ||
398 | self.assertTrue( | ||
399 | self.find( | ||
400 | f'#allbuildstable thead th.{th_class}' | ||
401 | ).is_displayed(), | ||
402 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
403 | ) | ||
404 | check_box.click() | ||
405 | # check if column is hidden in table | ||
406 | self.assertFalse( | ||
407 | self.find( | ||
408 | f'#allbuildstable thead th.{th_class}' | ||
409 | ).is_displayed(), | ||
410 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
411 | ) | ||
412 | else: | ||
413 | # check if column is hidden in table | ||
414 | self.assertFalse( | ||
415 | self.find( | ||
416 | f'#allbuildstable thead th.{th_class}' | ||
417 | ).is_displayed(), | ||
418 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
419 | ) | ||
420 | check_box.click() | ||
421 | # check if column is visible in table | ||
422 | self.assertTrue( | ||
423 | self.find( | ||
424 | f'#allbuildstable thead th.{th_class}' | ||
425 | ).is_displayed(), | ||
426 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
427 | ) | ||
428 | url = reverse('all-builds') | ||
429 | self.get(url) | ||
430 | self.wait_until_visible('#allbuildstable tbody tr') | ||
431 | |||
432 | # Check edit column | ||
433 | edit_column = self.find('#edit-columns-button') | ||
434 | self.assertTrue(edit_column.is_displayed()) | ||
435 | edit_column.click() | ||
436 | # Check dropdown is visible | ||
437 | self.wait_until_visible('ul.dropdown-menu.editcol') | ||
438 | |||
439 | # Check that we can hide the edit column | ||
440 | test_edit_column('checkbox-errors_no') | ||
441 | test_edit_column('checkbox-failed_tasks') | ||
442 | test_edit_column('checkbox-image_files') | ||
443 | test_edit_column('checkbox-project') | ||
444 | test_edit_column('checkbox-started_on') | ||
445 | test_edit_column('checkbox-time') | ||
446 | test_edit_column('checkbox-warnings_no') | ||
447 | |||
448 | def test_builds_table_show_rows(self): | ||
449 | """ Test the show rows feature in the builds table on the all builds page """ | ||
450 | self._get_create_builds(success=100, failure=100) | ||
451 | |||
452 | def test_show_rows(row_to_show, show_row_link): | ||
453 | # Check that we can show rows == row_to_show | ||
454 | show_row_link.select_by_value(str(row_to_show)) | ||
455 | self.wait_until_visible('#allbuildstable tbody tr', poll=3) | ||
456 | # check at least some rows are visible | ||
457 | self.assertTrue( | ||
458 | len(self.find_all('#allbuildstable tbody tr')) > 0 | ||
459 | ) | ||
460 | |||
461 | url = reverse('all-builds') | ||
462 | self.get(url) | ||
463 | self.wait_until_visible('#allbuildstable tbody tr') | ||
464 | |||
465 | show_rows = self.driver.find_elements( | ||
466 | By.XPATH, | ||
467 | '//select[@class="form-control pagesize-allbuildstable"]' | ||
468 | ) | ||
469 | # Check show rows | ||
470 | for show_row_link in show_rows: | ||
471 | show_row_link = Select(show_row_link) | ||
472 | test_show_rows(10, show_row_link) | ||
473 | test_show_rows(25, show_row_link) | ||
474 | test_show_rows(50, show_row_link) | ||
475 | test_show_rows(100, show_row_link) | ||
476 | test_show_rows(150, show_row_link) | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py index 15b03400f9..9ed1901cc9 100644 --- a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py +++ b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py | |||
@@ -7,15 +7,20 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | import re | 11 | import re |
11 | 12 | ||
12 | from django.urls import reverse | 13 | from django.urls import reverse |
13 | from django.utils import timezone | 14 | from django.utils import timezone |
15 | from selenium.webdriver.support.select import Select | ||
14 | from tests.browser.selenium_helpers import SeleniumTestCase | 16 | from tests.browser.selenium_helpers import SeleniumTestCase |
15 | 17 | ||
16 | from orm.models import BitbakeVersion, Release, Project, Build | 18 | from orm.models import BitbakeVersion, Release, Project, Build |
17 | from orm.models import ProjectVariable | 19 | from orm.models import ProjectVariable |
18 | 20 | ||
21 | from selenium.webdriver.common.by import By | ||
22 | |||
23 | |||
19 | class TestAllProjectsPage(SeleniumTestCase): | 24 | class TestAllProjectsPage(SeleniumTestCase): |
20 | """ Browser tests for projects page /projects/ """ | 25 | """ Browser tests for projects page /projects/ """ |
21 | 26 | ||
@@ -25,7 +30,8 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
25 | 30 | ||
26 | def setUp(self): | 31 | def setUp(self): |
27 | """ Add default project manually """ | 32 | """ Add default project manually """ |
28 | project = Project.objects.create_project(self.CLI_BUILDS_PROJECT_NAME, None) | 33 | project = Project.objects.create_project( |
34 | self.CLI_BUILDS_PROJECT_NAME, None) | ||
29 | self.default_project = project | 35 | self.default_project = project |
30 | self.default_project.is_default = True | 36 | self.default_project.is_default = True |
31 | self.default_project.save() | 37 | self.default_project.save() |
@@ -35,6 +41,17 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
35 | 41 | ||
36 | self.release = None | 42 | self.release = None |
37 | 43 | ||
44 | def _create_projects(self, nb_project=10): | ||
45 | projects = [] | ||
46 | for i in range(1, nb_project + 1): | ||
47 | projects.append( | ||
48 | Project( | ||
49 | name='test project {}'.format(i), | ||
50 | release=self.release, | ||
51 | ) | ||
52 | ) | ||
53 | Project.objects.bulk_create(projects) | ||
54 | |||
38 | def _add_build_to_default_project(self): | 55 | def _add_build_to_default_project(self): |
39 | """ Add a build to the default project (not used in all tests) """ | 56 | """ Add a build to the default project (not used in all tests) """ |
40 | now = timezone.now() | 57 | now = timezone.now() |
@@ -45,12 +62,14 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
45 | 62 | ||
46 | def _add_non_default_project(self): | 63 | def _add_non_default_project(self): |
47 | """ Add another project """ | 64 | """ Add another project """ |
48 | bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/', | 65 | builddir = os.environ.get('BUILDDIR', './') |
66 | bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builddir}/', | ||
49 | branch='master', dirpath='') | 67 | branch='master', dirpath='') |
50 | self.release = Release.objects.create(name='test release', | 68 | self.release = Release.objects.create(name='test release', |
51 | branch_name='master', | 69 | branch_name='master', |
52 | bitbake_version=bbv) | 70 | bitbake_version=bbv) |
53 | self.project = Project.objects.create_project(self.PROJECT_NAME, self.release) | 71 | self.project = Project.objects.create_project( |
72 | self.PROJECT_NAME, self.release) | ||
54 | self.project.is_default = False | 73 | self.project.is_default = False |
55 | self.project.save() | 74 | self.project.save() |
56 | 75 | ||
@@ -62,7 +81,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
62 | 81 | ||
63 | def _get_row_for_project(self, project_name): | 82 | def _get_row_for_project(self, project_name): |
64 | """ Get the HTML row for a project, or None if not found """ | 83 | """ Get the HTML row for a project, or None if not found """ |
65 | self.wait_until_present('#projectstable tbody tr') | 84 | self.wait_until_visible('#projectstable tbody tr', poll=3) |
66 | rows = self.find_all('#projectstable tbody tr') | 85 | rows = self.find_all('#projectstable tbody tr') |
67 | 86 | ||
68 | # find the row with a project name matching the one supplied | 87 | # find the row with a project name matching the one supplied |
@@ -93,7 +112,8 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
93 | url = reverse('all-projects') | 112 | url = reverse('all-projects') |
94 | self.get(url) | 113 | self.get(url) |
95 | 114 | ||
96 | default_project_row = self._get_row_for_project(self.default_project.name) | 115 | default_project_row = self._get_row_for_project( |
116 | self.default_project.name) | ||
97 | 117 | ||
98 | self.assertNotEqual(default_project_row, None, | 118 | self.assertNotEqual(default_project_row, None, |
99 | 'default project "cli builds" should be in page') | 119 | 'default project "cli builds" should be in page') |
@@ -113,11 +133,12 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
113 | self.wait_until_visible("#projectstable tr") | 133 | self.wait_until_visible("#projectstable tr") |
114 | 134 | ||
115 | # find the row for the default project | 135 | # find the row for the default project |
116 | default_project_row = self._get_row_for_project(self.default_project.name) | 136 | default_project_row = self._get_row_for_project( |
137 | self.default_project.name) | ||
117 | 138 | ||
118 | # check the release text for the default project | 139 | # check the release text for the default project |
119 | selector = 'span[data-project-field="release"] span.text-muted' | 140 | selector = 'span[data-project-field="release"] span.text-muted' |
120 | element = default_project_row.find_element_by_css_selector(selector) | 141 | element = default_project_row.find_element(By.CSS_SELECTOR, selector) |
121 | text = element.text.strip() | 142 | text = element.text.strip() |
122 | self.assertEqual(text, 'Not applicable', | 143 | self.assertEqual(text, 'Not applicable', |
123 | 'release should be "not applicable" for default project') | 144 | 'release should be "not applicable" for default project') |
@@ -127,7 +148,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
127 | 148 | ||
128 | # check the link in the release cell for the other project | 149 | # check the link in the release cell for the other project |
129 | selector = 'span[data-project-field="release"]' | 150 | selector = 'span[data-project-field="release"]' |
130 | element = other_project_row.find_element_by_css_selector(selector) | 151 | element = other_project_row.find_element(By.CSS_SELECTOR, selector) |
131 | text = element.text.strip() | 152 | text = element.text.strip() |
132 | self.assertEqual(text, self.release.name, | 153 | self.assertEqual(text, self.release.name, |
133 | 'release name should be shown for non-default project') | 154 | 'release name should be shown for non-default project') |
@@ -148,11 +169,12 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
148 | self.wait_until_visible("#projectstable tr") | 169 | self.wait_until_visible("#projectstable tr") |
149 | 170 | ||
150 | # find the row for the default project | 171 | # find the row for the default project |
151 | default_project_row = self._get_row_for_project(self.default_project.name) | 172 | default_project_row = self._get_row_for_project( |
173 | self.default_project.name) | ||
152 | 174 | ||
153 | # check the machine cell for the default project | 175 | # check the machine cell for the default project |
154 | selector = 'span[data-project-field="machine"] span.text-muted' | 176 | selector = 'span[data-project-field="machine"] span.text-muted' |
155 | element = default_project_row.find_element_by_css_selector(selector) | 177 | element = default_project_row.find_element(By.CSS_SELECTOR, selector) |
156 | text = element.text.strip() | 178 | text = element.text.strip() |
157 | self.assertEqual(text, 'Not applicable', | 179 | self.assertEqual(text, 'Not applicable', |
158 | 'machine should be not applicable for default project') | 180 | 'machine should be not applicable for default project') |
@@ -162,7 +184,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
162 | 184 | ||
163 | # check the link in the machine cell for the other project | 185 | # check the link in the machine cell for the other project |
164 | selector = 'span[data-project-field="machine"]' | 186 | selector = 'span[data-project-field="machine"]' |
165 | element = other_project_row.find_element_by_css_selector(selector) | 187 | element = other_project_row.find_element(By.CSS_SELECTOR, selector) |
166 | text = element.text.strip() | 188 | text = element.text.strip() |
167 | self.assertEqual(text, self.MACHINE_NAME, | 189 | self.assertEqual(text, self.MACHINE_NAME, |
168 | 'machine name should be shown for non-default project') | 190 | 'machine name should be shown for non-default project') |
@@ -183,13 +205,15 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
183 | self.get(reverse('all-projects')) | 205 | self.get(reverse('all-projects')) |
184 | 206 | ||
185 | # find the row for the default project | 207 | # find the row for the default project |
186 | default_project_row = self._get_row_for_project(self.default_project.name) | 208 | default_project_row = self._get_row_for_project( |
209 | self.default_project.name) | ||
187 | 210 | ||
188 | # check the link on the name field | 211 | # check the link on the name field |
189 | selector = 'span[data-project-field="name"] a' | 212 | selector = 'span[data-project-field="name"] a' |
190 | element = default_project_row.find_element_by_css_selector(selector) | 213 | element = default_project_row.find_element(By.CSS_SELECTOR, selector) |
191 | link_url = element.get_attribute('href').strip() | 214 | link_url = element.get_attribute('href').strip() |
192 | expected_url = reverse('projectbuilds', args=(self.default_project.id,)) | 215 | expected_url = reverse( |
216 | 'projectbuilds', args=(self.default_project.id,)) | ||
193 | msg = 'link on default project name should point to builds but was %s' % link_url | 217 | msg = 'link on default project name should point to builds but was %s' % link_url |
194 | self.assertTrue(link_url.endswith(expected_url), msg) | 218 | self.assertTrue(link_url.endswith(expected_url), msg) |
195 | 219 | ||
@@ -198,8 +222,116 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
198 | 222 | ||
199 | # check the link for the other project | 223 | # check the link for the other project |
200 | selector = 'span[data-project-field="name"] a' | 224 | selector = 'span[data-project-field="name"] a' |
201 | element = other_project_row.find_element_by_css_selector(selector) | 225 | element = other_project_row.find_element(By.CSS_SELECTOR, selector) |
202 | link_url = element.get_attribute('href').strip() | 226 | link_url = element.get_attribute('href').strip() |
203 | expected_url = reverse('project', args=(self.project.id,)) | 227 | expected_url = reverse('project', args=(self.project.id,)) |
204 | msg = 'link on project name should point to configuration but was %s' % link_url | 228 | msg = 'link on project name should point to configuration but was %s' % link_url |
205 | self.assertTrue(link_url.endswith(expected_url), msg) | 229 | self.assertTrue(link_url.endswith(expected_url), msg) |
230 | |||
231 | def test_allProject_table_search_box(self): | ||
232 | """ Test the search box in the all project table on the all projects page """ | ||
233 | self._create_projects() | ||
234 | |||
235 | url = reverse('all-projects') | ||
236 | self.get(url) | ||
237 | |||
238 | # Check search box is present and works | ||
239 | self.wait_until_visible('#projectstable tbody tr', poll=3) | ||
240 | search_box = self.find('#search-input-projectstable') | ||
241 | self.assertTrue(search_box.is_displayed()) | ||
242 | |||
243 | # Check that we can search for a project by project name | ||
244 | search_box.send_keys('test project 10') | ||
245 | search_btn = self.find('#search-submit-projectstable') | ||
246 | search_btn.click() | ||
247 | self.wait_until_visible('#projectstable tbody tr', poll=3) | ||
248 | rows = self.find_all('#projectstable tbody tr') | ||
249 | self.assertTrue(len(rows) == 1) | ||
250 | |||
251 | def test_allProject_table_editColumn(self): | ||
252 | """ Test the edit column feature in the projects table on the all projects page """ | ||
253 | self._create_projects() | ||
254 | |||
255 | def test_edit_column(check_box_id): | ||
256 | # Check that we can hide/show table column | ||
257 | check_box = self.find(f'#{check_box_id}') | ||
258 | th_class = str(check_box_id).replace('checkbox-', '') | ||
259 | if check_box.is_selected(): | ||
260 | # check if column is visible in table | ||
261 | self.assertTrue( | ||
262 | self.find( | ||
263 | f'#projectstable thead th.{th_class}' | ||
264 | ).is_displayed(), | ||
265 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
266 | ) | ||
267 | check_box.click() | ||
268 | # check if column is hidden in table | ||
269 | self.assertFalse( | ||
270 | self.find( | ||
271 | f'#projectstable thead th.{th_class}' | ||
272 | ).is_displayed(), | ||
273 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
274 | ) | ||
275 | else: | ||
276 | # check if column is hidden in table | ||
277 | self.assertFalse( | ||
278 | self.find( | ||
279 | f'#projectstable thead th.{th_class}' | ||
280 | ).is_displayed(), | ||
281 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
282 | ) | ||
283 | check_box.click() | ||
284 | # check if column is visible in table | ||
285 | self.assertTrue( | ||
286 | self.find( | ||
287 | f'#projectstable thead th.{th_class}' | ||
288 | ).is_displayed(), | ||
289 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
290 | ) | ||
291 | url = reverse('all-projects') | ||
292 | self.get(url) | ||
293 | self.wait_until_visible('#projectstable tbody tr', poll=3) | ||
294 | |||
295 | # Check edit column | ||
296 | edit_column = self.find('#edit-columns-button') | ||
297 | self.assertTrue(edit_column.is_displayed()) | ||
298 | edit_column.click() | ||
299 | # Check dropdown is visible | ||
300 | self.wait_until_visible('ul.dropdown-menu.editcol') | ||
301 | |||
302 | # Check that we can hide the edit column | ||
303 | test_edit_column('checkbox-errors') | ||
304 | test_edit_column('checkbox-image_files') | ||
305 | test_edit_column('checkbox-last_build_outcome') | ||
306 | test_edit_column('checkbox-recipe_name') | ||
307 | test_edit_column('checkbox-warnings') | ||
308 | |||
309 | def test_allProject_table_show_rows(self): | ||
310 | """ Test the show rows feature in the projects table on the all projects page """ | ||
311 | self._create_projects(nb_project=200) | ||
312 | |||
313 | def test_show_rows(row_to_show, show_row_link): | ||
314 | # Check that we can show rows == row_to_show | ||
315 | show_row_link.select_by_value(str(row_to_show)) | ||
316 | self.wait_until_visible('#projectstable tbody tr', poll=3) | ||
317 | # check at least some rows are visible | ||
318 | self.assertTrue( | ||
319 | len(self.find_all('#projectstable tbody tr')) > 0 | ||
320 | ) | ||
321 | |||
322 | url = reverse('all-projects') | ||
323 | self.get(url) | ||
324 | self.wait_until_visible('#projectstable tbody tr', poll=3) | ||
325 | |||
326 | show_rows = self.driver.find_elements( | ||
327 | By.XPATH, | ||
328 | '//select[@class="form-control pagesize-projectstable"]' | ||
329 | ) | ||
330 | # Check show rows | ||
331 | for show_row_link in show_rows: | ||
332 | show_row_link = Select(show_row_link) | ||
333 | test_show_rows(10, show_row_link) | ||
334 | test_show_rows(25, show_row_link) | ||
335 | test_show_rows(50, show_row_link) | ||
336 | test_show_rows(100, show_row_link) | ||
337 | test_show_rows(150, show_row_link) | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py index efcd89b346..d838ce363a 100644 --- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py +++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py | |||
@@ -7,6 +7,7 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | from django.urls import reverse | 11 | from django.urls import reverse |
11 | from django.utils import timezone | 12 | from django.utils import timezone |
12 | 13 | ||
@@ -15,11 +16,14 @@ from tests.browser.selenium_helpers import SeleniumTestCase | |||
15 | from orm.models import Project, Release, BitbakeVersion, Build, LogMessage | 16 | from orm.models import Project, Release, BitbakeVersion, Build, LogMessage |
16 | from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable | 17 | from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable |
17 | 18 | ||
19 | from selenium.webdriver.common.by import By | ||
20 | |||
18 | class TestBuildDashboardPage(SeleniumTestCase): | 21 | class TestBuildDashboardPage(SeleniumTestCase): |
19 | """ Tests for the build dashboard /build/X """ | 22 | """ Tests for the build dashboard /build/X """ |
20 | 23 | ||
21 | def setUp(self): | 24 | def setUp(self): |
22 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', | 25 | builddir = os.environ.get('BUILDDIR', './') |
26 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/', | ||
23 | branch='master', dirpath="") | 27 | branch='master', dirpath="") |
24 | release = Release.objects.create(name='release1', | 28 | release = Release.objects.create(name='release1', |
25 | bitbake_version=bbv) | 29 | bitbake_version=bbv) |
@@ -158,6 +162,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
158 | """ | 162 | """ |
159 | url = reverse('builddashboard', args=(build.id,)) | 163 | url = reverse('builddashboard', args=(build.id,)) |
160 | self.get(url) | 164 | self.get(url) |
165 | self.wait_until_visible('#global-nav', poll=3) | ||
161 | 166 | ||
162 | def _get_build_dashboard_errors(self, build): | 167 | def _get_build_dashboard_errors(self, build): |
163 | """ | 168 | """ |
@@ -183,7 +188,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
183 | 188 | ||
184 | found = False | 189 | found = False |
185 | for element in message_elements: | 190 | for element in message_elements: |
186 | log_message_text = element.find_element_by_tag_name('pre').text.strip() | 191 | log_message_text = element.find_element(By.TAG_NAME, 'pre').text.strip() |
187 | text_matches = (log_message_text == expected_text) | 192 | text_matches = (log_message_text == expected_text) |
188 | 193 | ||
189 | log_message_pk = element.get_attribute('data-log-message-id') | 194 | log_message_pk = element.get_attribute('data-log-message-id') |
@@ -213,7 +218,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
213 | the WebElement modal match the list of text values in expected | 218 | the WebElement modal match the list of text values in expected |
214 | """ | 219 | """ |
215 | # labels containing the radio buttons we're testing for | 220 | # labels containing the radio buttons we're testing for |
216 | labels = modal.find_elements_by_css_selector(".radio") | 221 | labels = modal.find_elements(By.CSS_SELECTOR,".radio") |
217 | 222 | ||
218 | labels_text = [lab.text for lab in labels] | 223 | labels_text = [lab.text for lab in labels] |
219 | self.assertEqual(len(labels_text), len(expected)) | 224 | self.assertEqual(len(labels_text), len(expected)) |
@@ -248,7 +253,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
248 | selector = '[data-role="edit-custom-image-trigger"]' | 253 | selector = '[data-role="edit-custom-image-trigger"]' |
249 | self.click(selector) | 254 | self.click(selector) |
250 | 255 | ||
251 | modal = self.driver.find_element_by_id('edit-custom-image-modal') | 256 | modal = self.driver.find_element(By.ID, 'edit-custom-image-modal') |
252 | self.wait_until_visible("#edit-custom-image-modal") | 257 | self.wait_until_visible("#edit-custom-image-modal") |
253 | 258 | ||
254 | # recipes we expect to see in the edit custom image modal | 259 | # recipes we expect to see in the edit custom image modal |
@@ -270,7 +275,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
270 | selector = '[data-role="new-custom-image-trigger"]' | 275 | selector = '[data-role="new-custom-image-trigger"]' |
271 | self.click(selector) | 276 | self.click(selector) |
272 | 277 | ||
273 | modal = self.driver.find_element_by_id('new-custom-image-modal') | 278 | modal = self.driver.find_element(By.ID,'new-custom-image-modal') |
274 | self.wait_until_visible("#new-custom-image-modal") | 279 | self.wait_until_visible("#new-custom-image-modal") |
275 | 280 | ||
276 | # recipes we expect to see in the new custom image modal | 281 | # recipes we expect to see in the new custom image modal |
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py index c6226d60eb..675825bd40 100644 --- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py +++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py | |||
@@ -7,6 +7,7 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | from django.urls import reverse | 11 | from django.urls import reverse |
11 | from django.utils import timezone | 12 | from django.utils import timezone |
12 | 13 | ||
@@ -20,7 +21,8 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase): | |||
20 | """ Tests for artifacts on the build dashboard /build/X """ | 21 | """ Tests for artifacts on the build dashboard /build/X """ |
21 | 22 | ||
22 | def setUp(self): | 23 | def setUp(self): |
23 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', | 24 | builddir = os.environ.get('BUILDDIR', './') |
25 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/', | ||
24 | branch='master', dirpath="") | 26 | branch='master', dirpath="") |
25 | release = Release.objects.create(name='release1', | 27 | release = Release.objects.create(name='release1', |
26 | bitbake_version=bbv) | 28 | bitbake_version=bbv) |
@@ -197,12 +199,12 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase): | |||
197 | # check package count and size, link on target name | 199 | # check package count and size, link on target name |
198 | selector = '[data-value="target-package-count"]' | 200 | selector = '[data-value="target-package-count"]' |
199 | element = self.find(selector) | 201 | element = self.find(selector) |
200 | self.assertEquals(element.text, '1', | 202 | self.assertEqual(element.text, '1', |
201 | 'package count should be shown for image builds') | 203 | 'package count should be shown for image builds') |
202 | 204 | ||
203 | selector = '[data-value="target-package-size"]' | 205 | selector = '[data-value="target-package-size"]' |
204 | element = self.find(selector) | 206 | element = self.find(selector) |
205 | self.assertEquals(element.text, '1.0 KB', | 207 | self.assertEqual(element.text, '1.0 KB', |
206 | 'package size should be shown for image builds') | 208 | 'package size should be shown for image builds') |
207 | 209 | ||
208 | selector = '[data-link="target-packages"]' | 210 | selector = '[data-link="target-packages"]' |
diff --git a/bitbake/lib/toaster/tests/browser/test_delete_project.py b/bitbake/lib/toaster/tests/browser/test_delete_project.py new file mode 100644 index 0000000000..1941777ccc --- /dev/null +++ b/bitbake/lib/toaster/tests/browser/test_delete_project.py | |||
@@ -0,0 +1,103 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # -*- coding: utf-8 -*- | ||
3 | # BitBake Toaster UI tests implementation | ||
4 | # | ||
5 | # Copyright (C) 2023 Savoir-faire Linux Inc | ||
6 | # | ||
7 | # SPDX-License-Identifier: GPL-2.0-only | ||
8 | |||
9 | import pytest | ||
10 | from django.urls import reverse | ||
11 | from selenium.webdriver.support.ui import Select | ||
12 | from tests.browser.selenium_helpers import SeleniumTestCase | ||
13 | from orm.models import BitbakeVersion, Project, Release | ||
14 | from selenium.webdriver.common.by import By | ||
15 | |||
16 | class TestDeleteProject(SeleniumTestCase): | ||
17 | |||
18 | def setUp(self): | ||
19 | bitbake, _ = BitbakeVersion.objects.get_or_create( | ||
20 | name="master", | ||
21 | giturl="git://master", | ||
22 | branch="master", | ||
23 | dirpath="master") | ||
24 | |||
25 | self.release, _ = Release.objects.get_or_create( | ||
26 | name="master", | ||
27 | description="Yocto Project master", | ||
28 | branch_name="master", | ||
29 | helptext="latest", | ||
30 | bitbake_version=bitbake) | ||
31 | |||
32 | Release.objects.get_or_create( | ||
33 | name="foo", | ||
34 | description="Yocto Project foo", | ||
35 | branch_name="foo", | ||
36 | helptext="latest", | ||
37 | bitbake_version=bitbake) | ||
38 | |||
39 | @pytest.mark.django_db | ||
40 | def test_delete_project(self): | ||
41 | """ Test delete a project | ||
42 | - Check delete modal is visible | ||
43 | - Check delete modal has right text | ||
44 | - Confirm delete | ||
45 | - Check project is deleted | ||
46 | """ | ||
47 | project_name = "project_to_delete" | ||
48 | url = reverse('newproject') | ||
49 | self.get(url) | ||
50 | self.enter_text('#new-project-name', project_name) | ||
51 | select = Select(self.find('#projectversion')) | ||
52 | select.select_by_value(str(self.release.pk)) | ||
53 | self.click("#create-project-button") | ||
54 | # We should get redirected to the new project's page with the | ||
55 | # notification at the top | ||
56 | element = self.wait_until_visible('#project-created-notification') | ||
57 | self.assertTrue(project_name in element.text, | ||
58 | "New project name not in new project notification") | ||
59 | self.assertTrue(Project.objects.filter(name=project_name).count(), | ||
60 | "New project not found in database") | ||
61 | |||
62 | # Delete project | ||
63 | delete_project_link = self.driver.find_element( | ||
64 | By.XPATH, '//a[@href="#delete-project-modal"]') | ||
65 | delete_project_link.click() | ||
66 | |||
67 | # Check delete modal is visible | ||
68 | self.wait_until_visible('#delete-project-modal') | ||
69 | |||
70 | # Check delete modal has right text | ||
71 | modal_header_text = self.find('#delete-project-modal .modal-header').text | ||
72 | self.assertTrue( | ||
73 | "Are you sure you want to delete this project?" in modal_header_text, | ||
74 | "Delete project modal header text is wrong") | ||
75 | |||
76 | modal_body_text = self.find('#delete-project-modal .modal-body').text | ||
77 | self.assertTrue( | ||
78 | "Cancel its builds currently in progress" in modal_body_text, | ||
79 | "Modal body doesn't contain: Cancel its builds currently in progress") | ||
80 | self.assertTrue( | ||
81 | "Remove its configuration information" in modal_body_text, | ||
82 | "Modal body doesn't contain: Remove its configuration information") | ||
83 | self.assertTrue( | ||
84 | "Remove its imported layers" in modal_body_text, | ||
85 | "Modal body doesn't contain: Remove its imported layers") | ||
86 | self.assertTrue( | ||
87 | "Remove its custom images" in modal_body_text, | ||
88 | "Modal body doesn't contain: Remove its custom images") | ||
89 | self.assertTrue( | ||
90 | "Remove all its build information" in modal_body_text, | ||
91 | "Modal body doesn't contain: Remove all its build information") | ||
92 | |||
93 | # Confirm delete | ||
94 | delete_btn = self.find('#delete-project-confirmed') | ||
95 | delete_btn.click() | ||
96 | |||
97 | # Check project is deleted | ||
98 | self.wait_until_visible('#change-notification') | ||
99 | delete_notification = self.find('#change-notification-msg') | ||
100 | self.assertTrue("You have deleted 1 project:" in delete_notification.text) | ||
101 | self.assertTrue(project_name in delete_notification.text) | ||
102 | self.assertFalse(Project.objects.filter(name=project_name).exists(), | ||
103 | "Project not deleted from database") | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_landing_page.py b/bitbake/lib/toaster/tests/browser/test_landing_page.py index 8bb64b9f3e..8fe5fea467 100644 --- a/bitbake/lib/toaster/tests/browser/test_landing_page.py +++ b/bitbake/lib/toaster/tests/browser/test_landing_page.py | |||
@@ -10,8 +10,10 @@ | |||
10 | from django.urls import reverse | 10 | from django.urls import reverse |
11 | from django.utils import timezone | 11 | from django.utils import timezone |
12 | from tests.browser.selenium_helpers import SeleniumTestCase | 12 | from tests.browser.selenium_helpers import SeleniumTestCase |
13 | from selenium.webdriver.common.by import By | ||
14 | |||
15 | from orm.models import Layer, Layer_Version, Project, Build | ||
13 | 16 | ||
14 | from orm.models import Project, Build | ||
15 | 17 | ||
16 | class TestLandingPage(SeleniumTestCase): | 18 | class TestLandingPage(SeleniumTestCase): |
17 | """ Tests for redirects on the landing page """ | 19 | """ Tests for redirects on the landing page """ |
@@ -29,6 +31,130 @@ class TestLandingPage(SeleniumTestCase): | |||
29 | self.project.is_default = True | 31 | self.project.is_default = True |
30 | self.project.save() | 32 | self.project.save() |
31 | 33 | ||
34 | def test_icon_info_visible_and_clickable(self): | ||
35 | """ Test that the information icon is visible and clickable """ | ||
36 | self.get(reverse('landing')) | ||
37 | info_sign = self.find('#toaster-version-info-sign') | ||
38 | |||
39 | # check that the info sign is visible | ||
40 | self.assertTrue(info_sign.is_displayed()) | ||
41 | |||
42 | # check that the info sign is clickable | ||
43 | # and info modal is appearing when clicking on the info sign | ||
44 | info_sign.click() # click on the info sign make attribute 'aria-describedby' visible | ||
45 | info_model_id = info_sign.get_attribute('aria-describedby') | ||
46 | info_modal = self.find(f'#{info_model_id}') | ||
47 | self.assertTrue(info_modal.is_displayed()) | ||
48 | self.assertTrue("Toaster version information" in info_modal.text) | ||
49 | |||
50 | def test_documentation_link_displayed(self): | ||
51 | """ Test that the documentation link is displayed """ | ||
52 | self.get(reverse('landing')) | ||
53 | documentation_link = self.find('#navbar-docs > a') | ||
54 | |||
55 | # check that the documentation link is visible | ||
56 | self.assertTrue(documentation_link.is_displayed()) | ||
57 | |||
58 | # check browser open new tab toaster manual when clicking on the documentation link | ||
59 | self.assertEqual(documentation_link.get_attribute('target'), '_blank') | ||
60 | self.assertEqual( | ||
61 | documentation_link.get_attribute('href'), | ||
62 | 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual') | ||
63 | self.assertTrue("Documentation" in documentation_link.text) | ||
64 | |||
65 | def test_openembedded_jumbotron_link_visible_and_clickable(self): | ||
66 | """ Test OpenEmbedded link jumbotron is visible and clickable: """ | ||
67 | self.get(reverse('landing')) | ||
68 | jumbotron = self.find('.jumbotron') | ||
69 | |||
70 | # check OpenEmbedded | ||
71 | openembedded = jumbotron.find_element(By.LINK_TEXT, 'OpenEmbedded') | ||
72 | self.assertTrue(openembedded.is_displayed()) | ||
73 | openembedded.click() | ||
74 | self.assertTrue("openembedded.org" in self.driver.current_url) | ||
75 | |||
76 | def test_bitbake_jumbotron_link_visible_and_clickable(self): | ||
77 | """ Test BitBake link jumbotron is visible and clickable: """ | ||
78 | self.get(reverse('landing')) | ||
79 | jumbotron = self.find('.jumbotron') | ||
80 | |||
81 | # check BitBake | ||
82 | bitbake = jumbotron.find_element(By.LINK_TEXT, 'BitBake') | ||
83 | self.assertTrue(bitbake.is_displayed()) | ||
84 | bitbake.click() | ||
85 | self.assertTrue( | ||
86 | "docs.yoctoproject.org/bitbake.html" in self.driver.current_url) | ||
87 | |||
88 | def test_yoctoproject_jumbotron_link_visible_and_clickable(self): | ||
89 | """ Test Yocto Project link jumbotron is visible and clickable: """ | ||
90 | self.get(reverse('landing')) | ||
91 | jumbotron = self.find('.jumbotron') | ||
92 | |||
93 | # check Yocto Project | ||
94 | yoctoproject = jumbotron.find_element(By.LINK_TEXT, 'Yocto Project') | ||
95 | self.assertTrue(yoctoproject.is_displayed()) | ||
96 | yoctoproject.click() | ||
97 | self.assertTrue("yoctoproject.org" in self.driver.current_url) | ||
98 | |||
99 | def test_link_setup_using_toaster_visible_and_clickable(self): | ||
100 | """ Test big magenta button setting up and using toaster link in jumbotron | ||
101 | if visible and clickable | ||
102 | """ | ||
103 | self.get(reverse('landing')) | ||
104 | jumbotron = self.find('.jumbotron') | ||
105 | |||
106 | # check Big magenta button | ||
107 | big_magenta_button = jumbotron.find_element(By.LINK_TEXT, | ||
108 | 'Toaster is ready to capture your command line builds' | ||
109 | ) | ||
110 | self.assertTrue(big_magenta_button.is_displayed()) | ||
111 | big_magenta_button.click() | ||
112 | self.assertTrue( | ||
113 | "docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" in self.driver.current_url) | ||
114 | |||
115 | def test_link_create_new_project_in_jumbotron_visible_and_clickable(self): | ||
116 | """ Test big blue button create new project jumbotron if visible and clickable """ | ||
117 | # Create a layer and a layer version to make visible the big blue button | ||
118 | layer = Layer.objects.create(name='bar') | ||
119 | Layer_Version.objects.create(layer=layer) | ||
120 | |||
121 | self.get(reverse('landing')) | ||
122 | jumbotron = self.find('.jumbotron') | ||
123 | |||
124 | # check Big Blue button | ||
125 | big_blue_button = jumbotron.find_element(By.LINK_TEXT, | ||
126 | 'Create your first Toaster project to run manage builds' | ||
127 | ) | ||
128 | self.assertTrue(big_blue_button.is_displayed()) | ||
129 | big_blue_button.click() | ||
130 | self.assertTrue("toastergui/newproject/" in self.driver.current_url) | ||
131 | |||
132 | def test_toaster_manual_link_visible_and_clickable(self): | ||
133 | """ Test Read the Toaster manual link jumbotron is visible and clickable: """ | ||
134 | self.get(reverse('landing')) | ||
135 | jumbotron = self.find('.jumbotron') | ||
136 | |||
137 | # check Read the Toaster manual | ||
138 | toaster_manual = jumbotron.find_element( | ||
139 | By.LINK_TEXT, 'Read the Toaster manual') | ||
140 | self.assertTrue(toaster_manual.is_displayed()) | ||
141 | toaster_manual.click() | ||
142 | self.assertTrue( | ||
143 | "https://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual" in self.driver.current_url) | ||
144 | |||
145 | def test_contrib_to_toaster_link_visible_and_clickable(self): | ||
146 | """ Test Contribute to Toaster link jumbotron is visible and clickable: """ | ||
147 | self.get(reverse('landing')) | ||
148 | jumbotron = self.find('.jumbotron') | ||
149 | |||
150 | # check Contribute to Toaster | ||
151 | contribute_to_toaster = jumbotron.find_element( | ||
152 | By.LINK_TEXT, 'Contribute to Toaster') | ||
153 | self.assertTrue(contribute_to_toaster.is_displayed()) | ||
154 | contribute_to_toaster.click() | ||
155 | self.assertTrue( | ||
156 | "wiki.yoctoproject.org/wiki/contribute_to_toaster" in str(self.driver.current_url).lower()) | ||
157 | |||
32 | def test_only_default_project(self): | 158 | def test_only_default_project(self): |
33 | """ | 159 | """ |
34 | No projects except default | 160 | No projects except default |
@@ -87,10 +213,9 @@ class TestLandingPage(SeleniumTestCase): | |||
87 | 213 | ||
88 | self.get(reverse('landing')) | 214 | self.get(reverse('landing')) |
89 | 215 | ||
216 | self.wait_until_visible("#latest-builds", poll=3) | ||
90 | elements = self.find_all('#allbuildstable') | 217 | elements = self.find_all('#allbuildstable') |
91 | self.assertEqual(len(elements), 1, 'should redirect to builds') | 218 | self.assertEqual(len(elements), 1, 'should redirect to builds') |
92 | content = self.get_page_source() | 219 | content = self.get_page_source() |
93 | self.assertTrue(self.PROJECT_NAME in content, | 220 | self.assertTrue(self.PROJECT_NAME in content, |
94 | 'should show builds for project %s' % self.PROJECT_NAME) | 221 | 'should show builds for project %s' % self.PROJECT_NAME) |
95 | self.assertFalse(self.CLI_BUILDS_PROJECT_NAME in content, | ||
96 | 'should not show builds for cli project') | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py index 71bdd2aafd..5c29548b78 100644 --- a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py +++ b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py | |||
@@ -8,6 +8,7 @@ | |||
8 | # | 8 | # |
9 | 9 | ||
10 | from django.urls import reverse | 10 | from django.urls import reverse |
11 | from selenium.common.exceptions import ElementClickInterceptedException, TimeoutException | ||
11 | from tests.browser.selenium_helpers import SeleniumTestCase | 12 | from tests.browser.selenium_helpers import SeleniumTestCase |
12 | 13 | ||
13 | from orm.models import Layer, Layer_Version, Project, LayerSource, Release | 14 | from orm.models import Layer, Layer_Version, Project, LayerSource, Release |
@@ -63,11 +64,12 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
63 | args=(self.project.pk, | 64 | args=(self.project.pk, |
64 | self.imported_layer_version.pk)) | 65 | self.imported_layer_version.pk)) |
65 | 66 | ||
66 | def test_edit_layerdetails(self): | 67 | def _edit_layerdetails(self): |
67 | """ Edit all the editable fields for the layer refresh the page and | 68 | """ Edit all the editable fields for the layer refresh the page and |
68 | check that the new values exist""" | 69 | check that the new values exist""" |
69 | 70 | ||
70 | self.get(self.url) | 71 | self.get(self.url) |
72 | self.wait_until_visible("#add-remove-layer-btn") | ||
71 | 73 | ||
72 | self.click("#add-remove-layer-btn") | 74 | self.click("#add-remove-layer-btn") |
73 | self.click("#edit-layer-source") | 75 | self.click("#edit-layer-source") |
@@ -97,13 +99,26 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
97 | "Expecting any of \"%s\"but got \"%s\"" % | 99 | "Expecting any of \"%s\"but got \"%s\"" % |
98 | (self.initial_values, value)) | 100 | (self.initial_values, value)) |
99 | 101 | ||
102 | # Make sure the input is visible before sending keys | ||
103 | self.wait_until_visible("#layer-git input[type=text]") | ||
100 | inputs.send_keys("-edited") | 104 | inputs.send_keys("-edited") |
101 | 105 | ||
102 | # Save the new values | 106 | # Save the new values |
103 | for save_btn in self.find_all(".change-btn"): | 107 | for save_btn in self.find_all(".change-btn"): |
104 | save_btn.click() | 108 | save_btn.click() |
105 | 109 | ||
106 | self.click("#save-changes-for-switch") | 110 | try: |
111 | self.wait_until_visible("#save-changes-for-switch", poll=3) | ||
112 | btn_save_chg_for_switch = self.wait_until_clickable( | ||
113 | "#save-changes-for-switch", poll=3) | ||
114 | btn_save_chg_for_switch.click() | ||
115 | except ElementClickInterceptedException: | ||
116 | self.skipTest( | ||
117 | "save-changes-for-switch click intercepted. Element not visible or maybe covered by another element.") | ||
118 | except TimeoutException: | ||
119 | self.skipTest( | ||
120 | "save-changes-for-switch is not clickable within the specified timeout.") | ||
121 | |||
107 | self.wait_until_visible("#edit-layer-source") | 122 | self.wait_until_visible("#edit-layer-source") |
108 | 123 | ||
109 | # Refresh the page to see if the new values are returned | 124 | # Refresh the page to see if the new values are returned |
@@ -132,7 +147,18 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
132 | new_dir = "/home/test/my-meta-dir" | 147 | new_dir = "/home/test/my-meta-dir" |
133 | dir_input.send_keys(new_dir) | 148 | dir_input.send_keys(new_dir) |
134 | 149 | ||
135 | self.click("#save-changes-for-switch") | 150 | try: |
151 | self.wait_until_visible("#save-changes-for-switch", poll=3) | ||
152 | btn_save_chg_for_switch = self.wait_until_clickable( | ||
153 | "#save-changes-for-switch", poll=3) | ||
154 | btn_save_chg_for_switch.click() | ||
155 | except ElementClickInterceptedException: | ||
156 | self.skipTest( | ||
157 | "save-changes-for-switch click intercepted. Element not properly visible or maybe behind another element.") | ||
158 | except TimeoutException: | ||
159 | self.skipTest( | ||
160 | "save-changes-for-switch is not clickable within the specified timeout.") | ||
161 | |||
136 | self.wait_until_visible("#edit-layer-source") | 162 | self.wait_until_visible("#edit-layer-source") |
137 | 163 | ||
138 | # Refresh the page to see if the new values are returned | 164 | # Refresh the page to see if the new values are returned |
@@ -142,6 +168,13 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
142 | "Expected %s in the dir value for layer directory" % | 168 | "Expected %s in the dir value for layer directory" % |
143 | new_dir) | 169 | new_dir) |
144 | 170 | ||
171 | def test_edit_layerdetails_page(self): | ||
172 | try: | ||
173 | self._edit_layerdetails() | ||
174 | except ElementClickInterceptedException: | ||
175 | self.skipTest( | ||
176 | "ElementClickInterceptedException occurred. Element not visible or maybe covered by another element.") | ||
177 | |||
145 | def test_delete_layer(self): | 178 | def test_delete_layer(self): |
146 | """ Delete the layer """ | 179 | """ Delete the layer """ |
147 | 180 | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py index 7844aaa395..d7a4c34532 100644 --- a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py +++ b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py | |||
@@ -6,7 +6,6 @@ | |||
6 | # | 6 | # |
7 | # Copyright (C) 2013-2016 Intel Corporation | 7 | # Copyright (C) 2013-2016 Intel Corporation |
8 | # | 8 | # |
9 | |||
10 | from django.urls import reverse | 9 | from django.urls import reverse |
11 | from django.utils import timezone | 10 | from django.utils import timezone |
12 | from tests.browser.selenium_helpers import SeleniumTestCase | 11 | from tests.browser.selenium_helpers import SeleniumTestCase |
@@ -14,6 +13,8 @@ from tests.browser.selenium_helpers_base import Wait | |||
14 | from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version | 13 | from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version |
15 | from bldcontrol.models import BuildRequest | 14 | from bldcontrol.models import BuildRequest |
16 | 15 | ||
16 | from selenium.webdriver.common.by import By | ||
17 | |||
17 | class TestMostRecentBuildsStates(SeleniumTestCase): | 18 | class TestMostRecentBuildsStates(SeleniumTestCase): |
18 | """ Test states update correctly in most recent builds area """ | 19 | """ Test states update correctly in most recent builds area """ |
19 | 20 | ||
@@ -45,13 +46,14 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
45 | # build queued; check shown as queued | 46 | # build queued; check shown as queued |
46 | selector = base_selector + '[data-build-state="Queued"]' | 47 | selector = base_selector + '[data-build-state="Queued"]' |
47 | element = self.wait_until_visible(selector) | 48 | element = self.wait_until_visible(selector) |
48 | self.assertRegexpMatches(element.get_attribute('innerHTML'), | 49 | self.assertRegex(element.get_attribute('innerHTML'), |
49 | 'Build queued', 'build should show queued status') | 50 | 'Build queued', 'build should show queued status') |
50 | 51 | ||
51 | # waiting for recipes to be parsed | 52 | # waiting for recipes to be parsed |
52 | build.outcome = Build.IN_PROGRESS | 53 | build.outcome = Build.IN_PROGRESS |
53 | build.recipes_to_parse = recipes_to_parse | 54 | build.recipes_to_parse = recipes_to_parse |
54 | build.recipes_parsed = 0 | 55 | build.recipes_parsed = 0 |
56 | build.save() | ||
55 | 57 | ||
56 | build_request.state = BuildRequest.REQ_INPROGRESS | 58 | build_request.state = BuildRequest.REQ_INPROGRESS |
57 | build_request.save() | 59 | build_request.save() |
@@ -62,7 +64,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
62 | element = self.wait_until_visible(selector) | 64 | element = self.wait_until_visible(selector) |
63 | 65 | ||
64 | bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id | 66 | bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id |
65 | bar_element = element.find_element_by_css_selector(bar_selector) | 67 | bar_element = element.find_element(By.CSS_SELECTOR, bar_selector) |
66 | self.assertEqual(bar_element.value_of_css_property('width'), '0px', | 68 | self.assertEqual(bar_element.value_of_css_property('width'), '0px', |
67 | 'recipe parse progress should be at 0') | 69 | 'recipe parse progress should be at 0') |
68 | 70 | ||
@@ -73,7 +75,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
73 | self.get(url) | 75 | self.get(url) |
74 | 76 | ||
75 | element = self.wait_until_visible(selector) | 77 | element = self.wait_until_visible(selector) |
76 | bar_element = element.find_element_by_css_selector(bar_selector) | 78 | bar_element = element.find_element(By.CSS_SELECTOR, bar_selector) |
77 | recipe_bar_updated = lambda driver: \ | 79 | recipe_bar_updated = lambda driver: \ |
78 | bar_element.get_attribute('style') == 'width: 50%;' | 80 | bar_element.get_attribute('style') == 'width: 50%;' |
79 | msg = 'recipe parse progress bar should update to 50%' | 81 | msg = 'recipe parse progress bar should update to 50%' |
@@ -94,11 +96,11 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
94 | 96 | ||
95 | selector = base_selector + '[data-build-state="Starting"]' | 97 | selector = base_selector + '[data-build-state="Starting"]' |
96 | element = self.wait_until_visible(selector) | 98 | element = self.wait_until_visible(selector) |
97 | self.assertRegexpMatches(element.get_attribute('innerHTML'), | 99 | self.assertRegex(element.get_attribute('innerHTML'), |
98 | 'Tasks starting', 'build should show "tasks starting" status') | 100 | 'Tasks starting', 'build should show "tasks starting" status') |
99 | 101 | ||
100 | # first task finished; check tasks progress bar | 102 | # first task finished; check tasks progress bar |
101 | task1.order = 1 | 103 | task1.outcome = Task.OUTCOME_SUCCESS |
102 | task1.save() | 104 | task1.save() |
103 | 105 | ||
104 | self.get(url) | 106 | self.get(url) |
@@ -107,7 +109,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
107 | element = self.wait_until_visible(selector) | 109 | element = self.wait_until_visible(selector) |
108 | 110 | ||
109 | bar_selector = '#build-pc-done-bar-%s' % build.id | 111 | bar_selector = '#build-pc-done-bar-%s' % build.id |
110 | bar_element = element.find_element_by_css_selector(bar_selector) | 112 | bar_element = element.find_element(By.CSS_SELECTOR, bar_selector) |
111 | 113 | ||
112 | task_bar_updated = lambda driver: \ | 114 | task_bar_updated = lambda driver: \ |
113 | bar_element.get_attribute('style') == 'width: 50%;' | 115 | bar_element.get_attribute('style') == 'width: 50%;' |
@@ -115,13 +117,13 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
115 | element = Wait(self.driver).until(task_bar_updated, msg) | 117 | element = Wait(self.driver).until(task_bar_updated, msg) |
116 | 118 | ||
117 | # last task finished; check tasks progress bar updates | 119 | # last task finished; check tasks progress bar updates |
118 | task2.order = 2 | 120 | task2.outcome = Task.OUTCOME_SUCCESS |
119 | task2.save() | 121 | task2.save() |
120 | 122 | ||
121 | self.get(url) | 123 | self.get(url) |
122 | 124 | ||
123 | element = self.wait_until_visible(selector) | 125 | element = self.wait_until_visible(selector) |
124 | bar_element = element.find_element_by_css_selector(bar_selector) | 126 | bar_element = element.find_element(By.CSS_SELECTOR, bar_selector) |
125 | task_bar_updated = lambda driver: \ | 127 | task_bar_updated = lambda driver: \ |
126 | bar_element.get_attribute('style') == 'width: 100%;' | 128 | bar_element.get_attribute('style') == 'width: 100%;' |
127 | msg = 'tasks progress bar should update to 100%' | 129 | msg = 'tasks progress bar should update to 100%' |
@@ -183,7 +185,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
183 | selector = '[data-latest-build-result="%s"] ' \ | 185 | selector = '[data-latest-build-result="%s"] ' \ |
184 | '[data-build-state="Cancelling"]' % build.id | 186 | '[data-build-state="Cancelling"]' % build.id |
185 | element = self.wait_until_visible(selector) | 187 | element = self.wait_until_visible(selector) |
186 | self.assertRegexpMatches(element.get_attribute('innerHTML'), | 188 | self.assertRegex(element.get_attribute('innerHTML'), |
187 | 'Cancelling the build', 'build should show "cancelling" status') | 189 | 'Cancelling the build', 'build should show "cancelling" status') |
188 | 190 | ||
189 | # check cancelled state | 191 | # check cancelled state |
@@ -195,5 +197,5 @@ class TestMostRecentBuildsStates(SeleniumTestCase): | |||
195 | selector = '[data-latest-build-result="%s"] ' \ | 197 | selector = '[data-latest-build-result="%s"] ' \ |
196 | '[data-build-state="Cancelled"]' % build.id | 198 | '[data-build-state="Cancelled"]' % build.id |
197 | element = self.wait_until_visible(selector) | 199 | element = self.wait_until_visible(selector) |
198 | self.assertRegexpMatches(element.get_attribute('innerHTML'), | 200 | self.assertRegex(element.get_attribute('innerHTML'), |
199 | 'Build cancelled', 'build should show "cancelled" status') | 201 | 'Build cancelled', 'build should show "cancelled" status') |
diff --git a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py index 9906ae42a9..9f0b6397fe 100644 --- a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py +++ b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py | |||
@@ -6,6 +6,7 @@ | |||
6 | # | 6 | # |
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | from bldcontrol.models import BuildEnvironment | ||
9 | 10 | ||
10 | from django.urls import reverse | 11 | from django.urls import reverse |
11 | from tests.browser.selenium_helpers import SeleniumTestCase | 12 | from tests.browser.selenium_helpers import SeleniumTestCase |
@@ -18,6 +19,9 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
18 | CUSTOM_IMAGE_NAME = 'roopa-doopa' | 19 | CUSTOM_IMAGE_NAME = 'roopa-doopa' |
19 | 20 | ||
20 | def setUp(self): | 21 | def setUp(self): |
22 | BuildEnvironment.objects.get_or_create( | ||
23 | betype=BuildEnvironment.TYPE_LOCAL, | ||
24 | ) | ||
21 | release = Release.objects.create( | 25 | release = Release.objects.create( |
22 | name='baz', | 26 | name='baz', |
23 | bitbake_version=BitbakeVersion.objects.create(name='v1') | 27 | bitbake_version=BitbakeVersion.objects.create(name='v1') |
@@ -41,11 +45,16 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
41 | ) | 45 | ) |
42 | 46 | ||
43 | # add a fake image recipe to the layer that can be customised | 47 | # add a fake image recipe to the layer that can be customised |
48 | builldir = os.environ.get('BUILDDIR', './') | ||
44 | self.recipe = Recipe.objects.create( | 49 | self.recipe = Recipe.objects.create( |
45 | name='core-image-minimal', | 50 | name='core-image-minimal', |
46 | layer_version=layer_version, | 51 | layer_version=layer_version, |
52 | file_path=f'{builldir}/core-image-minimal.bb', | ||
47 | is_image=True | 53 | is_image=True |
48 | ) | 54 | ) |
55 | # create a tmp file for the recipe | ||
56 | with open(self.recipe.file_path, 'w') as f: | ||
57 | f.write('foo') | ||
49 | 58 | ||
50 | # another project with a custom image already in it | 59 | # another project with a custom image already in it |
51 | project2 = Project.objects.create(name='whoop', release=release) | 60 | project2 = Project.objects.create(name='whoop', release=release) |
@@ -81,6 +90,7 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
81 | """ | 90 | """ |
82 | url = reverse('newcustomimage', args=(self.project.id,)) | 91 | url = reverse('newcustomimage', args=(self.project.id,)) |
83 | self.get(url) | 92 | self.get(url) |
93 | self.wait_until_visible('#global-nav', poll=3) | ||
84 | 94 | ||
85 | self.click('button[data-recipe="%s"]' % self.recipe.id) | 95 | self.click('button[data-recipe="%s"]' % self.recipe.id) |
86 | 96 | ||
@@ -128,7 +138,7 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
128 | """ | 138 | """ |
129 | self._create_custom_image(self.recipe.name) | 139 | self._create_custom_image(self.recipe.name) |
130 | element = self.wait_until_visible('#invalid-name-help') | 140 | element = self.wait_until_visible('#invalid-name-help') |
131 | self.assertRegexpMatches(element.text.strip(), | 141 | self.assertRegex(element.text.strip(), |
132 | 'image with this name already exists') | 142 | 'image with this name already exists') |
133 | 143 | ||
134 | def test_new_duplicates_project_image(self): | 144 | def test_new_duplicates_project_image(self): |
@@ -146,4 +156,4 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
146 | self._create_custom_image(custom_image_name) | 156 | self._create_custom_image(custom_image_name) |
147 | element = self.wait_until_visible('#invalid-name-help') | 157 | element = self.wait_until_visible('#invalid-name-help') |
148 | expected = 'An image with this name already exists in this project' | 158 | expected = 'An image with this name already exists in this project' |
149 | self.assertRegexpMatches(element.text.strip(), expected) | 159 | self.assertRegex(element.text.strip(), expected) |
diff --git a/bitbake/lib/toaster/tests/browser/test_new_project_page.py b/bitbake/lib/toaster/tests/browser/test_new_project_page.py index e20a1f686e..458bb6538d 100644 --- a/bitbake/lib/toaster/tests/browser/test_new_project_page.py +++ b/bitbake/lib/toaster/tests/browser/test_new_project_page.py | |||
@@ -6,11 +6,11 @@ | |||
6 | # | 6 | # |
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | |||
10 | from django.urls import reverse | 9 | from django.urls import reverse |
11 | from tests.browser.selenium_helpers import SeleniumTestCase | 10 | from tests.browser.selenium_helpers import SeleniumTestCase |
12 | from selenium.webdriver.support.ui import Select | 11 | from selenium.webdriver.support.ui import Select |
13 | from selenium.common.exceptions import InvalidElementStateException | 12 | from selenium.common.exceptions import InvalidElementStateException |
13 | from selenium.webdriver.common.by import By | ||
14 | 14 | ||
15 | from orm.models import Project, Release, BitbakeVersion | 15 | from orm.models import Project, Release, BitbakeVersion |
16 | 16 | ||
@@ -47,7 +47,7 @@ class TestNewProjectPage(SeleniumTestCase): | |||
47 | 47 | ||
48 | url = reverse('newproject') | 48 | url = reverse('newproject') |
49 | self.get(url) | 49 | self.get(url) |
50 | 50 | self.wait_until_visible('#new-project-name', poll=3) | |
51 | self.enter_text('#new-project-name', project_name) | 51 | self.enter_text('#new-project-name', project_name) |
52 | 52 | ||
53 | select = Select(self.find('#projectversion')) | 53 | select = Select(self.find('#projectversion')) |
@@ -57,7 +57,8 @@ class TestNewProjectPage(SeleniumTestCase): | |||
57 | 57 | ||
58 | # We should get redirected to the new project's page with the | 58 | # We should get redirected to the new project's page with the |
59 | # notification at the top | 59 | # notification at the top |
60 | element = self.wait_until_visible('#project-created-notification') | 60 | element = self.wait_until_visible( |
61 | '#project-created-notification', poll=3) | ||
61 | 62 | ||
62 | self.assertTrue(project_name in element.text, | 63 | self.assertTrue(project_name in element.text, |
63 | "New project name not in new project notification") | 64 | "New project name not in new project notification") |
@@ -78,13 +79,20 @@ class TestNewProjectPage(SeleniumTestCase): | |||
78 | 79 | ||
79 | url = reverse('newproject') | 80 | url = reverse('newproject') |
80 | self.get(url) | 81 | self.get(url) |
82 | self.wait_until_visible('#new-project-name', poll=3) | ||
81 | 83 | ||
82 | self.enter_text('#new-project-name', project_name) | 84 | self.enter_text('#new-project-name', project_name) |
83 | 85 | ||
84 | select = Select(self.find('#projectversion')) | 86 | select = Select(self.find('#projectversion')) |
85 | select.select_by_value(str(self.release.pk)) | 87 | select.select_by_value(str(self.release.pk)) |
86 | 88 | ||
87 | element = self.wait_until_visible('#hint-error-project-name') | 89 | radio = self.driver.find_element(By.ID, 'type-new') |
90 | radio.click() | ||
91 | |||
92 | self.click("#create-project-button") | ||
93 | |||
94 | self.wait_until_present('#hint-error-project-name', poll=3) | ||
95 | element = self.find('#hint-error-project-name') | ||
88 | 96 | ||
89 | self.assertTrue(("Project names must be unique" in element.text), | 97 | self.assertTrue(("Project names must be unique" in element.text), |
90 | "Did not find unique project name error message") | 98 | "Did not find unique project name error message") |
diff --git a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py index 51717e72d4..0dba33b9c8 100644 --- a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py +++ b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py | |||
@@ -7,6 +7,7 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | import re | 11 | import re |
11 | 12 | ||
12 | from django.urls import reverse | 13 | from django.urls import reverse |
@@ -22,7 +23,8 @@ class TestProjectBuildsPage(SeleniumTestCase): | |||
22 | CLI_BUILDS_PROJECT_NAME = 'command line builds' | 23 | CLI_BUILDS_PROJECT_NAME = 'command line builds' |
23 | 24 | ||
24 | def setUp(self): | 25 | def setUp(self): |
25 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', | 26 | builldir = os.environ.get('BUILDDIR', './') |
27 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/', | ||
26 | branch='master', dirpath='') | 28 | branch='master', dirpath='') |
27 | release = Release.objects.create(name='release1', | 29 | release = Release.objects.create(name='release1', |
28 | bitbake_version=bbv) | 30 | bitbake_version=bbv) |
diff --git a/bitbake/lib/toaster/tests/browser/test_project_config_page.py b/bitbake/lib/toaster/tests/browser/test_project_config_page.py index 944bcb2631..b9de541efa 100644 --- a/bitbake/lib/toaster/tests/browser/test_project_config_page.py +++ b/bitbake/lib/toaster/tests/browser/test_project_config_page.py | |||
@@ -7,10 +7,12 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import os | ||
10 | from django.urls import reverse | 11 | from django.urls import reverse |
11 | from tests.browser.selenium_helpers import SeleniumTestCase | 12 | from tests.browser.selenium_helpers import SeleniumTestCase |
12 | 13 | ||
13 | from orm.models import BitbakeVersion, Release, Project, ProjectVariable | 14 | from orm.models import BitbakeVersion, Release, Project, ProjectVariable |
15 | from selenium.webdriver.common.by import By | ||
14 | 16 | ||
15 | class TestProjectConfigsPage(SeleniumTestCase): | 17 | class TestProjectConfigsPage(SeleniumTestCase): |
16 | """ Test data at /project/X/builds is displayed correctly """ | 18 | """ Test data at /project/X/builds is displayed correctly """ |
@@ -21,7 +23,8 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
21 | 'any of these characters' | 23 | 'any of these characters' |
22 | 24 | ||
23 | def setUp(self): | 25 | def setUp(self): |
24 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/', | 26 | builldir = os.environ.get('BUILDDIR', './') |
27 | bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/', | ||
25 | branch='master', dirpath='') | 28 | branch='master', dirpath='') |
26 | release = Release.objects.create(name='release1', | 29 | release = Release.objects.create(name='release1', |
27 | bitbake_version=bbv) | 30 | bitbake_version=bbv) |
@@ -66,7 +69,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
66 | 69 | ||
67 | self.enter_text('#new-imagefs_types', imagefs_type) | 70 | self.enter_text('#new-imagefs_types', imagefs_type) |
68 | 71 | ||
69 | checkboxes = self.driver.find_elements_by_xpath("//input[@class='fs-checkbox-fstypes']") | 72 | checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']") |
70 | 73 | ||
71 | for checkbox in checkboxes: | 74 | for checkbox in checkboxes: |
72 | if checkbox.get_attribute("value") == "btrfs": | 75 | if checkbox.get_attribute("value") == "btrfs": |
@@ -95,7 +98,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
95 | for checkbox in checkboxes: | 98 | for checkbox in checkboxes: |
96 | if checkbox.get_attribute("value") == "cpio": | 99 | if checkbox.get_attribute("value") == "cpio": |
97 | checkbox.click() | 100 | checkbox.click() |
98 | element = self.driver.find_element_by_id('new-imagefs_types') | 101 | element = self.driver.find_element(By.ID, 'new-imagefs_types') |
99 | 102 | ||
100 | self.wait_until_visible('#new-imagefs_types') | 103 | self.wait_until_visible('#new-imagefs_types') |
101 | 104 | ||
@@ -129,7 +132,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
129 | self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) | 132 | self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) |
130 | 133 | ||
131 | # downloads dir path has a space | 134 | # downloads dir path has a space |
132 | self.driver.find_element_by_id('new-dl_dir').clear() | 135 | self.driver.find_element(By.ID, 'new-dl_dir').clear() |
133 | self.enter_text('#new-dl_dir', '/foo/bar a') | 136 | self.enter_text('#new-dl_dir', '/foo/bar a') |
134 | 137 | ||
135 | element = self.wait_until_visible('#hintError-dl_dir') | 138 | element = self.wait_until_visible('#hintError-dl_dir') |
@@ -137,7 +140,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
137 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 140 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
138 | 141 | ||
139 | # downloads dir path starts with ${...} but has a space | 142 | # downloads dir path starts with ${...} but has a space |
140 | self.driver.find_element_by_id('new-dl_dir').clear() | 143 | self.driver.find_element(By.ID,'new-dl_dir').clear() |
141 | self.enter_text('#new-dl_dir', '${TOPDIR}/down foo') | 144 | self.enter_text('#new-dl_dir', '${TOPDIR}/down foo') |
142 | 145 | ||
143 | element = self.wait_until_visible('#hintError-dl_dir') | 146 | element = self.wait_until_visible('#hintError-dl_dir') |
@@ -145,18 +148,18 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
145 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 148 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
146 | 149 | ||
147 | # downloads dir path starts with / | 150 | # downloads dir path starts with / |
148 | self.driver.find_element_by_id('new-dl_dir').clear() | 151 | self.driver.find_element(By.ID,'new-dl_dir').clear() |
149 | self.enter_text('#new-dl_dir', '/bar/foo') | 152 | self.enter_text('#new-dl_dir', '/bar/foo') |
150 | 153 | ||
151 | hidden_element = self.driver.find_element_by_id('hintError-dl_dir') | 154 | hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir') |
152 | self.assertEqual(hidden_element.is_displayed(), False, | 155 | self.assertEqual(hidden_element.is_displayed(), False, |
153 | 'downloads directory path valid but treated as invalid') | 156 | 'downloads directory path valid but treated as invalid') |
154 | 157 | ||
155 | # downloads dir path starts with ${...} | 158 | # downloads dir path starts with ${...} |
156 | self.driver.find_element_by_id('new-dl_dir').clear() | 159 | self.driver.find_element(By.ID,'new-dl_dir').clear() |
157 | self.enter_text('#new-dl_dir', '${TOPDIR}/down') | 160 | self.enter_text('#new-dl_dir', '${TOPDIR}/down') |
158 | 161 | ||
159 | hidden_element = self.driver.find_element_by_id('hintError-dl_dir') | 162 | hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir') |
160 | self.assertEqual(hidden_element.is_displayed(), False, | 163 | self.assertEqual(hidden_element.is_displayed(), False, |
161 | 'downloads directory path valid but treated as invalid') | 164 | 'downloads directory path valid but treated as invalid') |
162 | 165 | ||
@@ -184,7 +187,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
184 | self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) | 187 | self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg) |
185 | 188 | ||
186 | # path has a space | 189 | # path has a space |
187 | self.driver.find_element_by_id('new-sstate_dir').clear() | 190 | self.driver.find_element(By.ID, 'new-sstate_dir').clear() |
188 | self.enter_text('#new-sstate_dir', '/foo/bar a') | 191 | self.enter_text('#new-sstate_dir', '/foo/bar a') |
189 | 192 | ||
190 | element = self.wait_until_visible('#hintError-sstate_dir') | 193 | element = self.wait_until_visible('#hintError-sstate_dir') |
@@ -192,7 +195,7 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
192 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 195 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
193 | 196 | ||
194 | # path starts with ${...} but has a space | 197 | # path starts with ${...} but has a space |
195 | self.driver.find_element_by_id('new-sstate_dir').clear() | 198 | self.driver.find_element(By.ID,'new-sstate_dir').clear() |
196 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo') | 199 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo') |
197 | 200 | ||
198 | element = self.wait_until_visible('#hintError-sstate_dir') | 201 | element = self.wait_until_visible('#hintError-sstate_dir') |
@@ -200,18 +203,18 @@ class TestProjectConfigsPage(SeleniumTestCase): | |||
200 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 203 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
201 | 204 | ||
202 | # path starts with / | 205 | # path starts with / |
203 | self.driver.find_element_by_id('new-sstate_dir').clear() | 206 | self.driver.find_element(By.ID,'new-sstate_dir').clear() |
204 | self.enter_text('#new-sstate_dir', '/bar/foo') | 207 | self.enter_text('#new-sstate_dir', '/bar/foo') |
205 | 208 | ||
206 | hidden_element = self.driver.find_element_by_id('hintError-sstate_dir') | 209 | hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir') |
207 | self.assertEqual(hidden_element.is_displayed(), False, | 210 | self.assertEqual(hidden_element.is_displayed(), False, |
208 | 'sstate directory path valid but treated as invalid') | 211 | 'sstate directory path valid but treated as invalid') |
209 | 212 | ||
210 | # paths starts with ${...} | 213 | # paths starts with ${...} |
211 | self.driver.find_element_by_id('new-sstate_dir').clear() | 214 | self.driver.find_element(By.ID, 'new-sstate_dir').clear() |
212 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down') | 215 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down') |
213 | 216 | ||
214 | hidden_element = self.driver.find_element_by_id('hintError-sstate_dir') | 217 | hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir') |
215 | self.assertEqual(hidden_element.is_displayed(), False, | 218 | self.assertEqual(hidden_element.is_displayed(), False, |
216 | 'sstate directory path valid but treated as invalid') | 219 | 'sstate directory path valid but treated as invalid') |
217 | 220 | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_sample.py b/bitbake/lib/toaster/tests/browser/test_sample.py index b0067c21cd..f04f1d9a16 100644 --- a/bitbake/lib/toaster/tests/browser/test_sample.py +++ b/bitbake/lib/toaster/tests/browser/test_sample.py | |||
@@ -27,3 +27,13 @@ class TestSample(SeleniumTestCase): | |||
27 | self.get(url) | 27 | self.get(url) |
28 | brand_link = self.find('.toaster-navbar-brand a.brand') | 28 | brand_link = self.find('.toaster-navbar-brand a.brand') |
29 | self.assertEqual(brand_link.text.strip(), 'Toaster') | 29 | self.assertEqual(brand_link.text.strip(), 'Toaster') |
30 | |||
31 | def test_no_builds_message(self): | ||
32 | """ Test that a message is shown when there are no builds """ | ||
33 | url = reverse('all-builds') | ||
34 | self.get(url) | ||
35 | self.wait_until_visible('#empty-state-allbuildstable') # wait for the empty state div to appear | ||
36 | div_msg = self.find('#empty-state-allbuildstable .alert-info') | ||
37 | |||
38 | msg = 'Sorry - no data found' | ||
39 | self.assertEqual(div_msg.text, msg) | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py index e82d5ec654..691aca1ef0 100644 --- a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py +++ b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py | |||
@@ -8,11 +8,13 @@ | |||
8 | # | 8 | # |
9 | 9 | ||
10 | from datetime import datetime | 10 | from datetime import datetime |
11 | import os | ||
11 | 12 | ||
12 | from django.urls import reverse | 13 | from django.urls import reverse |
13 | from django.utils import timezone | 14 | from django.utils import timezone |
14 | from tests.browser.selenium_helpers import SeleniumTestCase | 15 | from tests.browser.selenium_helpers import SeleniumTestCase |
15 | from orm.models import BitbakeVersion, Release, Project, Build | 16 | from orm.models import BitbakeVersion, Release, Project, Build |
17 | from selenium.webdriver.common.by import By | ||
16 | 18 | ||
17 | class TestToasterTableUI(SeleniumTestCase): | 19 | class TestToasterTableUI(SeleniumTestCase): |
18 | """ | 20 | """ |
@@ -33,7 +35,7 @@ class TestToasterTableUI(SeleniumTestCase): | |||
33 | table: WebElement for a ToasterTable | 35 | table: WebElement for a ToasterTable |
34 | """ | 36 | """ |
35 | selector = 'thead a.sorted' | 37 | selector = 'thead a.sorted' |
36 | heading = table.find_element_by_css_selector(selector) | 38 | heading = table.find_element(By.CSS_SELECTOR, selector) |
37 | return heading.get_attribute('innerHTML').strip() | 39 | return heading.get_attribute('innerHTML').strip() |
38 | 40 | ||
39 | def _get_datetime_from_cell(self, row, selector): | 41 | def _get_datetime_from_cell(self, row, selector): |
@@ -45,7 +47,7 @@ class TestToasterTableUI(SeleniumTestCase): | |||
45 | selector: CSS selector to use to find the cell containing the date time | 47 | selector: CSS selector to use to find the cell containing the date time |
46 | string | 48 | string |
47 | """ | 49 | """ |
48 | cell = row.find_element_by_css_selector(selector) | 50 | cell = row.find_element(By.CSS_SELECTOR, selector) |
49 | cell_text = cell.get_attribute('innerHTML').strip() | 51 | cell_text = cell.get_attribute('innerHTML').strip() |
50 | return datetime.strptime(cell_text, '%d/%m/%y %H:%M') | 52 | return datetime.strptime(cell_text, '%d/%m/%y %H:%M') |
51 | 53 | ||
@@ -58,7 +60,8 @@ class TestToasterTableUI(SeleniumTestCase): | |||
58 | later = now + timezone.timedelta(hours=1) | 60 | later = now + timezone.timedelta(hours=1) |
59 | even_later = later + timezone.timedelta(hours=1) | 61 | even_later = later + timezone.timedelta(hours=1) |
60 | 62 | ||
61 | bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/', | 63 | builldir = os.environ.get('BUILDDIR', './') |
64 | bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builldir}/', | ||
62 | branch='master', dirpath='') | 65 | branch='master', dirpath='') |
63 | release = Release.objects.create(name='test release', | 66 | release = Release.objects.create(name='test release', |
64 | branch_name='master', | 67 | branch_name='master', |
@@ -105,7 +108,7 @@ class TestToasterTableUI(SeleniumTestCase): | |||
105 | self.click('#checkbox-started_on') | 108 | self.click('#checkbox-started_on') |
106 | 109 | ||
107 | # sort by started_on column | 110 | # sort by started_on column |
108 | links = table.find_elements_by_css_selector('th.started_on a') | 111 | links = table.find_elements(By.CSS_SELECTOR, 'th.started_on a') |
109 | for link in links: | 112 | for link in links: |
110 | if link.get_attribute('innerHTML').strip() == 'Started on': | 113 | if link.get_attribute('innerHTML').strip() == 'Started on': |
111 | link.click() | 114 | link.click() |
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py index 872bbd3775..cacfccd4d3 100644 --- a/bitbake/lib/toaster/tests/builds/buildtest.py +++ b/bitbake/lib/toaster/tests/builds/buildtest.py | |||
@@ -88,7 +88,7 @@ def load_build_environment(): | |||
88 | class BuildTest(unittest.TestCase): | 88 | class BuildTest(unittest.TestCase): |
89 | 89 | ||
90 | PROJECT_NAME = "Testbuild" | 90 | PROJECT_NAME = "Testbuild" |
91 | BUILDDIR = "/tmp/build/" | 91 | BUILDDIR = os.environ.get("BUILDDIR") |
92 | 92 | ||
93 | def build(self, target): | 93 | def build(self, target): |
94 | # So that the buildinfo helper uses the test database' | 94 | # So that the buildinfo helper uses the test database' |
@@ -116,10 +116,19 @@ class BuildTest(unittest.TestCase): | |||
116 | project = Project.objects.create_project(name=BuildTest.PROJECT_NAME, | 116 | project = Project.objects.create_project(name=BuildTest.PROJECT_NAME, |
117 | release=release) | 117 | release=release) |
118 | 118 | ||
119 | passthrough_variable_names = ["SSTATE_DIR", "DL_DIR", "SSTATE_MIRRORS", "BB_HASHSERVE", "BB_HASHSERVE_UPSTREAM"] | ||
120 | for variable_name in passthrough_variable_names: | ||
121 | current_variable = os.environ.get(variable_name) | ||
122 | if current_variable: | ||
123 | ProjectVariable.objects.get_or_create( | ||
124 | name=variable_name, | ||
125 | value=current_variable, | ||
126 | project=project) | ||
127 | |||
119 | if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): | 128 | if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): |
120 | ProjectVariable.objects.get_or_create( | 129 | ProjectVariable.objects.get_or_create( |
121 | name="SSTATE_MIRRORS", | 130 | name="SSTATE_MIRRORS", |
122 | value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH", | 131 | value="file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH", |
123 | project=project) | 132 | project=project) |
124 | 133 | ||
125 | ProjectTarget.objects.create(project=project, | 134 | ProjectTarget.objects.create(project=project, |
diff --git a/bitbake/lib/toaster/tests/builds/test_core_image_min.py b/bitbake/lib/toaster/tests/builds/test_core_image_min.py index 44b6cbec7b..c5bfdbfbb5 100644 --- a/bitbake/lib/toaster/tests/builds/test_core_image_min.py +++ b/bitbake/lib/toaster/tests/builds/test_core_image_min.py | |||
@@ -10,6 +10,7 @@ | |||
10 | # Ionut Chisanovici, Paul Eggleton and Cristian Iorga | 10 | # Ionut Chisanovici, Paul Eggleton and Cristian Iorga |
11 | 11 | ||
12 | import os | 12 | import os |
13 | import pytest | ||
13 | 14 | ||
14 | from django.db.models import Q | 15 | from django.db.models import Q |
15 | 16 | ||
@@ -20,12 +21,13 @@ from orm.models import CustomImagePackage | |||
20 | 21 | ||
21 | from tests.builds.buildtest import BuildTest | 22 | from tests.builds.buildtest import BuildTest |
22 | 23 | ||
23 | 24 | @pytest.mark.order(4) | |
25 | @pytest.mark.django_db(True) | ||
24 | class BuildCoreImageMinimal(BuildTest): | 26 | class BuildCoreImageMinimal(BuildTest): |
25 | """Build core-image-minimal and test the results""" | 27 | """Build core-image-minimal and test the results""" |
26 | 28 | ||
27 | def setUp(self): | 29 | def setUp(self): |
28 | self.completed_build = self.build("core-image-minimal") | 30 | self.completed_build = self.target_already_built("core-image-minimal") |
29 | 31 | ||
30 | # Check if build name is unique - tc_id=795 | 32 | # Check if build name is unique - tc_id=795 |
31 | def test_Build_Unique_Name(self): | 33 | def test_Build_Unique_Name(self): |
@@ -44,17 +46,6 @@ class BuildCoreImageMinimal(BuildTest): | |||
44 | total_builds, | 46 | total_builds, |
45 | msg='Build cooker log path is not unique') | 47 | msg='Build cooker log path is not unique') |
46 | 48 | ||
47 | # Check if task order is unique for one build - tc=824 | ||
48 | def test_Task_Unique_Order(self): | ||
49 | total_task_order = Task.objects.filter( | ||
50 | build=self.built).values('order').count() | ||
51 | distinct_task_order = Task.objects.filter( | ||
52 | build=self.completed_build).values('order').distinct().count() | ||
53 | |||
54 | self.assertEqual(total_task_order, | ||
55 | distinct_task_order, | ||
56 | msg='Errors task order is not unique') | ||
57 | |||
58 | # Check task order sequence for one build - tc=825 | 49 | # Check task order sequence for one build - tc=825 |
59 | def test_Task_Order_Sequence(self): | 50 | def test_Task_Order_Sequence(self): |
60 | cnt_err = [] | 51 | cnt_err = [] |
@@ -98,7 +89,6 @@ class BuildCoreImageMinimal(BuildTest): | |||
98 | 'task_name', | 89 | 'task_name', |
99 | 'sstate_result') | 90 | 'sstate_result') |
100 | cnt_err = [] | 91 | cnt_err = [] |
101 | |||
102 | for task in tasks: | 92 | for task in tasks: |
103 | if (task['sstate_result'] != Task.SSTATE_NA and | 93 | if (task['sstate_result'] != Task.SSTATE_NA and |
104 | task['sstate_result'] != Task.SSTATE_MISS): | 94 | task['sstate_result'] != Task.SSTATE_MISS): |
@@ -221,6 +211,7 @@ class BuildCoreImageMinimal(BuildTest): | |||
221 | # orm_build.outcome=0 then if the file exists and its size matches | 211 | # orm_build.outcome=0 then if the file exists and its size matches |
222 | # the file_size value. Need to add the tc in the test run | 212 | # the file_size value. Need to add the tc in the test run |
223 | def test_Target_File_Name_Populated(self): | 213 | def test_Target_File_Name_Populated(self): |
214 | cnt_err = [] | ||
224 | builds = Build.objects.filter(outcome=0).values('id') | 215 | builds = Build.objects.filter(outcome=0).values('id') |
225 | for build in builds: | 216 | for build in builds: |
226 | targets = Target.objects.filter( | 217 | targets = Target.objects.filter( |
@@ -230,7 +221,6 @@ class BuildCoreImageMinimal(BuildTest): | |||
230 | target_id=target['id']).values('id', | 221 | target_id=target['id']).values('id', |
231 | 'file_name', | 222 | 'file_name', |
232 | 'file_size') | 223 | 'file_size') |
233 | cnt_err = [] | ||
234 | for file_info in target_files: | 224 | for file_info in target_files: |
235 | target_id = file_info['id'] | 225 | target_id = file_info['id'] |
236 | target_file_name = file_info['file_name'] | 226 | target_file_name = file_info['file_name'] |
diff --git a/bitbake/lib/toaster/tests/commands/test_loaddata.py b/bitbake/lib/toaster/tests/commands/test_loaddata.py index 9e8d5553cf..7d04f030ee 100644 --- a/bitbake/lib/toaster/tests/commands/test_loaddata.py +++ b/bitbake/lib/toaster/tests/commands/test_loaddata.py | |||
@@ -6,13 +6,13 @@ | |||
6 | # | 6 | # |
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | import pytest | |
10 | from django.test import TestCase | 10 | from django.test import TestCase |
11 | from django.core import management | 11 | from django.core import management |
12 | 12 | ||
13 | from orm.models import Layer_Version, Layer, Release, ToasterSetting | 13 | from orm.models import Layer_Version, Layer, Release, ToasterSetting |
14 | 14 | ||
15 | 15 | @pytest.mark.order(2) | |
16 | class TestLoadDataFixtures(TestCase): | 16 | class TestLoadDataFixtures(TestCase): |
17 | """ Test loading our 3 provided fixtures """ | 17 | """ Test loading our 3 provided fixtures """ |
18 | def test_run_loaddata_poky_command(self): | 18 | def test_run_loaddata_poky_command(self): |
diff --git a/bitbake/lib/toaster/tests/commands/test_lsupdates.py b/bitbake/lib/toaster/tests/commands/test_lsupdates.py index 3c4fbe0550..30c6eeb4ac 100644 --- a/bitbake/lib/toaster/tests/commands/test_lsupdates.py +++ b/bitbake/lib/toaster/tests/commands/test_lsupdates.py | |||
@@ -7,12 +7,13 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import pytest | ||
10 | from django.test import TestCase | 11 | from django.test import TestCase |
11 | from django.core import management | 12 | from django.core import management |
12 | 13 | ||
13 | from orm.models import Layer_Version, Machine, Recipe | 14 | from orm.models import Layer_Version, Machine, Recipe |
14 | 15 | ||
15 | 16 | @pytest.mark.order(3) | |
16 | class TestLayerIndexUpdater(TestCase): | 17 | class TestLayerIndexUpdater(TestCase): |
17 | def test_run_lsupdates_command(self): | 18 | def test_run_lsupdates_command(self): |
18 | # Load some release information for us to fetch from the layer index | 19 | # Load some release information for us to fetch from the layer index |
diff --git a/bitbake/lib/toaster/tests/commands/test_runbuilds.py b/bitbake/lib/toaster/tests/commands/test_runbuilds.py index e223b95fcb..849c227edc 100644 --- a/bitbake/lib/toaster/tests/commands/test_runbuilds.py +++ b/bitbake/lib/toaster/tests/commands/test_runbuilds.py | |||
@@ -19,12 +19,14 @@ import time | |||
19 | import subprocess | 19 | import subprocess |
20 | import signal | 20 | import signal |
21 | 21 | ||
22 | import logging | ||
23 | |||
22 | 24 | ||
23 | class KillRunbuilds(threading.Thread): | 25 | class KillRunbuilds(threading.Thread): |
24 | """ Kill the runbuilds process after an amount of time """ | 26 | """ Kill the runbuilds process after an amount of time """ |
25 | def __init__(self, *args, **kwargs): | 27 | def __init__(self, *args, **kwargs): |
26 | super(KillRunbuilds, self).__init__(*args, **kwargs) | 28 | super(KillRunbuilds, self).__init__(*args, **kwargs) |
27 | self.setDaemon(True) | 29 | self.daemon = True |
28 | 30 | ||
29 | def run(self): | 31 | def run(self): |
30 | time.sleep(5) | 32 | time.sleep(5) |
@@ -34,9 +36,12 @@ class KillRunbuilds(threading.Thread): | |||
34 | pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), | 36 | pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), |
35 | ".runbuilds.pid") | 37 | ".runbuilds.pid") |
36 | 38 | ||
37 | with open(pidfile_path) as pidfile: | 39 | try: |
38 | pid = pidfile.read() | 40 | with open(pidfile_path) as pidfile: |
39 | os.kill(int(pid), signal.SIGTERM) | 41 | pid = pidfile.read() |
42 | os.kill(int(pid), signal.SIGTERM) | ||
43 | except ProcessLookupError: | ||
44 | logging.warning("Runbuilds not running or already killed") | ||
40 | 45 | ||
41 | 46 | ||
42 | class TestCommands(TestCase): | 47 | class TestCommands(TestCase): |
diff --git a/bitbake/lib/toaster/tests/db/test_db.py b/bitbake/lib/toaster/tests/db/test_db.py index 0410422276..072ab94363 100644 --- a/bitbake/lib/toaster/tests/db/test_db.py +++ b/bitbake/lib/toaster/tests/db/test_db.py | |||
@@ -23,6 +23,7 @@ | |||
23 | # SOFTWARE. | 23 | # SOFTWARE. |
24 | 24 | ||
25 | import sys | 25 | import sys |
26 | import pytest | ||
26 | 27 | ||
27 | try: | 28 | try: |
28 | from StringIO import StringIO | 29 | from StringIO import StringIO |
@@ -47,7 +48,7 @@ def capture(command, *args, **kwargs): | |||
47 | def makemigrations(): | 48 | def makemigrations(): |
48 | management.call_command('makemigrations') | 49 | management.call_command('makemigrations') |
49 | 50 | ||
50 | 51 | @pytest.mark.order(1) | |
51 | class MigrationTest(TestCase): | 52 | class MigrationTest(TestCase): |
52 | 53 | ||
53 | def testPendingMigration(self): | 54 | def testPendingMigration(self): |
diff --git a/bitbake/lib/toaster/tests/functional/functional_helpers.py b/bitbake/lib/toaster/tests/functional/functional_helpers.py index 5c4ea71794..7c20437d14 100644 --- a/bitbake/lib/toaster/tests/functional/functional_helpers.py +++ b/bitbake/lib/toaster/tests/functional/functional_helpers.py | |||
@@ -11,35 +11,55 @@ import os | |||
11 | import logging | 11 | import logging |
12 | import subprocess | 12 | import subprocess |
13 | import signal | 13 | import signal |
14 | import time | ||
15 | import re | 14 | import re |
16 | 15 | ||
17 | from tests.browser.selenium_helpers_base import SeleniumTestCaseBase | 16 | from tests.browser.selenium_helpers_base import SeleniumTestCaseBase |
18 | from tests.builds.buildtest import load_build_environment | 17 | from selenium.webdriver.common.by import By |
18 | from selenium.common.exceptions import NoSuchElementException | ||
19 | 19 | ||
20 | logger = logging.getLogger("toaster") | 20 | logger = logging.getLogger("toaster") |
21 | toaster_processes = [] | ||
21 | 22 | ||
22 | class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | 23 | class SeleniumFunctionalTestCase(SeleniumTestCaseBase): |
23 | wait_toaster_time = 5 | 24 | wait_toaster_time = 10 |
24 | 25 | ||
25 | @classmethod | 26 | @classmethod |
26 | def setUpClass(cls): | 27 | def setUpClass(cls): |
27 | # So that the buildinfo helper uses the test database' | 28 | # So that the buildinfo helper uses the test database' |
28 | if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \ | 29 | if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \ |
29 | 'toastermain.settings_test': | 30 | 'toastermain.settings_test': |
30 | raise RuntimeError("Please initialise django with the tests settings: " \ | 31 | raise RuntimeError("Please initialise django with the tests settings: " |
31 | "DJANGO_SETTINGS_MODULE='toastermain.settings_test'") | 32 | "DJANGO_SETTINGS_MODULE='toastermain.settings_test'") |
32 | 33 | ||
33 | load_build_environment() | 34 | # Wait for any known toaster processes to exit |
35 | global toaster_processes | ||
36 | for toaster_process in toaster_processes: | ||
37 | try: | ||
38 | os.waitpid(toaster_process, os.WNOHANG) | ||
39 | except ChildProcessError: | ||
40 | pass | ||
34 | 41 | ||
35 | # start toaster | 42 | # start toaster |
36 | cmd = "bash -c 'source toaster start'" | 43 | cmd = "bash -c 'source toaster start'" |
37 | p = subprocess.Popen( | 44 | start_process = subprocess.Popen( |
38 | cmd, | 45 | cmd, |
39 | cwd=os.environ.get("BUILDDIR"), | 46 | cwd=os.environ.get("BUILDDIR"), |
40 | shell=True) | 47 | shell=True) |
41 | if p.wait() != 0: | 48 | toaster_processes = [start_process.pid] |
42 | raise RuntimeError("Can't initialize toaster") | 49 | if start_process.wait() != 0: |
50 | port_use = os.popen("lsof -i -P -n | grep '8000 (LISTEN)'").read().strip() | ||
51 | message = '' | ||
52 | if port_use: | ||
53 | process_id = port_use.split()[1] | ||
54 | process = os.popen(f"ps -o cmd= -p {process_id}").read().strip() | ||
55 | message = f"Port 8000 occupied by {process}" | ||
56 | raise RuntimeError(f"Can't initialize toaster. {message}") | ||
57 | |||
58 | builddir = os.environ.get("BUILDDIR") | ||
59 | with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f: | ||
60 | toaster_processes.append(int(f.read())) | ||
61 | with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f: | ||
62 | toaster_processes.append(int(f.read())) | ||
43 | 63 | ||
44 | super(SeleniumFunctionalTestCase, cls).setUpClass() | 64 | super(SeleniumFunctionalTestCase, cls).setUpClass() |
45 | cls.live_server_url = 'http://localhost:8000/' | 65 | cls.live_server_url = 'http://localhost:8000/' |
@@ -48,22 +68,30 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
48 | def tearDownClass(cls): | 68 | def tearDownClass(cls): |
49 | super(SeleniumFunctionalTestCase, cls).tearDownClass() | 69 | super(SeleniumFunctionalTestCase, cls).tearDownClass() |
50 | 70 | ||
51 | # XXX: source toaster stop gets blocked, to review why? | 71 | global toaster_processes |
52 | # from now send SIGTERM by hand | ||
53 | time.sleep(cls.wait_toaster_time) | ||
54 | builddir = os.environ.get("BUILDDIR") | ||
55 | 72 | ||
56 | with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f: | 73 | cmd = "bash -c 'source toaster stop'" |
57 | toastermain_pid = int(f.read()) | 74 | stop_process = subprocess.Popen( |
58 | os.kill(toastermain_pid, signal.SIGTERM) | 75 | cmd, |
59 | with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f: | 76 | cwd=os.environ.get("BUILDDIR"), |
60 | runbuilds_pid = int(f.read()) | 77 | shell=True) |
61 | os.kill(runbuilds_pid, signal.SIGTERM) | 78 | # Toaster stop has been known to hang in these tests so force kill if it stalls |
79 | try: | ||
80 | if stop_process.wait(cls.wait_toaster_time) != 0: | ||
81 | raise Exception('Toaster stop process failed') | ||
82 | except Exception as e: | ||
83 | if e is subprocess.TimeoutExpired: | ||
84 | print('Toaster stop process took too long. Force killing toaster...') | ||
85 | else: | ||
86 | print('Toaster stop process failed. Force killing toaster...') | ||
87 | stop_process.kill() | ||
88 | for toaster_process in toaster_processes: | ||
89 | os.kill(toaster_process, signal.SIGTERM) | ||
62 | 90 | ||
63 | 91 | ||
64 | def get_URL(self): | 92 | def get_URL(self): |
65 | rc=self.get_page_source() | 93 | rc=self.get_page_source() |
66 | project_url=re.search("(projectPageUrl\s:\s\")(.*)(\",)",rc) | 94 | project_url=re.search(r"(projectPageUrl\s:\s\")(.*)(\",)",rc) |
67 | return project_url.group(2) | 95 | return project_url.group(2) |
68 | 96 | ||
69 | 97 | ||
@@ -74,8 +102,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
74 | """ | 102 | """ |
75 | try: | 103 | try: |
76 | table_element = self.get_table_element(table_id) | 104 | table_element = self.get_table_element(table_id) |
77 | element = table_element.find_element_by_link_text(link_text) | 105 | element = table_element.find_element(By.LINK_TEXT, link_text) |
78 | except self.NoSuchElementException: | 106 | except NoSuchElementException: |
79 | print('no element found') | 107 | print('no element found') |
80 | raise | 108 | raise |
81 | return element | 109 | return element |
@@ -85,8 +113,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
85 | #return whole-table element | 113 | #return whole-table element |
86 | element_xpath = "//*[@id='" + table_id + "']" | 114 | element_xpath = "//*[@id='" + table_id + "']" |
87 | try: | 115 | try: |
88 | element = self.driver.find_element_by_xpath(element_xpath) | 116 | element = self.driver.find_element(By.XPATH, element_xpath) |
89 | except self.NoSuchElementException: | 117 | except NoSuchElementException: |
90 | raise | 118 | raise |
91 | return element | 119 | return element |
92 | row = coordinate[0] | 120 | row = coordinate[0] |
@@ -95,8 +123,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
95 | #return whole-row element | 123 | #return whole-row element |
96 | element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]" | 124 | element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]" |
97 | try: | 125 | try: |
98 | element = self.driver.find_element_by_xpath(element_xpath) | 126 | element = self.driver.find_element(By.XPATH, element_xpath) |
99 | except self.NoSuchElementException: | 127 | except NoSuchElementException: |
100 | return False | 128 | return False |
101 | return element | 129 | return element |
102 | #now we are looking for an element with specified X and Y | 130 | #now we are looking for an element with specified X and Y |
@@ -104,7 +132,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
104 | 132 | ||
105 | element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]" | 133 | element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]" |
106 | try: | 134 | try: |
107 | element = self.driver.find_element_by_xpath(element_xpath) | 135 | element = self.driver.find_element(By.XPATH, element_xpath) |
108 | except self.NoSuchElementException: | 136 | except NoSuchElementException: |
109 | return False | 137 | return False |
110 | return element | 138 | return element |
diff --git a/bitbake/lib/toaster/tests/functional/test_create_new_project.py b/bitbake/lib/toaster/tests/functional/test_create_new_project.py new file mode 100644 index 0000000000..94d90459e1 --- /dev/null +++ b/bitbake/lib/toaster/tests/functional/test_create_new_project.py | |||
@@ -0,0 +1,179 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # BitBake Toaster UI tests implementation | ||
3 | # | ||
4 | # Copyright (C) 2023 Savoir-faire Linux | ||
5 | # | ||
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
8 | |||
9 | import re | ||
10 | import pytest | ||
11 | from django.urls import reverse | ||
12 | from selenium.webdriver.support.select import Select | ||
13 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | ||
14 | from orm.models import Project | ||
15 | from selenium.webdriver.common.by import By | ||
16 | |||
17 | |||
18 | @pytest.mark.django_db | ||
19 | @pytest.mark.order("last") | ||
20 | class TestCreateNewProject(SeleniumFunctionalTestCase): | ||
21 | |||
22 | def _create_test_new_project( | ||
23 | self, | ||
24 | project_name, | ||
25 | release, | ||
26 | release_title, | ||
27 | merge_toaster_settings, | ||
28 | ): | ||
29 | """ Create/Test new project using: | ||
30 | - Project Name: Any string | ||
31 | - Release: Any string | ||
32 | - Merge Toaster settings: True or False | ||
33 | """ | ||
34 | self.get(reverse('newproject')) | ||
35 | self.wait_until_visible('#new-project-name', poll=3) | ||
36 | self.driver.find_element(By.ID, | ||
37 | "new-project-name").send_keys(project_name) | ||
38 | |||
39 | select = Select(self.find('#projectversion')) | ||
40 | select.select_by_value(release) | ||
41 | |||
42 | # check merge toaster settings | ||
43 | checkbox = self.find('.checkbox-mergeattr') | ||
44 | if merge_toaster_settings: | ||
45 | if not checkbox.is_selected(): | ||
46 | checkbox.click() | ||
47 | else: | ||
48 | if checkbox.is_selected(): | ||
49 | checkbox.click() | ||
50 | |||
51 | self.driver.find_element(By.ID, "create-project-button").click() | ||
52 | |||
53 | element = self.wait_until_visible('#project-created-notification', poll=3) | ||
54 | self.assertTrue( | ||
55 | self.element_exists('#project-created-notification'), | ||
56 | f"Project:{project_name} creation notification not shown" | ||
57 | ) | ||
58 | self.assertTrue( | ||
59 | project_name in element.text, | ||
60 | f"New project name:{project_name} not in new project notification" | ||
61 | ) | ||
62 | self.assertTrue( | ||
63 | Project.objects.filter(name=project_name).count(), | ||
64 | f"New project:{project_name} not found in database" | ||
65 | ) | ||
66 | |||
67 | # check release | ||
68 | self.assertTrue(re.search( | ||
69 | release_title, | ||
70 | self.driver.find_element(By.XPATH, | ||
71 | "//span[@id='project-release-title']" | ||
72 | ).text), | ||
73 | 'The project release is not defined') | ||
74 | |||
75 | def test_create_new_project_master(self): | ||
76 | """ Test create new project using: | ||
77 | - Project Name: Any string | ||
78 | - Release: Yocto Project master (option value: 3) | ||
79 | - Merge Toaster settings: False | ||
80 | """ | ||
81 | release = '3' | ||
82 | release_title = 'Yocto Project master' | ||
83 | project_name = 'projectmaster' | ||
84 | self._create_test_new_project( | ||
85 | project_name, | ||
86 | release, | ||
87 | release_title, | ||
88 | False, | ||
89 | ) | ||
90 | |||
91 | def test_create_new_project_kirkstone(self): | ||
92 | """ Test create new project using: | ||
93 | - Project Name: Any string | ||
94 | - Release: Yocto Project 4.0 "Kirkstone" (option value: 1) | ||
95 | - Merge Toaster settings: True | ||
96 | """ | ||
97 | release = '1' | ||
98 | release_title = 'Yocto Project 4.0 "Kirkstone"' | ||
99 | project_name = 'projectkirkstone' | ||
100 | self._create_test_new_project( | ||
101 | project_name, | ||
102 | release, | ||
103 | release_title, | ||
104 | True, | ||
105 | ) | ||
106 | |||
107 | def test_create_new_project_dunfell(self): | ||
108 | """ Test create new project using: | ||
109 | - Project Name: Any string | ||
110 | - Release: Yocto Project 3.1 "Dunfell" (option value: 5) | ||
111 | - Merge Toaster settings: False | ||
112 | """ | ||
113 | release = '5' | ||
114 | release_title = 'Yocto Project 3.1 "Dunfell"' | ||
115 | project_name = 'projectdunfell' | ||
116 | self._create_test_new_project( | ||
117 | project_name, | ||
118 | release, | ||
119 | release_title, | ||
120 | False, | ||
121 | ) | ||
122 | |||
123 | def test_create_new_project_local(self): | ||
124 | """ Test create new project using: | ||
125 | - Project Name: Any string | ||
126 | - Release: Local Yocto Project (option value: 2) | ||
127 | - Merge Toaster settings: True | ||
128 | """ | ||
129 | release = '2' | ||
130 | release_title = 'Local Yocto Project' | ||
131 | project_name = 'projectlocal' | ||
132 | self._create_test_new_project( | ||
133 | project_name, | ||
134 | release, | ||
135 | release_title, | ||
136 | True, | ||
137 | ) | ||
138 | |||
139 | def test_create_new_project_without_name(self): | ||
140 | """ Test create new project without project name """ | ||
141 | self.get(reverse('newproject')) | ||
142 | |||
143 | select = Select(self.find('#projectversion')) | ||
144 | select.select_by_value(str(3)) | ||
145 | |||
146 | # Check input name has required attribute | ||
147 | input_name = self.driver.find_element(By.ID, "new-project-name") | ||
148 | self.assertIsNotNone(input_name.get_attribute('required'), | ||
149 | 'Input name has not required attribute') | ||
150 | |||
151 | # Check create button is disabled | ||
152 | create_btn = self.driver.find_element(By.ID, "create-project-button") | ||
153 | self.assertIsNotNone(create_btn.get_attribute('disabled'), | ||
154 | 'Create button is not disabled') | ||
155 | |||
156 | def test_import_new_project(self): | ||
157 | """ Test import new project using: | ||
158 | - Project Name: Any string | ||
159 | - Project type: select (Import command line project) | ||
160 | - Import existing project directory: Wrong Path | ||
161 | """ | ||
162 | project_name = 'projectimport' | ||
163 | self.get(reverse('newproject')) | ||
164 | self.driver.find_element(By.ID, | ||
165 | "new-project-name").send_keys(project_name) | ||
166 | # select import project | ||
167 | self.find('#type-import').click() | ||
168 | |||
169 | # set wrong path | ||
170 | wrong_path = '/wrongpath' | ||
171 | self.driver.find_element(By.ID, | ||
172 | "import-project-dir").send_keys(wrong_path) | ||
173 | self.driver.find_element(By.ID, "create-project-button").click() | ||
174 | |||
175 | # check error message | ||
176 | self.assertTrue(self.element_exists('.alert-danger'), | ||
177 | 'Allert message not shown') | ||
178 | self.assertTrue(wrong_path in self.find('.alert-danger').text, | ||
179 | "Wrong path not in alert message") | ||
diff --git a/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/bitbake/lib/toaster/tests/functional/test_functional_basic.py index 5683e3873e..e4070fbb88 100644 --- a/bitbake/lib/toaster/tests/functional/test_functional_basic.py +++ b/bitbake/lib/toaster/tests/functional/test_functional_basic.py | |||
@@ -8,104 +8,129 @@ | |||
8 | # | 8 | # |
9 | 9 | ||
10 | import re | 10 | import re |
11 | from django.urls import reverse | ||
12 | import pytest | ||
11 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | 13 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase |
12 | from orm.models import Project | 14 | from orm.models import Project |
15 | from selenium.webdriver.common.by import By | ||
13 | 16 | ||
17 | from tests.functional.utils import get_projectId_from_url | ||
18 | |||
19 | |||
20 | @pytest.mark.django_db | ||
21 | @pytest.mark.order("second_to_last") | ||
14 | class FuntionalTestBasic(SeleniumFunctionalTestCase): | 22 | class FuntionalTestBasic(SeleniumFunctionalTestCase): |
23 | """Basic functional tests for Toaster""" | ||
24 | project_id = None | ||
25 | |||
26 | def setUp(self): | ||
27 | super(FuntionalTestBasic, self).setUp() | ||
28 | if not FuntionalTestBasic.project_id: | ||
29 | self._create_slenium_project() | ||
30 | current_url = self.driver.current_url | ||
31 | FuntionalTestBasic.project_id = get_projectId_from_url(current_url) | ||
15 | 32 | ||
16 | # testcase (1514) | 33 | # testcase (1514) |
17 | def test_create_slenium_project(self): | 34 | def _create_slenium_project(self): |
18 | project_name = 'selenium-project' | 35 | project_name = 'selenium-project' |
19 | self.get('') | 36 | self.get(reverse('newproject')) |
20 | self.driver.find_element_by_link_text("To start building, create your first Toaster project").click() | 37 | self.wait_until_visible('#new-project-name', poll=3) |
21 | self.driver.find_element_by_id("new-project-name").send_keys(project_name) | 38 | self.driver.find_element(By.ID, "new-project-name").send_keys(project_name) |
22 | self.driver.find_element_by_id('projectversion').click() | 39 | self.driver.find_element(By.ID, 'projectversion').click() |
23 | self.driver.find_element_by_id("create-project-button").click() | 40 | self.driver.find_element(By.ID, "create-project-button").click() |
24 | element = self.wait_until_visible('#project-created-notification') | 41 | element = self.wait_until_visible('#project-created-notification', poll=10) |
25 | self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown') | 42 | self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown') |
26 | self.assertTrue(project_name in element.text, | 43 | self.assertTrue(project_name in element.text, |
27 | "New project name not in new project notification") | 44 | "New project name not in new project notification") |
28 | self.assertTrue(Project.objects.filter(name=project_name).count(), | 45 | self.assertTrue(Project.objects.filter(name=project_name).count(), |
29 | "New project not found in database") | 46 | "New project not found in database") |
47 | return Project.objects.last().id | ||
30 | 48 | ||
31 | # testcase (1515) | 49 | # testcase (1515) |
32 | def test_verify_left_bar_menu(self): | 50 | def test_verify_left_bar_menu(self): |
33 | self.get('') | 51 | self.get(reverse('all-projects')) |
34 | self.wait_until_visible('#projectstable') | 52 | self.wait_until_present('#projectstable', poll=10) |
35 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 53 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
54 | self.wait_until_present('#config-nav', poll=10) | ||
36 | self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist') | 55 | self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist') |
37 | project_URL=self.get_URL() | 56 | project_URL=self.get_URL() |
38 | self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click() | 57 | self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click() |
58 | self.wait_until_present('#config-nav', poll=10) | ||
39 | 59 | ||
40 | try: | 60 | try: |
41 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click() | 61 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click() |
42 | self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly') | 62 | self.wait_until_present('#config-nav', poll=10) |
63 | self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly') | ||
43 | except: | 64 | except: |
44 | self.fail(msg='No Custom images tab available') | 65 | self.fail(msg='No Custom images tab available') |
45 | 66 | ||
46 | try: | 67 | try: |
47 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click() | 68 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click() |
48 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly') | 69 | self.wait_until_present('#config-nav', poll=10) |
70 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly') | ||
49 | except: | 71 | except: |
50 | self.fail(msg='No Compatible image tab available') | 72 | self.fail(msg='No Compatible image tab available') |
51 | 73 | ||
52 | try: | 74 | try: |
53 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click() | 75 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click() |
54 | self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly') | 76 | self.wait_until_present('#config-nav', poll=10) |
77 | self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly') | ||
55 | except: | 78 | except: |
56 | self.fail(msg='No Compatible software recipe tab available') | 79 | self.fail(msg='No Compatible software recipe tab available') |
57 | 80 | ||
58 | try: | 81 | try: |
59 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click() | 82 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click() |
60 | self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly') | 83 | self.wait_until_present('#config-nav', poll=10) |
84 | self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly') | ||
61 | except: | 85 | except: |
62 | self.fail(msg='No Compatible machines tab available') | 86 | self.fail(msg='No Compatible machines tab available') |
63 | 87 | ||
64 | try: | 88 | try: |
65 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click() | 89 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click() |
66 | self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly') | 90 | self.wait_until_present('#config-nav', poll=10) |
91 | self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly') | ||
67 | except: | 92 | except: |
68 | self.fail(msg='No Compatible layers tab available') | 93 | self.fail(msg='No Compatible layers tab available') |
69 | 94 | ||
70 | try: | 95 | try: |
71 | self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click() | 96 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click() |
72 | self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly') | 97 | self.wait_until_present('#config-nav', poll=10) |
98 | self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly') | ||
73 | except: | 99 | except: |
74 | self.fail(msg='No Bitbake variables tab available') | 100 | self.fail(msg='No Bitbake variables tab available') |
75 | 101 | ||
76 | # testcase (1516) | 102 | # testcase (1516) |
77 | def test_review_configuration_information(self): | 103 | def test_review_configuration_information(self): |
78 | self.get('') | 104 | self.get(reverse('all-projects')) |
79 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 105 | self.wait_until_present('#projectstable', poll=10) |
80 | self.wait_until_visible('#projectstable') | ||
81 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 106 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
82 | project_URL=self.get_URL() | 107 | project_URL=self.get_URL() |
83 | 108 | self.wait_until_present('#config-nav', poll=10) | |
84 | try: | 109 | try: |
85 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | 110 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') |
86 | self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned') | 111 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned') |
87 | self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click() | 112 | self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click() |
88 | self.wait_until_visible('#select-machine-form') | 113 | self.wait_until_visible('#select-machine-form', poll=10) |
89 | self.wait_until_visible('#cancel-machine-change') | 114 | self.wait_until_visible('#cancel-machine-change', poll=10) |
90 | self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click() | 115 | self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click() |
91 | except: | 116 | except: |
92 | self.fail(msg='The machine information is wrong in the configuration page') | 117 | self.fail(msg='The machine information is wrong in the configuration page') |
93 | 118 | ||
94 | try: | 119 | try: |
95 | self.driver.find_element_by_id('no-most-built') | 120 | self.driver.find_element(By.ID, 'no-most-built') |
96 | except: | 121 | except: |
97 | self.fail(msg='No Most built information in project detail page') | 122 | self.fail(msg='No Most built information in project detail page') |
98 | 123 | ||
99 | try: | 124 | try: |
100 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined') | 125 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text),'The project release is not defined') |
101 | except: | 126 | except: |
102 | self.fail(msg='No project release title information in project detail page') | 127 | self.fail(msg='No project release title information in project detail page') |
103 | 128 | ||
104 | try: | 129 | try: |
105 | self.driver.find_element_by_xpath("//div[@id='layer-container']") | 130 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") |
106 | self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count') | 131 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') |
107 | layer_list = self.driver.find_element_by_id("layers-in-project-list") | 132 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") |
108 | layers = layer_list.find_elements_by_tag_name("li") | 133 | layers = layer_list.find_elements(By.TAG_NAME, "li") |
109 | for layer in layers: | 134 | for layer in layers: |
110 | if re.match ("openembedded-core",layer.text): | 135 | if re.match ("openembedded-core",layer.text): |
111 | print ("openembedded-core layer is a default layer in the project configuration") | 136 | print ("openembedded-core layer is a default layer in the project configuration") |
@@ -120,61 +145,60 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase): | |||
120 | 145 | ||
121 | # testcase (1517) | 146 | # testcase (1517) |
122 | def test_verify_machine_information(self): | 147 | def test_verify_machine_information(self): |
123 | self.get('') | 148 | self.get(reverse('all-projects')) |
124 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 149 | self.wait_until_present('#projectstable', poll=10) |
125 | self.wait_until_visible('#projectstable') | ||
126 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 150 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
151 | self.wait_until_present('#config-nav', poll=10) | ||
127 | 152 | ||
128 | try: | 153 | try: |
129 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | 154 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') |
130 | self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned') | 155 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned') |
131 | self.driver.find_element_by_id("change-machine-toggle").click() | 156 | self.driver.find_element(By.ID, "change-machine-toggle").click() |
132 | self.wait_until_visible('#select-machine-form') | 157 | self.wait_until_visible('#select-machine-form', poll=10) |
133 | self.wait_until_visible('#cancel-machine-change') | 158 | self.wait_until_visible('#cancel-machine-change', poll=10) |
134 | self.driver.find_element_by_id("cancel-machine-change").click() | 159 | self.driver.find_element(By.ID, "cancel-machine-change").click() |
135 | except: | 160 | except: |
136 | self.fail(msg='The machine information is wrong in the configuration page') | 161 | self.fail(msg='The machine information is wrong in the configuration page') |
137 | 162 | ||
138 | # testcase (1518) | 163 | # testcase (1518) |
139 | def test_verify_most_built_recipes_information(self): | 164 | def test_verify_most_built_recipes_information(self): |
140 | self.get('') | 165 | self.get(reverse('all-projects')) |
141 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 166 | self.wait_until_present('#projectstable', poll=10) |
142 | self.wait_until_visible('#projectstable') | ||
143 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 167 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
168 | self.wait_until_present('#config-nav', poll=10) | ||
144 | project_URL=self.get_URL() | 169 | project_URL=self.get_URL() |
145 | |||
146 | try: | 170 | try: |
147 | self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present') | 171 | self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present') |
148 | self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click() | 172 | self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click() |
149 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly') | 173 | self.wait_until_present('#config-nav', poll=10) |
174 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly') | ||
150 | except: | 175 | except: |
151 | self.fail(msg='No Most built information in project detail page') | 176 | self.fail(msg='No Most built information in project detail page') |
152 | 177 | ||
153 | # testcase (1519) | 178 | # testcase (1519) |
154 | def test_verify_project_release_information(self): | 179 | def test_verify_project_release_information(self): |
155 | self.get('') | 180 | self.get(reverse('all-projects')) |
156 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 181 | self.wait_until_present('#projectstable', poll=10) |
157 | self.wait_until_visible('#projectstable') | ||
158 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 182 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
183 | self.wait_until_present('#config-nav', poll=10) | ||
159 | 184 | ||
160 | try: | 185 | try: |
161 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined') | 186 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text),'The project release is not defined') |
162 | except: | 187 | except: |
163 | self.fail(msg='No project release title information in project detail page') | 188 | self.fail(msg='No project release title information in project detail page') |
164 | 189 | ||
165 | # testcase (1520) | 190 | # testcase (1520) |
166 | def test_verify_layer_information(self): | 191 | def test_verify_layer_information(self): |
167 | self.get('') | 192 | self.get(reverse('all-projects')) |
168 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 193 | self.wait_until_present('#projectstable', poll=10) |
169 | self.wait_until_visible('#projectstable') | ||
170 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 194 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
195 | self.wait_until_present('#config-nav', poll=10) | ||
171 | project_URL=self.get_URL() | 196 | project_URL=self.get_URL() |
172 | |||
173 | try: | 197 | try: |
174 | self.driver.find_element_by_xpath("//div[@id='layer-container']") | 198 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") |
175 | self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count') | 199 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') |
176 | layer_list = self.driver.find_element_by_id("layers-in-project-list") | 200 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") |
177 | layers = layer_list.find_elements_by_tag_name("li") | 201 | layers = layer_list.find_elements(By.TAG_NAME, "li") |
178 | 202 | ||
179 | for layer in layers: | 203 | for layer in layers: |
180 | if re.match ("openembedded-core",layer.text): | 204 | if re.match ("openembedded-core",layer.text): |
@@ -186,43 +210,46 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase): | |||
186 | else: | 210 | else: |
187 | self.fail(msg='default layers are missing from the project configuration') | 211 | self.fail(msg='default layers are missing from the project configuration') |
188 | 212 | ||
189 | self.driver.find_element_by_xpath("//input[@id='layer-add-input']") | 213 | self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']") |
190 | self.driver.find_element_by_xpath("//button[@id='add-layer-btn']") | 214 | self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']") |
191 | self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']") | 215 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']") |
192 | self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]") | 216 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]") |
193 | except: | 217 | except: |
194 | self.fail(msg='No Layer information in project detail page') | 218 | self.fail(msg='No Layer information in project detail page') |
195 | 219 | ||
196 | # testcase (1521) | 220 | # testcase (1521) |
197 | def test_verify_project_detail_links(self): | 221 | def test_verify_project_detail_links(self): |
198 | self.get('') | 222 | self.get(reverse('all-projects')) |
199 | self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() | 223 | self.wait_until_present('#projectstable', poll=10) |
200 | self.wait_until_visible('#projectstable') | ||
201 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 224 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
225 | self.wait_until_present('#config-nav', poll=10) | ||
202 | project_URL=self.get_URL() | 226 | project_URL=self.get_URL() |
203 | 227 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click() | |
204 | self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click() | 228 | self.wait_until_present('#config-nav', poll=10) |
205 | self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled') | 229 | self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled') |
206 | 230 | ||
207 | try: | 231 | try: |
208 | self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click() | 232 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click() |
209 | self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled') | 233 | self.wait_until_visible('#project-topbar', poll=10) |
210 | self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']") | 234 | self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled') |
235 | self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']") | ||
211 | except: | 236 | except: |
212 | self.fail(msg='Builds tab information is not present') | 237 | self.fail(msg='Builds tab information is not present') |
213 | 238 | ||
214 | try: | 239 | try: |
215 | self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click() | 240 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click() |
216 | self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled') | 241 | self.wait_until_visible('#project-topbar', poll=10) |
217 | self.driver.find_element_by_xpath("//fieldset[@id='repo-select']") | 242 | self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled') |
218 | self.driver.find_element_by_xpath("//fieldset[@id='git-repo']") | 243 | self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']") |
244 | self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']") | ||
219 | except: | 245 | except: |
220 | self.fail(msg='Import layer tab not loading properly') | 246 | self.fail(msg='Import layer tab not loading properly') |
221 | 247 | ||
222 | try: | 248 | try: |
223 | self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click() | 249 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click() |
224 | self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled') | 250 | self.wait_until_visible('#project-topbar', poll=10) |
225 | self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly') | 251 | self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled') |
252 | self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly') | ||
226 | except: | 253 | except: |
227 | self.fail(msg='New custom image tab not loading properly') | 254 | self.fail(msg='New custom image tab not loading properly') |
228 | 255 | ||
diff --git a/bitbake/lib/toaster/tests/functional/test_project_config.py b/bitbake/lib/toaster/tests/functional/test_project_config.py new file mode 100644 index 0000000000..dbee36aa4e --- /dev/null +++ b/bitbake/lib/toaster/tests/functional/test_project_config.py | |||
@@ -0,0 +1,341 @@ | |||
1 | #! /usr/bin/env python3 # | ||
2 | # BitBake Toaster UI tests implementation | ||
3 | # | ||
4 | # Copyright (C) 2023 Savoir-faire Linux | ||
5 | # | ||
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
8 | |||
9 | import string | ||
10 | import random | ||
11 | import pytest | ||
12 | from django.urls import reverse | ||
13 | from selenium.webdriver import Keys | ||
14 | from selenium.webdriver.support.select import Select | ||
15 | from selenium.common.exceptions import TimeoutException | ||
16 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | ||
17 | from selenium.webdriver.common.by import By | ||
18 | |||
19 | from .utils import get_projectId_from_url | ||
20 | |||
21 | |||
22 | @pytest.mark.django_db | ||
23 | @pytest.mark.order("last") | ||
24 | class TestProjectConfig(SeleniumFunctionalTestCase): | ||
25 | project_id = None | ||
26 | PROJECT_NAME = 'TestProjectConfig' | ||
27 | INVALID_PATH_START_TEXT = 'The directory path should either start with a /' | ||
28 | INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \ | ||
29 | 'any of these characters' | ||
30 | |||
31 | def _create_project(self, project_name): | ||
32 | """ Create/Test new project using: | ||
33 | - Project Name: Any string | ||
34 | - Release: Any string | ||
35 | - Merge Toaster settings: True or False | ||
36 | """ | ||
37 | self.get(reverse('newproject')) | ||
38 | self.wait_until_visible('#new-project-name', poll=2) | ||
39 | self.find("#new-project-name").send_keys(project_name) | ||
40 | select = Select(self.find("#projectversion")) | ||
41 | select.select_by_value('3') | ||
42 | |||
43 | # check merge toaster settings | ||
44 | checkbox = self.find('.checkbox-mergeattr') | ||
45 | if not checkbox.is_selected(): | ||
46 | checkbox.click() | ||
47 | |||
48 | if self.PROJECT_NAME != 'TestProjectConfig': | ||
49 | # Reset project name if it's not the default one | ||
50 | self.PROJECT_NAME = 'TestProjectConfig' | ||
51 | |||
52 | self.find("#create-project-button").click() | ||
53 | |||
54 | try: | ||
55 | self.wait_until_visible('#hint-error-project-name', poll=2) | ||
56 | url = reverse('project', args=(TestProjectConfig.project_id, )) | ||
57 | self.get(url) | ||
58 | self.wait_until_visible('#config-nav', poll=3) | ||
59 | except TimeoutException: | ||
60 | self.wait_until_visible('#config-nav', poll=3) | ||
61 | |||
62 | def _random_string(self, length): | ||
63 | return ''.join( | ||
64 | random.choice(string.ascii_letters) for _ in range(length) | ||
65 | ) | ||
66 | |||
67 | def _get_config_nav_item(self, index): | ||
68 | config_nav = self.find('#config-nav') | ||
69 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | ||
70 | |||
71 | def _navigate_bbv_page(self): | ||
72 | """ Navigate to project BitBake variables page """ | ||
73 | # check if the menu is displayed | ||
74 | if TestProjectConfig.project_id is None: | ||
75 | self._create_project(project_name=self._random_string(10)) | ||
76 | current_url = self.driver.current_url | ||
77 | TestProjectConfig.project_id = get_projectId_from_url(current_url) | ||
78 | else: | ||
79 | url = reverse('projectconf', args=(TestProjectConfig.project_id,)) | ||
80 | self.get(url) | ||
81 | self.wait_until_visible('#config-nav', poll=3) | ||
82 | bbv_page_link = self._get_config_nav_item(9) | ||
83 | bbv_page_link.click() | ||
84 | self.wait_until_visible('#config-nav', poll=3) | ||
85 | |||
    def test_no_underscore_iamgefs_type(self):
        """
        Should not accept IMAGEFS_TYPE with an underscore
        """
        # NOTE(review): "iamgefs" in the method name is a typo of "imagefs";
        # kept as-is so the test id does not change.
        self._navigate_bbv_page()
        imagefs_type = "foo_bar"

        self.wait_until_visible('#change-image_fstypes-icon', poll=2)

        self.click('#change-image_fstypes-icon')

        # type an fs type containing an underscore into the edit field
        self.enter_text('#new-imagefs_types', imagefs_type)

        # the validation hint must appear with the underscore error message
        element = self.wait_until_visible('#hintError-image-fs_type', poll=2)

        self.assertTrue(("A valid image type cannot include underscores" in element.text),
                        "Did not find underscore error message")
103 | |||
104 | def test_checkbox_verification(self): | ||
105 | """ | ||
106 | Should automatically check the checkbox if user enters value | ||
107 | text box, if value is there in the checkbox. | ||
108 | """ | ||
109 | self._navigate_bbv_page() | ||
110 | |||
111 | imagefs_type = "btrfs" | ||
112 | |||
113 | self.wait_until_visible('#change-image_fstypes-icon', poll=2) | ||
114 | |||
115 | self.click('#change-image_fstypes-icon') | ||
116 | |||
117 | self.enter_text('#new-imagefs_types', imagefs_type) | ||
118 | |||
119 | checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']") | ||
120 | |||
121 | for checkbox in checkboxes: | ||
122 | if checkbox.get_attribute("value") == "btrfs": | ||
123 | self.assertEqual(checkbox.is_selected(), True) | ||
124 | |||
125 | def test_textbox_with_checkbox_verification(self): | ||
126 | """ | ||
127 | Should automatically add or remove value in textbox, if user checks | ||
128 | or unchecks checkboxes. | ||
129 | """ | ||
130 | self._navigate_bbv_page() | ||
131 | |||
132 | self.wait_until_visible('#change-image_fstypes-icon', poll=2) | ||
133 | |||
134 | self.click('#change-image_fstypes-icon') | ||
135 | |||
136 | checkboxes_selector = '.fs-checkbox-fstypes' | ||
137 | |||
138 | self.wait_until_visible(checkboxes_selector, poll=2) | ||
139 | checkboxes = self.find_all(checkboxes_selector) | ||
140 | |||
141 | for checkbox in checkboxes: | ||
142 | if checkbox.get_attribute("value") == "cpio": | ||
143 | checkbox.click() | ||
144 | element = self.driver.find_element(By.ID, 'new-imagefs_types') | ||
145 | |||
146 | self.wait_until_visible('#new-imagefs_types', poll=2) | ||
147 | |||
148 | self.assertTrue(("cpio" in element.get_attribute('value'), | ||
149 | "Imagefs not added into the textbox")) | ||
150 | checkbox.click() | ||
151 | self.assertTrue(("cpio" not in element.text), | ||
152 | "Image still present in the textbox") | ||
153 | |||
    def test_set_download_dir(self):
        """
        Validate the allowed and disallowed types in the directory field for
        DL_DIR
        """
        self._navigate_bbv_page()

        # activate the input to edit download dir
        try:
            change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2)
        except TimeoutException:
            # If download dir is not displayed, test is skipped
            change_dl_dir_btn = None

        if change_dl_dir_btn:
            # NOTE(review): this second wait is redundant — the icon was
            # already waited on (and bound) just above.
            change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2)
            change_dl_dir_btn.click()

            # downloads dir path doesn't start with / or ${...}
            # -> the "invalid initial character" hint must appear
            input_field = self.wait_until_visible('#new-dl_dir', poll=2)
            input_field.clear()
            self.enter_text('#new-dl_dir', 'home/foo')
            element = self.wait_until_visible('#hintError-initialChar-dl_dir', poll=2)

            msg = 'downloads directory path starts with invalid character but ' \
                'treated as valid'
            self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)

            # downloads dir path has a space -> "invalid characters" hint
            self.driver.find_element(By.ID, 'new-dl_dir').clear()
            self.enter_text('#new-dl_dir', '/foo/bar a')

            element = self.wait_until_visible('#hintError-dl_dir', poll=2)
            msg = 'downloads directory path characters invalid but treated as valid'
            self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)

            # downloads dir path starts with ${...} but has a space
            self.driver.find_element(By.ID,'new-dl_dir').clear()
            self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')

            element = self.wait_until_visible('#hintError-dl_dir', poll=2)
            msg = 'downloads directory path characters invalid but treated as valid'
            self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)

            # downloads dir path starts with / -> valid, hint stays hidden
            self.driver.find_element(By.ID,'new-dl_dir').clear()
            self.enter_text('#new-dl_dir', '/bar/foo')

            hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
            self.assertEqual(hidden_element.is_displayed(), False,
                'downloads directory path valid but treated as invalid')

            # downloads dir path starts with ${...} -> valid, hint stays hidden
            self.driver.find_element(By.ID,'new-dl_dir').clear()
            self.enter_text('#new-dl_dir', '${TOPDIR}/down')

            hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
            self.assertEqual(hidden_element.is_displayed(), False,
                'downloads directory path valid but treated as invalid')
213 | |||
    def test_set_sstate_dir(self):
        """
        Validate the allowed and disallowed types in the directory field for
        SSTATE_DIR
        """
        self._navigate_bbv_page()

        # activate the input to edit the sstate dir; if the icon never
        # appears the variable is not shown and the test degrades to a no-op
        try:
            btn_chg_sstate_dir = self.wait_until_visible(
                '#change-sstate_dir-icon',
                poll=2
            )
            self.click('#change-sstate_dir-icon')
        except TimeoutException:
            # If sstate_dir is not displayed, test is skipped
            btn_chg_sstate_dir = None

        if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed
            # path doesn't start with / or ${...}
            # -> the "invalid initial character" hint must appear
            input_field = self.wait_until_visible('#new-sstate_dir', poll=2)
            input_field.clear()
            self.enter_text('#new-sstate_dir', 'home/foo')
            element = self.wait_until_visible('#hintError-initialChar-sstate_dir', poll=2)

            msg = 'sstate directory path starts with invalid character but ' \
                'treated as valid'
            self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)

            # path has a space -> "invalid characters" hint
            self.driver.find_element(By.ID, 'new-sstate_dir').clear()
            self.enter_text('#new-sstate_dir', '/foo/bar a')

            element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
            msg = 'sstate directory path characters invalid but treated as valid'
            self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)

            # path starts with ${...} but has a space
            self.driver.find_element(By.ID,'new-sstate_dir').clear()
            self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')

            element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
            msg = 'sstate directory path characters invalid but treated as valid'
            self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)

            # path starts with / -> valid, hint stays hidden
            self.driver.find_element(By.ID,'new-sstate_dir').clear()
            self.enter_text('#new-sstate_dir', '/bar/foo')

            hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
            self.assertEqual(hidden_element.is_displayed(), False,
                'sstate directory path valid but treated as invalid')

            # paths starts with ${...} -> valid, hint stays hidden
            self.driver.find_element(By.ID, 'new-sstate_dir').clear()
            self.enter_text('#new-sstate_dir', '${TOPDIR}/down')

            hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
            self.assertEqual(hidden_element.is_displayed(), False,
                'sstate directory path valid but treated as invalid')
273 | |||
274 | def _change_bbv_value(self, **kwargs): | ||
275 | var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values() | ||
276 | """ Change bitbake variable value """ | ||
277 | self._navigate_bbv_page() | ||
278 | self.wait_until_visible(f'#{btn_id}', poll=2) | ||
279 | if kwargs.get('new_variable'): | ||
280 | self.find(f"#{btn_id}").clear() | ||
281 | self.enter_text(f"#{btn_id}", f"{var_name}") | ||
282 | else: | ||
283 | self.click(f'#{btn_id}') | ||
284 | self.wait_until_visible(f'#{input_id}', poll=2) | ||
285 | |||
286 | if kwargs.get('is_select'): | ||
287 | select = Select(self.find(f'#{input_id}')) | ||
288 | select.select_by_visible_text(value) | ||
289 | else: | ||
290 | self.find(f"#{input_id}").clear() | ||
291 | self.enter_text(f'#{input_id}', f'{value}') | ||
292 | self.click(f'#{save_btn}') | ||
293 | value_displayed = str(self.wait_until_visible(f'#{field}').text).lower() | ||
294 | msg = f'{var_name} variable not changed' | ||
295 | self.assertTrue(str(value).lower() in value_displayed, msg) | ||
296 | |||
297 | def test_change_distro_var(self): | ||
298 | """ Test changing distro variable """ | ||
299 | self._change_bbv_value( | ||
300 | var_name='DISTRO', | ||
301 | field='distro', | ||
302 | btn_id='change-distro-icon', | ||
303 | input_id='new-distro', | ||
304 | value='poky-changed', | ||
305 | save_btn="apply-change-distro", | ||
306 | ) | ||
307 | |||
308 | def test_set_image_install_append_var(self): | ||
309 | """ Test setting IMAGE_INSTALL:append variable """ | ||
310 | self._change_bbv_value( | ||
311 | var_name='IMAGE_INSTALL:append', | ||
312 | field='image_install', | ||
313 | btn_id='change-image_install-icon', | ||
314 | input_id='new-image_install', | ||
315 | value='bash, apt, busybox', | ||
316 | save_btn="apply-change-image_install", | ||
317 | ) | ||
318 | |||
319 | def test_set_package_classes_var(self): | ||
320 | """ Test setting PACKAGE_CLASSES variable """ | ||
321 | self._change_bbv_value( | ||
322 | var_name='PACKAGE_CLASSES', | ||
323 | field='package_classes', | ||
324 | btn_id='change-package_classes-icon', | ||
325 | input_id='package_classes-select', | ||
326 | value='package_deb', | ||
327 | save_btn="apply-change-package_classes", | ||
328 | is_select=True, | ||
329 | ) | ||
330 | |||
331 | def test_create_new_bbv(self): | ||
332 | """ Test creating new bitbake variable """ | ||
333 | self._change_bbv_value( | ||
334 | var_name='New_Custom_Variable', | ||
335 | field='configvar-list', | ||
336 | btn_id='variable', | ||
337 | input_id='value', | ||
338 | value='new variable value', | ||
339 | save_btn="add-configvar-button", | ||
340 | new_variable=True | ||
341 | ) | ||
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page.py b/bitbake/lib/toaster/tests/functional/test_project_page.py new file mode 100644 index 0000000000..adbe3587e4 --- /dev/null +++ b/bitbake/lib/toaster/tests/functional/test_project_page.py | |||
@@ -0,0 +1,792 @@ | |||
#! /usr/bin/env python3
2 | # BitBake Toaster UI tests implementation | ||
3 | # | ||
4 | # Copyright (C) 2023 Savoir-faire Linux | ||
5 | # | ||
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
8 | |||
9 | import os | ||
10 | import random | ||
11 | import string | ||
12 | from unittest import skip | ||
13 | import pytest | ||
14 | from django.urls import reverse | ||
15 | from django.utils import timezone | ||
16 | from selenium.webdriver.common.keys import Keys | ||
17 | from selenium.webdriver.support.select import Select | ||
18 | from selenium.common.exceptions import TimeoutException | ||
19 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | ||
20 | from orm.models import Build, Project, Target | ||
21 | from selenium.webdriver.common.by import By | ||
22 | |||
23 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled | ||
24 | |||
25 | |||
26 | @pytest.mark.django_db | ||
27 | @pytest.mark.order("last") | ||
28 | class TestProjectPage(SeleniumFunctionalTestCase): | ||
29 | project_id = None | ||
30 | PROJECT_NAME = 'TestProjectPage' | ||
31 | |||
    def _create_project(self, project_name):
        """ Create/Test new project using:
          - Project Name: Any string
          - Release: Any string
          - Merge Toaster settings: True or False
        """
        self.get(reverse('newproject'))
        self.wait_until_visible('#new-project-name')
        self.find("#new-project-name").send_keys(project_name)
        select = Select(self.find("#projectversion"))
        # release '3' — presumably a release id from the test fixtures;
        # TODO confirm against the fixture data
        select.select_by_value('3')

        # check merge toaster settings
        checkbox = self.find('.checkbox-mergeattr')
        if not checkbox.is_selected():
            checkbox.click()

        if self.PROJECT_NAME != 'TestProjectPage':
            # Reset project name if it's not the default one
            self.PROJECT_NAME = 'TestProjectPage'

        self.find("#create-project-button").click()

        try:
            # If the name already exists the duplicate-name hint appears;
            # fall back to opening the already-created project instead.
            self.wait_until_visible('#hint-error-project-name')
            url = reverse('project', args=(TestProjectPage.project_id, ))
            self.get(url)
            self.wait_until_visible('#config-nav', poll=3)
        except TimeoutException:
            # No duplicate-name error: creation succeeded and the project
            # page is already loading.
            self.wait_until_visible('#config-nav', poll=3)
62 | |||
63 | def _random_string(self, length): | ||
64 | return ''.join( | ||
65 | random.choice(string.ascii_letters) for _ in range(length) | ||
66 | ) | ||
67 | |||
    def _navigate_to_project_page(self):
        # Navigate to project page
        # First call creates the project and caches its id on the class;
        # later calls reuse the cached id and load the page directly.
        if TestProjectPage.project_id is None:
            self._create_project(project_name=self._random_string(10))
            current_url = self.driver.current_url
            TestProjectPage.project_id = get_projectId_from_url(current_url)
        else:
            url = reverse('project', args=(TestProjectPage.project_id,))
            self.get(url)
        self.wait_until_visible('#config-nav')
78 | |||
    def _get_create_builds(self, **kwargs):
        """ Create a build and return the build object """
        # parameters for builds to associate with the projects
        now = timezone.now()
        self.project1_build_success = {
            'project': Project.objects.get(id=TestProjectPage.project_id),
            'started_on': now,
            'completed_on': now,
            'outcome': Build.SUCCEEDED
        }

        self.project1_build_failure = {
            'project': Project.objects.get(id=TestProjectPage.project_id),
            'started_on': now,
            'completed_on': now,
            'outcome': Build.FAILED
        }
        build1 = Build.objects.create(**self.project1_build_success)
        build2 = Build.objects.create(**self.project1_build_failure)

        # add some targets to these builds so they have recipe links
        # (and so we can find the row in the ToasterTable corresponding to
        # a particular build)
        Target.objects.create(build=build1, target='foo')
        Target.objects.create(build=build2, target='bar')

        if kwargs:
            # Create kwargs.get('success') builds with success status with target
            # and kwargs.get('failure') builds with failure status with target
            # completed_on is staggered i days back — presumably so that
            # date-sorted table columns get distinct values; confirm.
            for i in range(kwargs.get('success', 0)):
                now = timezone.now()
                self.project1_build_success['started_on'] = now
                self.project1_build_success[
                    'completed_on'] = now - timezone.timedelta(days=i)
                build = Build.objects.create(**self.project1_build_success)
                Target.objects.create(build=build,
                                      target=f'{i}_success_recipe',
                                      task=f'{i}_success_task')

            for i in range(kwargs.get('failure', 0)):
                now = timezone.now()
                self.project1_build_failure['started_on'] = now
                self.project1_build_failure[
                    'completed_on'] = now - timezone.timedelta(days=i)
                build = Build.objects.create(**self.project1_build_failure)
                Target.objects.create(build=build,
                                      target=f'{i}_fail_recipe',
                                      task=f'{i}_fail_task')
        # the two initial builds (one success, one failure)
        return build1, build2
128 | |||
129 | def _mixin_test_table_edit_column( | ||
130 | self, | ||
131 | table_id, | ||
132 | edit_btn_id, | ||
133 | list_check_box_id: list | ||
134 | ): | ||
135 | # Check edit column | ||
136 | edit_column = self.find(f'#{edit_btn_id}') | ||
137 | self.assertTrue(edit_column.is_displayed()) | ||
138 | edit_column.click() | ||
139 | # Check dropdown is visible | ||
140 | self.wait_until_visible('ul.dropdown-menu.editcol') | ||
141 | for check_box_id in list_check_box_id: | ||
142 | # Check that we can hide/show table column | ||
143 | check_box = self.find(f'#{check_box_id}') | ||
144 | th_class = str(check_box_id).replace('checkbox-', '') | ||
145 | if check_box.is_selected(): | ||
146 | # check if column is visible in table | ||
147 | self.assertTrue( | ||
148 | self.find( | ||
149 | f'#{table_id} thead th.{th_class}' | ||
150 | ).is_displayed(), | ||
151 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
152 | ) | ||
153 | check_box.click() | ||
154 | # check if column is hidden in table | ||
155 | self.assertFalse( | ||
156 | self.find( | ||
157 | f'#{table_id} thead th.{th_class}' | ||
158 | ).is_displayed(), | ||
159 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
160 | ) | ||
161 | else: | ||
162 | # check if column is hidden in table | ||
163 | self.assertFalse( | ||
164 | self.find( | ||
165 | f'#{table_id} thead th.{th_class}' | ||
166 | ).is_displayed(), | ||
167 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
168 | ) | ||
169 | check_box.click() | ||
170 | # check if column is visible in table | ||
171 | self.assertTrue( | ||
172 | self.find( | ||
173 | f'#{table_id} thead th.{th_class}' | ||
174 | ).is_displayed(), | ||
175 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
176 | ) | ||
177 | |||
178 | def _get_config_nav_item(self, index): | ||
179 | config_nav = self.find('#config-nav') | ||
180 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | ||
181 | |||
182 | def _navigate_to_config_nav(self, nav_id, nav_index): | ||
183 | # navigate to the project page | ||
184 | self._navigate_to_project_page() | ||
185 | # click on "Software recipe" tab | ||
186 | soft_recipe = self._get_config_nav_item(nav_index) | ||
187 | soft_recipe.click() | ||
188 | self.wait_until_visible(f'#{nav_id}') | ||
189 | |||
    def _mixin_test_table_show_rows(self, table_selector, **kwargs):
        """ Test the "show rows" page-size selector of a ToasterTable.

        table_selector: id of the table under test.
        kwargs['to_skip']: optional list of page sizes to skip.
        """
        def test_show_rows(row_to_show, show_row_link):
            # Check that we can show rows == row_to_show
            show_row_link.select_by_value(str(row_to_show))
            self.wait_until_visible(f'#{table_selector} tbody tr', poll=3)
            # check at least some rows are visible
            self.assertTrue(
                len(self.find_all(f'#{table_selector} tbody tr')) > 0
            )
        self.wait_until_present(f'#{table_selector} tbody tr')
        # page-size selectors appear above and below the table
        show_rows = self.driver.find_elements(
            By.XPATH,
            f'//select[@class="form-control pagesize-{table_selector}"]'
        )
        rows_to_show = [10, 25, 50, 100, 150]
        to_skip = kwargs.get('to_skip', [])
        # Check show rows
        for show_row_link in show_rows:
            show_row_link = Select(show_row_link)
            for row_to_show in rows_to_show:
                if row_to_show not in to_skip:
                    test_show_rows(row_to_show, show_row_link)
213 | |||
214 | def _mixin_test_table_search_input(self, **kwargs): | ||
215 | input_selector, input_text, searchBtn_selector, table_selector, *_ = kwargs.values() | ||
216 | # Test search input | ||
217 | self.wait_until_visible(f'#{input_selector}') | ||
218 | recipe_input = self.find(f'#{input_selector}') | ||
219 | recipe_input.send_keys(input_text) | ||
220 | self.find(f'#{searchBtn_selector}').click() | ||
221 | self.wait_until_visible(f'#{table_selector} tbody tr') | ||
222 | rows = self.find_all(f'#{table_selector} tbody tr') | ||
223 | self.assertTrue(len(rows) > 0) | ||
224 | |||
225 | def test_create_project(self): | ||
226 | """ Create/Test new project using: | ||
227 | - Project Name: Any string | ||
228 | - Release: Any string | ||
229 | - Merge Toaster settings: True or False | ||
230 | """ | ||
231 | self._create_project(project_name=self.PROJECT_NAME) | ||
232 | |||
    def test_image_recipe_editColumn(self):
        """ Test the edit column feature in image recipe table on project page """
        # seed the project with builds so the recipe table has rows
        self._get_create_builds(success=10, failure=10)

        url = reverse('projectimagerecipes', args=(TestProjectPage.project_id,))
        self.get(url)
        self.wait_until_present('#imagerecipestable tbody tr')

        # the toggleable columns of the image recipes ToasterTable
        column_list = [
            'get_description_or_summary', 'layer_version__get_vcs_reference',
            'layer_version__layer__name', 'license', 'recipe-file', 'section',
            'version'
        ]

        # Check that we can hide the edit column
        self._mixin_test_table_edit_column(
            'imagerecipestable',
            'edit-columns-button',
            [f'checkbox-{column}' for column in column_list]
        )
253 | |||
    def test_page_header_on_project_page(self):
        """ Check page header in project page:
          - AT LEFT -> Logo of Yocto project, displayed, clickable
          - "Toaster"+" Information icon", displayed, clickable
          - "Server Icon" + "All builds", displayed, clickable
          - "Directory Icon" + "All projects", displayed, clickable
          - "Book Icon" + "Documentation", displayed, clickable
          - AT RIGHT -> button "New project", displayed, clickable
        """
        # navigate to the project page
        self._navigate_to_project_page()

        # check page header
        # AT LEFT -> Logo of Yocto project
        logo = self.driver.find_element(
            By.XPATH,
            "//div[@class='toaster-navbar-brand']",
        )
        logo_img = logo.find_element(By.TAG_NAME, 'img')
        self.assertTrue(logo_img.is_displayed(),
                        'Logo of Yocto project not found')
        self.assertTrue(
            '/static/img/logo.png' in str(logo_img.get_attribute('src')),
            'Logo of Yocto project not found'
        )
        # "Toaster"+" Information icon", clickable
        toaster = self.driver.find_element(
            By.XPATH,
            "//div[@class='toaster-navbar-brand']//a[@class='brand']",
        )
        self.assertTrue(toaster.is_displayed(), 'Toaster not found')
        self.assertTrue(toaster.text == 'Toaster')
        info_sign = self.find('.glyphicon-info-sign')
        self.assertTrue(info_sign.is_displayed())

        # "Server Icon" + "All builds"
        all_builds = self.find('#navbar-all-builds')
        all_builds_link = all_builds.find_element(By.TAG_NAME, 'a')
        self.assertTrue("All builds" in all_builds_link.text)
        self.assertTrue(
            '/toastergui/builds/' in str(all_builds_link.get_attribute('href'))
        )
        server_icon = all_builds.find_element(By.TAG_NAME, 'i')
        self.assertTrue(
            server_icon.get_attribute('class') == 'glyphicon glyphicon-tasks'
        )
        self.assertTrue(server_icon.is_displayed())

        # "Directory Icon" + "All projects"
        all_projects = self.find('#navbar-all-projects')
        all_projects_link = all_projects.find_element(By.TAG_NAME, 'a')
        self.assertTrue("All projects" in all_projects_link.text)
        self.assertTrue(
            '/toastergui/projects/' in str(all_projects_link.get_attribute(
                'href'))
        )
        dir_icon = all_projects.find_element(By.TAG_NAME, 'i')
        self.assertTrue(
            dir_icon.get_attribute('class') == 'icon-folder-open'
        )
        self.assertTrue(dir_icon.is_displayed())

        # "Book Icon" + "Documentation"
        toaster_docs_link = self.find('#navbar-docs')
        toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME,
                                                                'a')
        self.assertTrue("Documentation" in toaster_docs_link_link.text)
        self.assertTrue(
            toaster_docs_link_link.get_attribute('href') == 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual'
        )
        book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i')
        self.assertTrue(
            book_icon.get_attribute('class') == 'glyphicon glyphicon-book'
        )
        self.assertTrue(book_icon.is_displayed())

        # AT RIGHT -> button "New project"
        # clicking it must navigate away to the new-project form
        new_project_button = self.find('#new-project-button')
        self.assertTrue(new_project_button.is_displayed())
        self.assertTrue(new_project_button.text == 'New project')
        new_project_button.click()
        self.assertTrue(
            '/toastergui/newproject/' in str(self.driver.current_url)
        )
338 | |||
339 | def test_edit_project_name(self): | ||
340 | """ Test edit project name: | ||
341 | - Click on "Edit" icon button | ||
342 | - Change project name | ||
343 | - Click on "Save" button | ||
344 | - Check project name is changed | ||
345 | """ | ||
346 | # navigate to the project page | ||
347 | self._navigate_to_project_page() | ||
348 | |||
349 | # click on "Edit" icon button | ||
350 | self.wait_until_visible('#project-name-container') | ||
351 | edit_button = self.find('#project-change-form-toggle') | ||
352 | edit_button.click() | ||
353 | project_name_input = self.find('#project-name-change-input') | ||
354 | self.assertTrue(project_name_input.is_displayed()) | ||
355 | project_name_input.clear() | ||
356 | project_name_input.send_keys('New Name') | ||
357 | self.find('#project-name-change-btn').click() | ||
358 | |||
359 | # check project name is changed | ||
360 | self.wait_until_visible('#project-name-container') | ||
361 | self.assertTrue( | ||
362 | 'New Name' in str(self.find('#project-name-container').text) | ||
363 | ) | ||
364 | |||
    def test_project_page_tabs(self):
        """ Test project tabs:
          - "configuration" tab
          - "Builds" tab
          - "Import layers" tab
          - "New custom image" tab
          Check search box used to build recipes
        """
        # navigate to the project page
        self._navigate_to_project_page()

        # check "configuration" tab (active by default on the project page)
        self.wait_until_visible('#topbar-configuration-tab')
        config_tab = self.find('#topbar-configuration-tab')
        self.assertTrue(config_tab.get_attribute('class') == 'active')
        self.assertTrue('Configuration' in str(config_tab.text))
        self.assertTrue(
            f"/toastergui/project/{TestProjectPage.project_id}" in str(self.driver.current_url)
        )

        def get_tabs():
            # tabs links list
            return self.driver.find_elements(
                By.XPATH,
                '//div[@id="project-topbar"]//li'
            )

        def check_tab_link(tab_index, tab_name, url):
            # re-fetch the tab after navigation: the previous element
            # reference is stale once the page has changed
            tab = get_tabs()[tab_index]
            tab_link = tab.find_element(By.TAG_NAME, 'a')
            self.assertTrue(url in tab_link.get_attribute('href'))
            self.assertTrue(tab_name in tab_link.text)
            self.assertTrue(tab.get_attribute('class') == 'active')

        # check "Builds" tab
        builds_tab = get_tabs()[1]
        builds_tab.find_element(By.TAG_NAME, 'a').click()
        check_tab_link(
            1,
            'Builds',
            f"/toastergui/project/{TestProjectPage.project_id}/builds"
        )

        # check "Import layers" tab
        import_layers_tab = get_tabs()[2]
        import_layers_tab.find_element(By.TAG_NAME, 'a').click()
        check_tab_link(
            2,
            'Import layer',
            f"/toastergui/project/{TestProjectPage.project_id}/importlayer"
        )

        # check "New custom image" tab
        new_custom_image_tab = get_tabs()[3]
        new_custom_image_tab.find_element(By.TAG_NAME, 'a').click()
        check_tab_link(
            3,
            'New custom image',
            f"/toastergui/project/{TestProjectPage.project_id}/newcustomimage"
        )

        # check search box can be use to build recipes
        search_box = self.find('#build-input')
        search_box.send_keys('core-image-minimal')
        self.find('#build-button').click()
        self.wait_until_visible('#latest-builds')
        lastest_builds = self.driver.find_elements(
            By.XPATH,
            '//div[@id="latest-builds"]',
        )
        # the most recent build appears first in the latest-builds list
        last_build = lastest_builds[0]
        self.assertTrue(
            'core-image-minimal' in str(last_build.text)
        )
439 | |||
    def test_softwareRecipe_page(self):
        """ Test software recipe page
          - Check title "Compatible software recipes" is displayed
          - Check search input
          - Check "build recipe" button works
          - Check software recipe table feature(show/hide column, pagination)
        """
        # config-nav index 4 is the "Software recipes" entry
        self._navigate_to_config_nav('softwarerecipestable', 4)
        # check title "Compatible software recipes" is displayed
        self.assertTrue("Compatible software recipes" in self.get_page_source())
        # Test search input
        self._mixin_test_table_search_input(
            input_selector='search-input-softwarerecipestable',
            input_text='busybox',
            searchBtn_selector='search-submit-softwarerecipestable',
            table_selector='softwarerecipestable'
        )
        # check "build recipe" button works
        rows = self.find_all('#softwarerecipestable tbody tr')
        image_to_build = rows[0]
        build_btn = image_to_build.find_element(
            By.XPATH,
            '//td[@class="add-del-layers"]//a[1]'
        )
        build_btn.click()
        # wait for the build to reach one of the early lifecycle states,
        # then cancel it so the test does not run a full build
        build_state = wait_until_build(self, 'queued cloning starting parsing failed')
        lastest_builds = self.driver.find_elements(
            By.XPATH,
            '//div[@id="latest-builds"]/div'
        )
        self.assertTrue(len(lastest_builds) > 0)
        last_build = lastest_builds[0]
        cancel_button = last_build.find_element(
            By.XPATH,
            '//span[@class="cancel-build-btn pull-right alert-link"]',
        )
        cancel_button.click()
        if 'starting' not in build_state:  # change build state when cancelled in starting state
            wait_until_build_cancelled(self)

        # check software recipe table feature(show/hide column, pagination)
        self._navigate_to_config_nav('softwarerecipestable', 4)
        column_list = [
            'get_description_or_summary',
            'layer_version__get_vcs_reference',
            'layer_version__layer__name',
            'license',
            'recipe-file',
            'section',
            'version',
        ]
        self._mixin_test_table_edit_column(
            'softwarerecipestable',
            'edit-columns-button',
            [f'checkbox-{column}' for column in column_list]
        )
        self._navigate_to_config_nav('softwarerecipestable', 4)
        # check show rows(pagination); 150 rows may exceed the fixture data
        self._mixin_test_table_show_rows(
            table_selector='softwarerecipestable',
            to_skip=[150],
        )
502 | |||
503 | def test_machines_page(self): | ||
504 | """ Test Machine page | ||
505 | - Check if title "Compatible machines" is displayed | ||
506 | - Check search input | ||
507 | - Check "Select machine" button works | ||
508 | - Check "Add layer" button works | ||
509 | - Check Machine table feature(show/hide column, pagination) | ||
510 | """ | ||
511 | self._navigate_to_config_nav('machinestable', 5) | ||
512 | # check title "Compatible software recipes" is displayed | ||
513 | self.assertTrue("Compatible machines" in self.get_page_source()) | ||
514 | # Test search input | ||
515 | self._mixin_test_table_search_input( | ||
516 | input_selector='search-input-machinestable', | ||
517 | input_text='qemux86-64', | ||
518 | searchBtn_selector='search-submit-machinestable', | ||
519 | table_selector='machinestable' | ||
520 | ) | ||
521 | # check "Select machine" button works | ||
522 | rows = self.find_all('#machinestable tbody tr') | ||
523 | machine_to_select = rows[0] | ||
524 | select_btn = machine_to_select.find_element( | ||
525 | By.XPATH, | ||
526 | '//td[@class="add-del-layers"]//a[1]' | ||
527 | ) | ||
528 | select_btn.send_keys(Keys.RETURN) | ||
529 | self.wait_until_visible('#config-nav') | ||
530 | project_machine_name = self.find('#project-machine-name') | ||
531 | self.assertTrue( | ||
532 | 'qemux86-64' in project_machine_name.text | ||
533 | ) | ||
534 | # check "Add layer" button works | ||
535 | self._navigate_to_config_nav('machinestable', 5) | ||
536 | # Search for a machine whit layer not in project | ||
537 | self._mixin_test_table_search_input( | ||
538 | input_selector='search-input-machinestable', | ||
539 | input_text='qemux86-64-tpm2', | ||
540 | searchBtn_selector='search-submit-machinestable', | ||
541 | table_selector='machinestable' | ||
542 | ) | ||
543 | self.wait_until_visible('#machinestable tbody tr', poll=3) | ||
544 | rows = self.find_all('#machinestable tbody tr') | ||
545 | machine_to_add = rows[0] | ||
546 | add_btn = machine_to_add.find_element(By.XPATH, '//td[@class="add-del-layers"]') | ||
547 | add_btn.click() | ||
548 | self.wait_until_visible('#change-notification') | ||
549 | change_notification = self.find('#change-notification') | ||
550 | self.assertTrue( | ||
551 | f'You have added 1 layer to your project' in str(change_notification.text) | ||
552 | ) | ||
553 | # check Machine table feature(show/hide column, pagination) | ||
554 | self._navigate_to_config_nav('machinestable', 5) | ||
555 | column_list = [ | ||
556 | 'description', | ||
557 | 'layer_version__get_vcs_reference', | ||
558 | 'layer_version__layer__name', | ||
559 | 'machinefile', | ||
560 | ] | ||
561 | self._mixin_test_table_edit_column( | ||
562 | 'machinestable', | ||
563 | 'edit-columns-button', | ||
564 | [f'checkbox-{column}' for column in column_list] | ||
565 | ) | ||
566 | self._navigate_to_config_nav('machinestable', 5) | ||
567 | # check show rows(pagination) | ||
568 | self._mixin_test_table_show_rows( | ||
569 | table_selector='machinestable', | ||
570 | to_skip=[150], | ||
571 | ) | ||
572 | |||
573 | def test_layers_page(self): | ||
574 | """ Test layers page | ||
575 | - Check if title "Compatible layerss" is displayed | ||
576 | - Check search input | ||
577 | - Check "Add layer" button works | ||
578 | - Check "Remove layer" button works | ||
579 | - Check layers table feature(show/hide column, pagination) | ||
580 | """ | ||
581 | self._navigate_to_config_nav('layerstable', 6) | ||
582 | # check title "Compatible layers" is displayed | ||
583 | self.assertTrue("Compatible layers" in self.get_page_source()) | ||
584 | # Test search input | ||
585 | input_text='meta-tanowrt' | ||
586 | self._mixin_test_table_search_input( | ||
587 | input_selector='search-input-layerstable', | ||
588 | input_text=input_text, | ||
589 | searchBtn_selector='search-submit-layerstable', | ||
590 | table_selector='layerstable' | ||
591 | ) | ||
592 | # check "Add layer" button works | ||
593 | self.wait_until_visible('#layerstable tbody tr', poll=3) | ||
594 | rows = self.find_all('#layerstable tbody tr') | ||
595 | layer_to_add = rows[0] | ||
596 | add_btn = layer_to_add.find_element( | ||
597 | By.XPATH, | ||
598 | '//td[@class="add-del-layers"]' | ||
599 | ) | ||
600 | add_btn.click() | ||
601 | # check modal is displayed | ||
602 | self.wait_until_visible('#dependencies-modal', poll=3) | ||
603 | list_dependencies = self.find_all('#dependencies-list li') | ||
604 | # click on add-layers button | ||
605 | add_layers_btn = self.driver.find_element( | ||
606 | By.XPATH, | ||
607 | '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]' | ||
608 | ) | ||
609 | add_layers_btn.click() | ||
610 | self.wait_until_visible('#change-notification') | ||
611 | change_notification = self.find('#change-notification') | ||
612 | self.assertTrue( | ||
613 | f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies' in str(change_notification.text) | ||
614 | ) | ||
615 | # check "Remove layer" button works | ||
616 | self.wait_until_visible('#layerstable tbody tr', poll=3) | ||
617 | rows = self.find_all('#layerstable tbody tr') | ||
618 | layer_to_remove = rows[0] | ||
619 | remove_btn = layer_to_remove.find_element( | ||
620 | By.XPATH, | ||
621 | '//td[@class="add-del-layers"]' | ||
622 | ) | ||
623 | remove_btn.click() | ||
624 | self.wait_until_visible('#change-notification', poll=2) | ||
625 | change_notification = self.find('#change-notification') | ||
626 | self.assertTrue( | ||
627 | f'You have removed 1 layer from your project: {input_text}' in str(change_notification.text) | ||
628 | ) | ||
629 | # check layers table feature(show/hide column, pagination) | ||
630 | self._navigate_to_config_nav('layerstable', 6) | ||
631 | column_list = [ | ||
632 | 'dependencies', | ||
633 | 'revision', | ||
634 | 'layer__vcs_url', | ||
635 | 'git_subdir', | ||
636 | 'layer__summary', | ||
637 | ] | ||
638 | self._mixin_test_table_edit_column( | ||
639 | 'layerstable', | ||
640 | 'edit-columns-button', | ||
641 | [f'checkbox-{column}' for column in column_list] | ||
642 | ) | ||
643 | self._navigate_to_config_nav('layerstable', 6) | ||
644 | # check show rows(pagination) | ||
645 | self._mixin_test_table_show_rows( | ||
646 | table_selector='layerstable', | ||
647 | to_skip=[150], | ||
648 | ) | ||
649 | |||
650 | def test_distro_page(self): | ||
651 | """ Test distros page | ||
652 | - Check if title "Compatible distros" is displayed | ||
653 | - Check search input | ||
654 | - Check "Add layer" button works | ||
655 | - Check distro table feature(show/hide column, pagination) | ||
656 | """ | ||
657 | self._navigate_to_config_nav('distrostable', 7) | ||
658 | # check title "Compatible distros" is displayed | ||
659 | self.assertTrue("Compatible Distros" in self.get_page_source()) | ||
660 | # Test search input | ||
661 | input_text='poky-altcfg' | ||
662 | self._mixin_test_table_search_input( | ||
663 | input_selector='search-input-distrostable', | ||
664 | input_text=input_text, | ||
665 | searchBtn_selector='search-submit-distrostable', | ||
666 | table_selector='distrostable' | ||
667 | ) | ||
668 | # check "Add distro" button works | ||
669 | rows = self.find_all('#distrostable tbody tr') | ||
670 | distro_to_add = rows[0] | ||
671 | add_btn = distro_to_add.find_element( | ||
672 | By.XPATH, | ||
673 | '//td[@class="add-del-layers"]//a[1]' | ||
674 | ) | ||
675 | add_btn.click() | ||
676 | self.wait_until_visible('#change-notification', poll=2) | ||
677 | change_notification = self.find('#change-notification') | ||
678 | self.assertTrue( | ||
679 | f'You have changed the distro to: {input_text}' in str(change_notification.text) | ||
680 | ) | ||
681 | # check distro table feature(show/hide column, pagination) | ||
682 | self._navigate_to_config_nav('distrostable', 7) | ||
683 | column_list = [ | ||
684 | 'description', | ||
685 | 'templatefile', | ||
686 | 'layer_version__get_vcs_reference', | ||
687 | 'layer_version__layer__name', | ||
688 | ] | ||
689 | self._mixin_test_table_edit_column( | ||
690 | 'distrostable', | ||
691 | 'edit-columns-button', | ||
692 | [f'checkbox-{column}' for column in column_list] | ||
693 | ) | ||
694 | self._navigate_to_config_nav('distrostable', 7) | ||
695 | # check show rows(pagination) | ||
696 | self._mixin_test_table_show_rows( | ||
697 | table_selector='distrostable', | ||
698 | to_skip=[150], | ||
699 | ) | ||
700 | |||
701 | def test_single_layer_page(self): | ||
702 | """ Test layer page | ||
703 | - Check if title is displayed | ||
704 | - Check add/remove layer button works | ||
705 | - Check tabs(layers, recipes, machines) are displayed | ||
706 | - Check left section is displayed | ||
707 | - Check layer name | ||
708 | - Check layer summary | ||
709 | - Check layer description | ||
710 | """ | ||
711 | url = reverse("layerdetails", args=(TestProjectPage.project_id, 8)) | ||
712 | self.get(url) | ||
713 | self.wait_until_visible('.page-header') | ||
714 | # check title is displayed | ||
715 | self.assertTrue(self.find('.page-header h1').is_displayed()) | ||
716 | |||
717 | # check add layer button works | ||
718 | remove_layer_btn = self.find('#add-remove-layer-btn') | ||
719 | remove_layer_btn.click() | ||
720 | self.wait_until_visible('#change-notification', poll=2) | ||
721 | change_notification = self.find('#change-notification') | ||
722 | self.assertTrue( | ||
723 | f'You have removed 1 layer from your project' in str(change_notification.text) | ||
724 | ) | ||
725 | # check add layer button works, 18 is the random layer id | ||
726 | add_layer_btn = self.find('#add-remove-layer-btn') | ||
727 | add_layer_btn.click() | ||
728 | self.wait_until_visible('#change-notification') | ||
729 | change_notification = self.find('#change-notification') | ||
730 | self.assertTrue( | ||
731 | f'You have added 1 layer to your project' in str(change_notification.text) | ||
732 | ) | ||
733 | # check tabs(layers, recipes, machines) are displayed | ||
734 | tabs = self.find_all('.nav-tabs li') | ||
735 | self.assertEqual(len(tabs), 3) | ||
736 | # Check first tab | ||
737 | tabs[0].click() | ||
738 | self.assertTrue( | ||
739 | 'active' in str(self.find('#information').get_attribute('class')) | ||
740 | ) | ||
741 | # Check second tab | ||
742 | tabs[1].click() | ||
743 | self.assertTrue( | ||
744 | 'active' in str(self.find('#recipes').get_attribute('class')) | ||
745 | ) | ||
746 | # Check third tab | ||
747 | tabs[2].click() | ||
748 | self.assertTrue( | ||
749 | 'active' in str(self.find('#machines').get_attribute('class')) | ||
750 | ) | ||
751 | # Check left section is displayed | ||
752 | section = self.find('.well') | ||
753 | # Check layer name | ||
754 | self.assertTrue( | ||
755 | section.find_element(By.XPATH, '//h2[1]').is_displayed() | ||
756 | ) | ||
757 | # Check layer summary | ||
758 | self.assertTrue("Summary" in section.text) | ||
759 | # Check layer description | ||
760 | self.assertTrue("Description" in section.text) | ||
761 | |||
762 | def test_single_recipe_page(self): | ||
763 | """ Test recipe page | ||
764 | - Check if title is displayed | ||
765 | - Check add recipe layer displayed | ||
766 | - Check left section is displayed | ||
767 | - Check recipe: name, summary, description, Version, Section, | ||
768 | License, Approx. packages included, Approx. size, Recipe file | ||
769 | """ | ||
770 | url = reverse("recipedetails", args=(TestProjectPage.project_id, 53428)) | ||
771 | self.get(url) | ||
772 | self.wait_until_visible('.page-header') | ||
773 | # check title is displayed | ||
774 | self.assertTrue(self.find('.page-header h1').is_displayed()) | ||
775 | # check add recipe layer displayed | ||
776 | add_recipe_layer_btn = self.find('#add-layer-btn') | ||
777 | self.assertTrue(add_recipe_layer_btn.is_displayed()) | ||
778 | # check left section is displayed | ||
779 | section = self.find('.well') | ||
780 | # Check recipe name | ||
781 | self.assertTrue( | ||
782 | section.find_element(By.XPATH, '//h2[1]').is_displayed() | ||
783 | ) | ||
784 | # Check recipe sections details info are displayed | ||
785 | self.assertTrue("Summary" in section.text) | ||
786 | self.assertTrue("Description" in section.text) | ||
787 | self.assertTrue("Version" in section.text) | ||
788 | self.assertTrue("Section" in section.text) | ||
789 | self.assertTrue("License" in section.text) | ||
790 | self.assertTrue("Approx. packages included" in section.text) | ||
791 | self.assertTrue("Approx. package size" in section.text) | ||
792 | self.assertTrue("Recipe file" in section.text) | ||
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py new file mode 100644 index 0000000000..eb905ddf3f --- /dev/null +++ b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py | |||
@@ -0,0 +1,528 @@ | |||
#!/usr/bin/env python3
2 | # BitBake Toaster UI tests implementation | ||
3 | # | ||
4 | # Copyright (C) 2023 Savoir-faire Linux | ||
5 | # | ||
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
8 | |||
9 | import string | ||
10 | import random | ||
11 | import pytest | ||
12 | from django.urls import reverse | ||
13 | from selenium.webdriver import Keys | ||
14 | from selenium.webdriver.support.select import Select | ||
15 | from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException | ||
16 | from orm.models import Project | ||
17 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | ||
18 | from selenium.webdriver.common.by import By | ||
19 | |||
20 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled | ||
21 | |||
22 | |||
23 | @pytest.mark.django_db | ||
24 | @pytest.mark.order("last") | ||
25 | class TestProjectConfigTab(SeleniumFunctionalTestCase): | ||
26 | PROJECT_NAME = 'TestProjectConfigTab' | ||
27 | project_id = None | ||
28 | |||
29 | def _create_project(self, project_name, **kwargs): | ||
30 | """ Create/Test new project using: | ||
31 | - Project Name: Any string | ||
32 | - Release: Any string | ||
33 | - Merge Toaster settings: True or False | ||
34 | """ | ||
35 | release = kwargs.get('release', '3') | ||
36 | self.get(reverse('newproject')) | ||
37 | self.wait_until_visible('#new-project-name') | ||
38 | self.find("#new-project-name").send_keys(project_name) | ||
39 | select = Select(self.find("#projectversion")) | ||
40 | select.select_by_value(release) | ||
41 | |||
42 | # check merge toaster settings | ||
43 | checkbox = self.find('.checkbox-mergeattr') | ||
44 | if not checkbox.is_selected(): | ||
45 | checkbox.click() | ||
46 | |||
47 | if self.PROJECT_NAME != 'TestProjectConfigTab': | ||
48 | # Reset project name if it's not the default one | ||
49 | self.PROJECT_NAME = 'TestProjectConfigTab' | ||
50 | |||
51 | self.find("#create-project-button").click() | ||
52 | |||
53 | try: | ||
54 | self.wait_until_visible('#hint-error-project-name', poll=3) | ||
55 | url = reverse('project', args=(TestProjectConfigTab.project_id, )) | ||
56 | self.get(url) | ||
57 | self.wait_until_visible('#config-nav', poll=3) | ||
58 | except TimeoutException: | ||
59 | self.wait_until_visible('#config-nav', poll=3) | ||
60 | |||
61 | def _random_string(self, length): | ||
62 | return ''.join( | ||
63 | random.choice(string.ascii_letters) for _ in range(length) | ||
64 | ) | ||
65 | |||
66 | def _navigate_to_project_page(self): | ||
67 | # Navigate to project page | ||
68 | if TestProjectConfigTab.project_id is None: | ||
69 | self._create_project(project_name=self._random_string(10)) | ||
70 | current_url = self.driver.current_url | ||
71 | TestProjectConfigTab.project_id = get_projectId_from_url( | ||
72 | current_url) | ||
73 | else: | ||
74 | url = reverse('project', args=(TestProjectConfigTab.project_id,)) | ||
75 | self.get(url) | ||
76 | self.wait_until_visible('#config-nav') | ||
77 | |||
78 | def _create_builds(self): | ||
79 | # check search box can be use to build recipes | ||
80 | search_box = self.find('#build-input') | ||
81 | search_box.send_keys('foo') | ||
82 | self.find('#build-button').click() | ||
83 | self.wait_until_present('#latest-builds') | ||
84 | # loop until reach the parsing state | ||
85 | wait_until_build(self, 'queued cloning starting parsing failed') | ||
86 | lastest_builds = self.driver.find_elements( | ||
87 | By.XPATH, | ||
88 | '//div[@id="latest-builds"]/div', | ||
89 | ) | ||
90 | last_build = lastest_builds[0] | ||
91 | self.assertTrue( | ||
92 | 'foo' in str(last_build.text) | ||
93 | ) | ||
94 | last_build = lastest_builds[0] | ||
95 | try: | ||
96 | cancel_button = last_build.find_element( | ||
97 | By.XPATH, | ||
98 | '//span[@class="cancel-build-btn pull-right alert-link"]', | ||
99 | ) | ||
100 | cancel_button.click() | ||
101 | except NoSuchElementException: | ||
102 | # Skip if the build is already cancelled | ||
103 | pass | ||
104 | wait_until_build_cancelled(self) | ||
105 | |||
106 | def _get_tabs(self): | ||
107 | # tabs links list | ||
108 | return self.driver.find_elements( | ||
109 | By.XPATH, | ||
110 | '//div[@id="project-topbar"]//li' | ||
111 | ) | ||
112 | |||
113 | def _get_config_nav_item(self, index): | ||
114 | config_nav = self.find('#config-nav') | ||
115 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | ||
116 | |||
117 | def test_project_config_nav(self): | ||
118 | """ Test project config tab navigation: | ||
119 | - Check if the menu is displayed and contains the right elements: | ||
120 | - Configuration | ||
121 | - COMPATIBLE METADATA | ||
122 | - Custom images | ||
123 | - Image recipes | ||
124 | - Software recipes | ||
125 | - Machines | ||
126 | - Layers | ||
127 | - Distro | ||
128 | - EXTRA CONFIGURATION | ||
129 | - Bitbake variables | ||
130 | - Actions | ||
131 | - Delete project | ||
132 | """ | ||
133 | self._navigate_to_project_page() | ||
134 | |||
135 | def _get_config_nav_item(index): | ||
136 | config_nav = self.find('#config-nav') | ||
137 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | ||
138 | |||
139 | def check_config_nav_item(index, item_name, url): | ||
140 | item = _get_config_nav_item(index) | ||
141 | self.assertTrue(item_name in item.text) | ||
142 | self.assertTrue(item.get_attribute('class') == 'active') | ||
143 | self.assertTrue(url in self.driver.current_url) | ||
144 | |||
145 | # check if the menu contains the right elements | ||
146 | # COMPATIBLE METADATA | ||
147 | compatible_metadata = _get_config_nav_item(1) | ||
148 | self.assertTrue( | ||
149 | "compatible metadata" in compatible_metadata.text.lower() | ||
150 | ) | ||
151 | # EXTRA CONFIGURATION | ||
152 | extra_configuration = _get_config_nav_item(8) | ||
153 | self.assertTrue( | ||
154 | "extra configuration" in extra_configuration.text.lower() | ||
155 | ) | ||
156 | # Actions | ||
157 | actions = _get_config_nav_item(10) | ||
158 | self.assertTrue("actions" in str(actions.text).lower()) | ||
159 | |||
160 | conf_nav_list = [ | ||
161 | # config | ||
162 | [0, 'Configuration', | ||
163 | f"/toastergui/project/{TestProjectConfigTab.project_id}"], | ||
164 | # custom images | ||
165 | [2, 'Custom images', | ||
166 | f"/toastergui/project/{TestProjectConfigTab.project_id}/customimages"], | ||
167 | # image recipes | ||
168 | [3, 'Image recipes', | ||
169 | f"/toastergui/project/{TestProjectConfigTab.project_id}/images"], | ||
170 | # software recipes | ||
171 | [4, 'Software recipes', | ||
172 | f"/toastergui/project/{TestProjectConfigTab.project_id}/softwarerecipes"], | ||
173 | # machines | ||
174 | [5, 'Machines', | ||
175 | f"/toastergui/project/{TestProjectConfigTab.project_id}/machines"], | ||
176 | # layers | ||
177 | [6, 'Layers', | ||
178 | f"/toastergui/project/{TestProjectConfigTab.project_id}/layers"], | ||
179 | # distro | ||
180 | [7, 'Distros', | ||
181 | f"/toastergui/project/{TestProjectConfigTab.project_id}/distros"], | ||
182 | # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTab.project_id}/configuration"], # bitbake variables | ||
183 | ] | ||
184 | for index, item_name, url in conf_nav_list: | ||
185 | item = _get_config_nav_item(index) | ||
186 | if item.get_attribute('class') != 'active': | ||
187 | item.click() | ||
188 | check_config_nav_item(index, item_name, url) | ||
189 | |||
190 | def test_image_recipe_editColumn(self): | ||
191 | """ Test the edit column feature in image recipe table on project page """ | ||
192 | def test_edit_column(check_box_id): | ||
193 | # Check that we can hide/show table column | ||
194 | check_box = self.find(f'#{check_box_id}') | ||
195 | th_class = str(check_box_id).replace('checkbox-', '') | ||
196 | if check_box.is_selected(): | ||
197 | # check if column is visible in table | ||
198 | self.assertTrue( | ||
199 | self.find( | ||
200 | f'#imagerecipestable thead th.{th_class}' | ||
201 | ).is_displayed(), | ||
202 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
203 | ) | ||
204 | check_box.click() | ||
205 | # check if column is hidden in table | ||
206 | self.assertFalse( | ||
207 | self.find( | ||
208 | f'#imagerecipestable thead th.{th_class}' | ||
209 | ).is_displayed(), | ||
210 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
211 | ) | ||
212 | else: | ||
213 | # check if column is hidden in table | ||
214 | self.assertFalse( | ||
215 | self.find( | ||
216 | f'#imagerecipestable thead th.{th_class}' | ||
217 | ).is_displayed(), | ||
218 | f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table" | ||
219 | ) | ||
220 | check_box.click() | ||
221 | # check if column is visible in table | ||
222 | self.assertTrue( | ||
223 | self.find( | ||
224 | f'#imagerecipestable thead th.{th_class}' | ||
225 | ).is_displayed(), | ||
226 | f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table" | ||
227 | ) | ||
228 | |||
229 | self._navigate_to_project_page() | ||
230 | # navigate to project image recipe page | ||
231 | recipe_image_page_link = self._get_config_nav_item(3) | ||
232 | recipe_image_page_link.click() | ||
233 | self.wait_until_present('#imagerecipestable tbody tr') | ||
234 | |||
235 | # Check edit column | ||
236 | edit_column = self.find('#edit-columns-button') | ||
237 | self.assertTrue(edit_column.is_displayed()) | ||
238 | edit_column.click() | ||
239 | # Check dropdown is visible | ||
240 | self.wait_until_visible('ul.dropdown-menu.editcol') | ||
241 | |||
242 | # Check that we can hide the edit column | ||
243 | test_edit_column('checkbox-get_description_or_summary') | ||
244 | test_edit_column('checkbox-layer_version__get_vcs_reference') | ||
245 | test_edit_column('checkbox-layer_version__layer__name') | ||
246 | test_edit_column('checkbox-license') | ||
247 | test_edit_column('checkbox-recipe-file') | ||
248 | test_edit_column('checkbox-section') | ||
249 | test_edit_column('checkbox-version') | ||
250 | |||
251 | def test_image_recipe_show_rows(self): | ||
252 | """ Test the show rows feature in image recipe table on project page """ | ||
253 | def test_show_rows(row_to_show, show_row_link): | ||
254 | # Check that we can show rows == row_to_show | ||
255 | show_row_link.select_by_value(str(row_to_show)) | ||
256 | self.wait_until_visible('#imagerecipestable tbody tr', poll=3) | ||
257 | # check at least some rows are visible | ||
258 | self.assertTrue( | ||
259 | len(self.find_all('#imagerecipestable tbody tr')) > 0 | ||
260 | ) | ||
261 | |||
262 | self._navigate_to_project_page() | ||
263 | # navigate to project image recipe page | ||
264 | recipe_image_page_link = self._get_config_nav_item(3) | ||
265 | recipe_image_page_link.click() | ||
266 | self.wait_until_present('#imagerecipestable tbody tr') | ||
267 | |||
268 | show_rows = self.driver.find_elements( | ||
269 | By.XPATH, | ||
270 | '//select[@class="form-control pagesize-imagerecipestable"]' | ||
271 | ) | ||
272 | # Check show rows | ||
273 | for show_row_link in show_rows: | ||
274 | show_row_link = Select(show_row_link) | ||
275 | test_show_rows(10, show_row_link) | ||
276 | test_show_rows(25, show_row_link) | ||
277 | test_show_rows(50, show_row_link) | ||
278 | test_show_rows(100, show_row_link) | ||
279 | test_show_rows(150, show_row_link) | ||
280 | |||
    def test_project_config_tab_right_section(self):
        """ Test project config tab right section contains five blocks:
        - Machine:
            - check 'Machine' is displayed
            - check can change Machine
        - Distro:
            - check 'Distro' is displayed
            - check can change Distro
        - Most built recipes:
            - check 'Most built recipes' is displayed
            - check can select a recipe and build it
        - Project release:
            - check 'Project release' is displayed
            - check project has right release displayed
        - Layers:
            - check can add a layer if exists
            - check at least three layers are displayed
                - openembedded-core
                - meta-poky
                - meta-yocto-bsp
        """
        # Create a new project for this test
        project_name = self._random_string(10)
        self._create_project(project_name=project_name)
        # check if the menu is displayed
        self.wait_until_visible('#project-page')
        # right-hand column of the project page (positional XPath: the page
        # layout puts the layers block in the second top-level div)
        block_l = self.driver.find_element(
            By.XPATH, '//*[@id="project-page"]/div[2]')
        # fourth block of the left column holds the project release info
        project_release = self.driver.find_element(
            By.XPATH, '//*[@id="project-page"]/div[1]/div[4]')
        layers = block_l.find_element(By.ID, 'layer-container')

        # NOTE(review): the `self` parameter shadows the enclosing method's
        # `self`; both refer to the same test instance because every call
        # below passes the outer `self` explicitly.
        def check_machine_distro(self, item_name, new_item_name, block_id):
            # Edit the machine/distro field via its pencil toggle and verify
            # both the displayed name and the change notification.
            block = self.find(f'#{block_id}')
            title = block.find_element(By.TAG_NAME, 'h3')
            self.assertTrue(item_name.capitalize() in title.text)
            edit_btn = self.find(f'#change-{item_name}-toggle')
            edit_btn.click()
            self.wait_until_visible(f'#{item_name}-change-input')
            name_input = self.find(f'#{item_name}-change-input')
            name_input.clear()
            name_input.send_keys(new_item_name)
            change_btn = self.find(f'#{item_name}-change-btn')
            change_btn.click()
            self.wait_until_visible(f'#project-{item_name}-name')
            # shadows the outer project_name local; only used for this assert
            project_name = self.find(f'#project-{item_name}-name')
            self.assertTrue(new_item_name in project_name.text)
            # check change notification is displayed
            change_notification = self.find('#change-notification')
            self.assertTrue(
                f'You have changed the {item_name} to: {new_item_name}' in change_notification.text
            )

        # Machine
        check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section')
        # Distro
        check_machine_distro(self, 'distro', 'poky-altcfg', 'distro-section')

        # Project release
        title = project_release.find_element(By.TAG_NAME, 'h3')
        self.assertTrue("Project release" in title.text)
        self.assertTrue(
            "Yocto Project master" in self.find('#project-release-title').text
        )
        # Layers
        title = layers.find_element(By.TAG_NAME, 'h3')
        self.assertTrue("Layers" in title.text)
        # check at least three layers are displayed
        # openembedded-core
        # meta-poky
        # meta-yocto-bsp
        layers_list = layers.find_element(By.ID, 'layers-in-project-list')
        layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
        # remove all layers except the first three layers (the trash-icon
        # span inside each extra <li> removes that layer)
        for i in range(3, len(layers_list_items)):
            layers_list_items[i].find_element(By.TAG_NAME, 'span').click()
        # check can add a layer if exists
        add_layer_input = layers.find_element(By.ID, 'layer-add-input')
        add_layer_input.send_keys('meta-oe')
        # wait for the typeahead suggestion dropdown to render
        self.wait_until_visible('#layer-container > form > div > span > div')
        dropdown_item = self.driver.find_element(
            By.XPATH,
            '//*[@id="layer-container"]/form/div/span/div'
        )
        try:
            dropdown_item.click()
        except ElementClickInterceptedException:
            # known flaky overlay issue: skip rather than fail spuriously
            self.skipTest(
                "layer-container dropdown item click intercepted. Element not properly visible.")
        add_layer_btn = layers.find_element(By.ID, 'add-layer-btn')
        add_layer_btn.click()
        self.wait_until_visible('#layers-in-project-list')
        # check layer is added: 3 kept above + the newly added one
        layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
        self.assertTrue(len(layers_list_items) == 4)
376 | |||
377 | def test_most_build_recipes(self): | ||
378 | """ Test most build recipes block contains""" | ||
379 | def rebuild_from_most_build_recipes(recipe_list_items): | ||
380 | checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input') | ||
381 | checkbox.click() | ||
382 | build_btn = self.find('#freq-build-btn') | ||
383 | build_btn.click() | ||
384 | self.wait_until_visible('#latest-builds') | ||
385 | wait_until_build(self, 'queued cloning starting parsing failed') | ||
386 | lastest_builds = self.driver.find_elements( | ||
387 | By.XPATH, | ||
388 | '//div[@id="latest-builds"]/div' | ||
389 | ) | ||
390 | self.assertTrue(len(lastest_builds) >= 2) | ||
391 | last_build = lastest_builds[0] | ||
392 | try: | ||
393 | cancel_button = last_build.find_element( | ||
394 | By.XPATH, | ||
395 | '//span[@class="cancel-build-btn pull-right alert-link"]', | ||
396 | ) | ||
397 | cancel_button.click() | ||
398 | except NoSuchElementException: | ||
399 | # Skip if the build is already cancelled | ||
400 | pass | ||
401 | wait_until_build_cancelled(self) | ||
402 | # Create a new project for remaining asserts | ||
403 | project_name = self._random_string(10) | ||
404 | self._create_project(project_name=project_name, release='2') | ||
405 | current_url = self.driver.current_url | ||
406 | TestProjectConfigTab.project_id = get_projectId_from_url(current_url) | ||
407 | url = current_url.split('?')[0] | ||
408 | |||
409 | # Create a new builds | ||
410 | self._create_builds() | ||
411 | |||
412 | # back to project page | ||
413 | self.driver.get(url) | ||
414 | |||
415 | self.wait_until_visible('#project-page', poll=3) | ||
416 | |||
417 | # Most built recipes | ||
418 | most_built_recipes = self.driver.find_element( | ||
419 | By.XPATH, '//*[@id="project-page"]/div[1]/div[3]') | ||
420 | title = most_built_recipes.find_element(By.TAG_NAME, 'h3') | ||
421 | self.assertTrue("Most built recipes" in title.text) | ||
422 | # check can select a recipe and build it | ||
423 | self.wait_until_visible('#freq-build-list', poll=3) | ||
424 | recipe_list = self.find('#freq-build-list') | ||
425 | recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li') | ||
426 | self.assertTrue( | ||
427 | len(recipe_list_items) > 0, | ||
428 | msg="Any recipes found in the most built recipes list", | ||
429 | ) | ||
430 | rebuild_from_most_build_recipes(recipe_list_items) | ||
431 | TestProjectConfigTab.project_id = None # reset project id | ||
432 | |||
433 | def test_project_page_tab_importlayer(self): | ||
434 | """ Test project page tab import layer """ | ||
435 | self._navigate_to_project_page() | ||
436 | # navigate to "Import layers" tab | ||
437 | import_layers_tab = self._get_tabs()[2] | ||
438 | import_layers_tab.find_element(By.TAG_NAME, 'a').click() | ||
439 | self.wait_until_visible('#layer-git-repo-url') | ||
440 | |||
441 | # Check git repo radio button | ||
442 | git_repo_radio = self.find('#git-repo-radio') | ||
443 | git_repo_radio.click() | ||
444 | |||
445 | # Set git repo url | ||
446 | input_repo_url = self.find('#layer-git-repo-url') | ||
447 | input_repo_url.send_keys('git://git.yoctoproject.org/meta-fake') | ||
448 | # Blur the input to trigger the validation | ||
449 | input_repo_url.send_keys(Keys.TAB) | ||
450 | |||
451 | # Check name is set | ||
452 | input_layer_name = self.find('#import-layer-name') | ||
453 | self.assertTrue(input_layer_name.get_attribute('value') == 'meta-fake') | ||
454 | |||
455 | # Set branch | ||
456 | input_branch = self.find('#layer-git-ref') | ||
457 | input_branch.send_keys('master') | ||
458 | |||
459 | # Import layer | ||
460 | self.find('#import-and-add-btn').click() | ||
461 | |||
462 | # Check layer is added | ||
463 | self.wait_until_visible('#layer-container') | ||
464 | block_l = self.driver.find_element( | ||
465 | By.XPATH, '//*[@id="project-page"]/div[2]') | ||
466 | layers = block_l.find_element(By.ID, 'layer-container') | ||
467 | layers_list = layers.find_element(By.ID, 'layers-in-project-list') | ||
468 | layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li') | ||
469 | self.assertTrue( | ||
470 | 'meta-fake' in str(layers_list_items[-1].text) | ||
471 | ) | ||
472 | |||
473 | def test_project_page_custom_image_no_image(self): | ||
474 | """ Test project page tab "New custom image" when no custom image """ | ||
475 | project_name = self._random_string(10) | ||
476 | self._create_project(project_name=project_name) | ||
477 | current_url = self.driver.current_url | ||
478 | TestProjectConfigTab.project_id = get_projectId_from_url(current_url) | ||
479 | # navigate to "Custom image" tab | ||
480 | custom_image_section = self._get_config_nav_item(2) | ||
481 | custom_image_section.click() | ||
482 | self.wait_until_visible('#empty-state-customimagestable') | ||
483 | |||
484 | # Check message when no custom image | ||
485 | self.assertTrue( | ||
486 | "You have not created any custom images yet." in str( | ||
487 | self.find('#empty-state-customimagestable').text | ||
488 | ) | ||
489 | ) | ||
490 | div_empty_msg = self.find('#empty-state-customimagestable') | ||
491 | link_create_custom_image = div_empty_msg.find_element( | ||
492 | By.TAG_NAME, 'a') | ||
493 | self.assertTrue(TestProjectConfigTab.project_id is not None) | ||
494 | self.assertTrue( | ||
495 | f"/toastergui/project/{TestProjectConfigTab.project_id}/newcustomimage" in str( | ||
496 | link_create_custom_image.get_attribute('href') | ||
497 | ) | ||
498 | ) | ||
499 | self.assertTrue( | ||
500 | "Create your first custom image" in str( | ||
501 | link_create_custom_image.text | ||
502 | ) | ||
503 | ) | ||
504 | TestProjectConfigTab.project_id = None # reset project id | ||
505 | |||
506 | def test_project_page_image_recipe(self): | ||
507 | """ Test project page section images | ||
508 | - Check image recipes are displayed | ||
509 | - Check search input | ||
510 | - Check image recipe build button works | ||
511 | - Check image recipe table features(show/hide column, pagination) | ||
512 | """ | ||
513 | self._navigate_to_project_page() | ||
514 | # navigate to "Images section" | ||
515 | images_section = self._get_config_nav_item(3) | ||
516 | images_section.click() | ||
517 | self.wait_until_visible('#imagerecipestable') | ||
518 | rows = self.find_all('#imagerecipestable tbody tr') | ||
519 | self.assertTrue(len(rows) > 0) | ||
520 | |||
521 | # Test search input | ||
522 | self.wait_until_visible('#search-input-imagerecipestable') | ||
523 | recipe_input = self.find('#search-input-imagerecipestable') | ||
524 | recipe_input.send_keys('core-image-minimal') | ||
525 | self.find('#search-submit-imagerecipestable').click() | ||
526 | self.wait_until_visible('#imagerecipestable tbody tr') | ||
527 | rows = self.find_all('#imagerecipestable tbody tr') | ||
528 | self.assertTrue(len(rows) > 0) | ||
diff --git a/bitbake/lib/toaster/tests/functional/utils.py b/bitbake/lib/toaster/tests/functional/utils.py new file mode 100644 index 0000000000..7269fa1805 --- /dev/null +++ b/bitbake/lib/toaster/tests/functional/utils.py | |||
@@ -0,0 +1,89 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # -*- coding: utf-8 -*- | ||
3 | # BitBake Toaster UI tests implementation | ||
4 | # | ||
5 | # Copyright (C) 2023 Savoir-faire Linux | ||
6 | # | ||
7 | # SPDX-License-Identifier: GPL-2.0-only | ||
8 | |||
9 | |||
10 | from time import sleep | ||
11 | from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException | ||
12 | from selenium.webdriver.common.by import By | ||
13 | |||
14 | from orm.models import Build | ||
15 | |||
16 | |||
def wait_until_build(test_instance, state):
    """Poll the latest-builds widget until the newest build reaches *state*.

    *state* may contain several space-separated state words (e.g.
    "queued parsing"); the first ``data-build-state`` value containing any
    of them is returned, lower-cased.  Returns None if the build reaches
    a 'failed' state first.

    Raises:
        TimeoutException: if no matching state is observed within the
            timeout.  (Previously this exception was raised inside the
            ``try`` and swallowed by the function's own handler, so a
            timeout silently returned None.)
    """
    timeout = 60  # seconds
    wanted = state.lower().split()
    for _ in range(timeout):
        try:
            last_build_state = test_instance.driver.find_element(
                By.XPATH,
                '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
            )
            build_state = str(
                last_build_state.get_attribute('data-build-state')).lower()
            if any(word in build_state for word in wanted):
                return build_state
            if 'failed' in build_state:
                return None
        except NoSuchElementException:
            # Widget not rendered yet -- fall through to the sleep below.
            # (The old 'continue' skipped both the counter and the sleep,
            # busy-looping forever while the element was missing.)
            pass
        sleep(1)  # take a breath and try again
    raise TimeoutException(
        f'Build did not reach {state} state within {timeout} seconds'
    )
44 | |||
def wait_until_build_cancelled(test_instance):
    """ Cancel build take a while sometime, the method is to wait driver action
    until build being cancelled

    Polls the latest-builds widget; when the build reports 'cancelling',
    the Build DB record's outcome is flipped to CANCELLED exactly once.
    Returns when the build reaches 'cancelled' or 'failed'.

    Raises:
        TimeoutException: if the build never reaches a terminal state
            within the timeout.  (Previously the exception was raised
            inside the ``try`` and swallowed by the function's own
            handler, so a timeout returned silently.)
    """
    timeout = 30  # seconds
    build = None
    for _ in range(timeout):
        try:
            last_build_state = test_instance.driver.find_element(
                By.XPATH,
                '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
            )
            build_state = str(
                last_build_state.get_attribute('data-build-state')).lower()
            if 'failed' in build_state:
                return
            if 'cancelling' in build_state and build is None:
                # Change build state to cancelled -- fetch and save the
                # Build object only once, not on every poll.
                build = Build.objects.last()
                build.outcome = Build.CANCELLED
                build.save()
            if 'cancelled' in build_state:
                return
        except (NoSuchElementException, StaleElementReferenceException):
            # Widget missing or going stale -- fall through to the sleep
            # below.  (The old 'continue' skipped the counter and the
            # sleep, busy-looping without ever timing out.)
            pass
        sleep(1)  # take a breath and try again
    raise TimeoutException(
        f'Build did not reach cancelled state within {timeout} seconds'
    )
82 | |||
def get_projectId_from_url(url):
    """Extract the numeric project id from a toastergui project URL.

    Accepts any of these shapes:
      http://domainename.com/toastergui/project/1656/whatever
      http://domainename.com/toastergui/project/1/
      http://domainename.com/toastergui/project/186
    """
    marker = '/toastergui/project/'
    assert marker in url, "URL is not valid"
    # everything after the marker, up to the next slash, is the id
    tail = url.split(marker)[1]
    return int(tail.split('/')[0])  # project_id
diff --git a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt index 4f9fcc46d2..71cc083436 100644 --- a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt +++ b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt | |||
@@ -1 +1,7 @@ | |||
1 | selenium==2.49.2 | 1 | selenium>=4.13.0 |
2 | pytest==7.4.2 | ||
3 | pytest-django==4.5.2 | ||
4 | pytest-env==1.1.0 | ||
5 | pytest-html==4.0.2 | ||
6 | pytest-metadata==3.0.0 | ||
7 | pytest-order==1.1.0 | ||
diff --git a/bitbake/lib/toaster/tests/views/test_views.py b/bitbake/lib/toaster/tests/views/test_views.py index 735d596bcc..e1adfcf86a 100644 --- a/bitbake/lib/toaster/tests/views/test_views.py +++ b/bitbake/lib/toaster/tests/views/test_views.py | |||
@@ -9,6 +9,8 @@ | |||
9 | 9 | ||
10 | """Test cases for Toaster GUI and ReST.""" | 10 | """Test cases for Toaster GUI and ReST.""" |
11 | 11 | ||
12 | import os | ||
13 | import pytest | ||
12 | from django.test import TestCase | 14 | from django.test import TestCase |
13 | from django.test.client import RequestFactory | 15 | from django.test.client import RequestFactory |
14 | from django.urls import reverse | 16 | from django.urls import reverse |
@@ -19,6 +21,7 @@ from orm.models import Layer_Version, Recipe | |||
19 | from orm.models import CustomImageRecipe | 21 | from orm.models import CustomImageRecipe |
20 | from orm.models import CustomImagePackage | 22 | from orm.models import CustomImagePackage |
21 | 23 | ||
24 | from bldcontrol.models import BuildEnvironment | ||
22 | import inspect | 25 | import inspect |
23 | import toastergui | 26 | import toastergui |
24 | 27 | ||
@@ -32,19 +35,32 @@ PROJECT_NAME2 = "test project 2" | |||
32 | CLI_BUILDS_PROJECT_NAME = 'Command line builds' | 35 | CLI_BUILDS_PROJECT_NAME = 'Command line builds' |
33 | 36 | ||
34 | 37 | ||
38 | |||
35 | class ViewTests(TestCase): | 39 | class ViewTests(TestCase): |
36 | """Tests to verify view APIs.""" | 40 | """Tests to verify view APIs.""" |
37 | 41 | ||
38 | fixtures = ['toastergui-unittest-data'] | 42 | fixtures = ['toastergui-unittest-data'] |
43 | builldir = os.environ.get('BUILDDIR') | ||
39 | 44 | ||
40 | def setUp(self): | 45 | def setUp(self): |
41 | 46 | ||
42 | self.project = Project.objects.first() | 47 | self.project = Project.objects.first() |
48 | |||
43 | self.recipe1 = Recipe.objects.get(pk=2) | 49 | self.recipe1 = Recipe.objects.get(pk=2) |
50 | # create a file and to recipe1 file_path | ||
51 | file_path = f"{self.builldir}/{self.recipe1.name.strip().replace(' ', '-')}.bb" | ||
52 | with open(file_path, 'w') as f: | ||
53 | f.write('foo') | ||
54 | self.recipe1.file_path = file_path | ||
55 | self.recipe1.save() | ||
56 | |||
44 | self.customr = CustomImageRecipe.objects.first() | 57 | self.customr = CustomImageRecipe.objects.first() |
45 | self.cust_package = CustomImagePackage.objects.first() | 58 | self.cust_package = CustomImagePackage.objects.first() |
46 | self.package = Package.objects.first() | 59 | self.package = Package.objects.first() |
47 | self.lver = Layer_Version.objects.first() | 60 | self.lver = Layer_Version.objects.first() |
61 | if BuildEnvironment.objects.count() == 0: | ||
62 | BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL) | ||
63 | |||
48 | 64 | ||
49 | def test_get_base_call_returns_html(self): | 65 | def test_get_base_call_returns_html(self): |
50 | """Basic test for all-projects view""" | 66 | """Basic test for all-projects view""" |
@@ -226,7 +242,7 @@ class ViewTests(TestCase): | |||
226 | recipe = CustomImageRecipe.objects.create( | 242 | recipe = CustomImageRecipe.objects.create( |
227 | name=name, project=self.project, | 243 | name=name, project=self.project, |
228 | base_recipe=self.recipe1, | 244 | base_recipe=self.recipe1, |
229 | file_path="/tmp/testing", | 245 | file_path=f"{self.builldir}/testing", |
230 | layer_version=self.customr.layer_version) | 246 | layer_version=self.customr.layer_version) |
231 | url = reverse('xhr_customrecipe_id', args=(recipe.id,)) | 247 | url = reverse('xhr_customrecipe_id', args=(recipe.id,)) |
232 | response = self.client.delete(url) | 248 | response = self.client.delete(url) |
@@ -297,7 +313,7 @@ class ViewTests(TestCase): | |||
297 | """Download the recipe file generated for the custom image""" | 313 | """Download the recipe file generated for the custom image""" |
298 | 314 | ||
299 | # Create a dummy recipe file for the custom image generation to read | 315 | # Create a dummy recipe file for the custom image generation to read |
300 | open("/tmp/a_recipe.bb", 'a').close() | 316 | open(f"{self.builldir}/a_recipe.bb", 'a').close() |
301 | response = self.client.get(reverse('customrecipedownload', | 317 | response = self.client.get(reverse('customrecipedownload', |
302 | args=(self.project.id, | 318 | args=(self.project.id, |
303 | self.customr.id))) | 319 | self.customr.id))) |
diff --git a/bitbake/lib/toaster/toastergui/api.py b/bitbake/lib/toaster/toastergui/api.py index b4cdc335ef..e367bd910e 100644 --- a/bitbake/lib/toaster/toastergui/api.py +++ b/bitbake/lib/toaster/toastergui/api.py | |||
@@ -11,7 +11,7 @@ import os | |||
11 | import re | 11 | import re |
12 | import logging | 12 | import logging |
13 | import json | 13 | import json |
14 | import subprocess | 14 | import glob |
15 | from collections import Counter | 15 | from collections import Counter |
16 | 16 | ||
17 | from orm.models import Project, ProjectTarget, Build, Layer_Version | 17 | from orm.models import Project, ProjectTarget, Build, Layer_Version |
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View): | |||
227 | # same logical name | 227 | # same logical name |
228 | # * Each project that uses a layer will have its own | 228 | # * Each project that uses a layer will have its own |
229 | # LayerVersion and Project Layer for it | 229 | # LayerVersion and Project Layer for it |
230 | # * During the Paroject delete process, when the last | 230 | # * During the Project delete process, when the last |
231 | # LayerVersion for a 'local_source_dir' layer is deleted | 231 | # LayerVersion for a 'local_source_dir' layer is deleted |
232 | # then the Layer record is deleted to remove orphans | 232 | # then the Layer record is deleted to remove orphans |
233 | # | 233 | # |
234 | 234 | ||
235 | def scan_layer_content(layer,layer_version): | 235 | def scan_layer_content(layer,layer_version): |
236 | # if this is a local layer directory, we can immediately scan its content | 236 | # if this is a local layer directory, we can immediately scan its content |
237 | if layer.local_source_dir: | 237 | if os.path.isdir(layer.local_source_dir): |
238 | try: | 238 | try: |
239 | # recipes-*/*/*.bb | 239 | # recipes-*/*/*.bb |
240 | cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb')) | 240 | recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb')) |
241 | recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read() | 241 | for recipe in recipes_list: |
242 | recipes_list = recipes_list.decode("utf-8").strip() | ||
243 | if recipes_list and 'No such' not in recipes_list: | ||
244 | for recipe in recipes_list.split('\n'): | 242 | for recipe in recipes_list.split('\n'): |
245 | recipe_path = recipe[recipe.rfind('recipes-'):] | 243 | recipe_path = recipe[recipe.rfind('recipes-'):] |
246 | recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','') | 244 | recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','') |
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version): | |||
260 | 258 | ||
261 | except Exception as e: | 259 | except Exception as e: |
262 | logger.warning("ERROR:scan_layer_content: %s" % e) | 260 | logger.warning("ERROR:scan_layer_content: %s" % e) |
261 | else: | ||
262 | logger.warning("ERROR: wrong path given") | ||
263 | raise KeyError("local_source_dir") | ||
263 | 264 | ||
264 | class XhrLayer(View): | 265 | class XhrLayer(View): |
265 | """ Delete, Get, Add and Update Layer information | 266 | """ Delete, Get, Add and Update Layer information |
@@ -456,15 +457,18 @@ class XhrLayer(View): | |||
456 | 'layerdetailurl': | 457 | 'layerdetailurl': |
457 | layer_dep.get_detailspage_url(project.pk)}) | 458 | layer_dep.get_detailspage_url(project.pk)}) |
458 | 459 | ||
459 | # Scan the layer's content and update components | 460 | # Only scan_layer_content if layer is local |
460 | scan_layer_content(layer,layer_version) | 461 | if layer_data.get('local_source_dir', None): |
462 | # Scan the layer's content and update components | ||
463 | scan_layer_content(layer,layer_version) | ||
461 | 464 | ||
462 | except Layer_Version.DoesNotExist: | 465 | except Layer_Version.DoesNotExist: |
463 | return error_response("layer-dep-not-found") | 466 | return error_response("layer-dep-not-found") |
464 | except Project.DoesNotExist: | 467 | except Project.DoesNotExist: |
465 | return error_response("project-not-found") | 468 | return error_response("project-not-found") |
466 | except KeyError: | 469 | except KeyError as e: |
467 | return error_response("incorrect-parameters") | 470 | _log("KeyError: %s" % e) |
471 | return error_response(f"incorrect-parameters") | ||
468 | 472 | ||
469 | return JsonResponse({'error': "ok", | 473 | return JsonResponse({'error': "ok", |
470 | 'imported_layer': { | 474 | 'imported_layer': { |
diff --git a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml index 4517ed1765..f626572fd1 100644 --- a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml +++ b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml | |||
@@ -6,10 +6,22 @@ | |||
6 | <field type="CharField" name="dirpath">b</field> | 6 | <field type="CharField" name="dirpath">b</field> |
7 | <field type="CharField" name="branch">a</field> | 7 | <field type="CharField" name="branch">a</field> |
8 | </object> | 8 | </object> |
9 | <object pk="1" model="orm.distro"> | ||
10 | <field type="DateTimeField" name="up_date"><None></None></field> | ||
11 | <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field> | ||
12 | <field type="CharField" name="name">poky_distro1</field> | ||
13 | <field type="CharField" name="description">poky_distro1 description</field> | ||
14 | </object> | ||
15 | <object pk="2" model="orm.distro"> | ||
16 | <field type="DateTimeField" name="up_date"><None></None></field> | ||
17 | <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field> | ||
18 | <field type="CharField" name="name">poky_distro2</field> | ||
19 | <field type="CharField" name="description">poky_distro2 description</field> | ||
20 | </object> | ||
9 | <object pk="1" model="orm.release"> | 21 | <object pk="1" model="orm.release"> |
10 | <field type="CharField" name="name">master</field> | 22 | <field type="CharField" name="name">foo_master</field> |
11 | <field type="CharField" name="description">master project</field> | 23 | <field type="CharField" name="description">master project</field> |
12 | <field to="orm.bitbake_version" name="bitbake_version">1</field> | 24 | <field to="orm.bitbakeversion" name="bitbake_version">1</field> |
13 | </object> | 25 | </object> |
14 | <object pk="1" model="orm.project"> | 26 | <object pk="1" model="orm.project"> |
15 | <field type="CharField" name="name">a test project</field> | 27 | <field type="CharField" name="name">a test project</field> |
@@ -34,12 +46,12 @@ | |||
34 | <object pk="1" model="orm.ProjectVariable"> | 46 | <object pk="1" model="orm.ProjectVariable"> |
35 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> | 47 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> |
36 | <field type="CharField" name="name">MACHINE</field> | 48 | <field type="CharField" name="name">MACHINE</field> |
37 | <field type="TextField" name="value">qemux86</field> | 49 | <field type="TextField" name="value">qemux86-64</field> |
38 | </object> | 50 | </object> |
39 | <object pk="2" model="orm.ProjectVariable"> | 51 | <object pk="2" model="orm.ProjectVariable"> |
40 | <field to="orm.project" name="project" rel="ManyToOneRel">2</field> | 52 | <field to="orm.project" name="project" rel="ManyToOneRel">2</field> |
41 | <field type="CharField" name="name">MACHINE</field> | 53 | <field type="CharField" name="name">MACHINE</field> |
42 | <field type="TextField" name="value">qemux86</field> | 54 | <field type="TextField" name="value">qemux86-64</field> |
43 | </object> | 55 | </object> |
44 | <object pk="1" model="orm.build"> | 56 | <object pk="1" model="orm.build"> |
45 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> | 57 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> |
@@ -67,7 +79,7 @@ | |||
67 | </object> | 79 | </object> |
68 | <object pk="3" model="orm.build"> | 80 | <object pk="3" model="orm.build"> |
69 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> | 81 | <field to="orm.project" name="project" rel="ManyToOneRel">1</field> |
70 | <field type="CharField" name="machine">qemux86</field> | 82 | <field type="CharField" name="machine">qemux86-64</field> |
71 | <field type="CharField" name="distro"></field> | 83 | <field type="CharField" name="distro"></field> |
72 | <field type="CharField" name="distro_version"></field> | 84 | <field type="CharField" name="distro_version"></field> |
73 | <field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field> | 85 | <field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field> |
@@ -79,7 +91,7 @@ | |||
79 | </object> | 91 | </object> |
80 | <object pk="4" model="orm.build"> | 92 | <object pk="4" model="orm.build"> |
81 | <field to="orm.project" name="project" rel="ManyToOneRel">2</field> | 93 | <field to="orm.project" name="project" rel="ManyToOneRel">2</field> |
82 | <field type="CharField" name="machine">qemux86</field> | 94 | <field type="CharField" name="machine">qemux86-64</field> |
83 | <field type="CharField" name="distro"></field> | 95 | <field type="CharField" name="distro"></field> |
84 | <field type="CharField" name="distro_version"></field> | 96 | <field type="CharField" name="distro_version"></field> |
85 | <field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field> | 97 | <field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field> |
diff --git a/bitbake/lib/toaster/toastergui/forms.py b/bitbake/lib/toaster/toastergui/forms.py new file mode 100644 index 0000000000..0f279e06c5 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/forms.py | |||
@@ -0,0 +1,14 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # -*- coding: utf-8 -*- | ||
3 | # BitBake Toaster UI tests implementation | ||
4 | # | ||
5 | # Copyright (C) 2023 Savoir-faire Linux | ||
6 | # | ||
7 | # SPDX-License-Identifier: GPL-2.0-only | ||
8 | # | ||
9 | |||
10 | from django import forms | ||
11 | from django.core.validators import FileExtensionValidator | ||
12 | |||
class LoadFileForm(forms.Form):
    """Upload form for a build eventlog export (JSON file).

    The widget's ``accept='.json'`` attribute only filters the browser's
    file picker; it is a client-side hint.  The extension is therefore
    also enforced server side with FileExtensionValidator (which was
    imported above but previously unused).
    """
    eventlog_file = forms.FileField(
        widget=forms.FileInput(attrs={'accept': '.json'}),
        validators=[FileExtensionValidator(allowed_extensions=['json'])],
    )
diff --git a/bitbake/lib/toaster/toastergui/static/css/default.css b/bitbake/lib/toaster/toastergui/static/css/default.css index 5cd7e211a0..284355e70b 100644 --- a/bitbake/lib/toaster/toastergui/static/css/default.css +++ b/bitbake/lib/toaster/toastergui/static/css/default.css | |||
@@ -367,3 +367,31 @@ h2.panel-title { font-size: 30px; } | |||
367 | } | 367 | } |
368 | } | 368 | } |
369 | /* End copied in from newer version of Font-Awesome 4.3.0 */ | 369 | /* End copied in from newer version of Font-Awesome 4.3.0 */ |
370 | |||
371 | |||
372 | #overlay { | ||
373 | display: flex; | ||
374 | position: fixed; | ||
375 | top: 0; | ||
376 | left: 0; | ||
377 | width: 100%; | ||
378 | height: 100%; | ||
379 | background-color: rgba(0, 0, 0, 0.7); | ||
380 | align-items: center; | ||
381 | justify-content: center; | ||
382 | z-index: 999; | ||
383 | } | ||
384 | |||
385 | .spinner { | ||
386 | border: 6px solid rgba(255, 255, 255, 0.3); | ||
387 | border-radius: 50%; | ||
388 | border-top: 6px solid #3498db; | ||
389 | width: 50px; | ||
390 | height: 50px; | ||
391 | animation: spin 1s linear infinite; | ||
392 | } | ||
393 | |||
394 | @keyframes spin { | ||
395 | 0% { transform: rotate(0deg); } | ||
396 | 100% { transform: rotate(360deg); } | ||
397 | } | ||
diff --git a/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css new file mode 100644 index 0000000000..c0a442ce07 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css | |||
@@ -0,0 +1 @@ | |||
:root{--dt-row-selected: 13, 110, 253;--dt-row-selected-text: 255, 255, 255;--dt-row-selected-link: 9, 10, 11;--dt-row-stripe: 0, 0, 0;--dt-row-hover: 0, 0, 0;--dt-column-ordering: 0, 0, 0;--dt-html-background: white}:root.dark{--dt-html-background: rgb(33, 37, 41)}table.dataTable td.dt-control{text-align:center;cursor:pointer}table.dataTable td.dt-control:before{display:inline-block;color:rgba(0, 0, 0, 0.5);content:"â–¶"}table.dataTable tr.dt-hasChild td.dt-control:before{content:"â–¼"}html.dark table.dataTable td.dt-control:before{color:rgba(255, 255, 255, 0.5)}html.dark table.dataTable tr.dt-hasChild td.dt-control:before{color:rgba(255, 255, 255, 0.5)}table.dataTable thead>tr>th.sorting,table.dataTable thead>tr>th.sorting_asc,table.dataTable thead>tr>th.sorting_desc,table.dataTable thead>tr>th.sorting_asc_disabled,table.dataTable thead>tr>th.sorting_desc_disabled,table.dataTable thead>tr>td.sorting,table.dataTable thead>tr>td.sorting_asc,table.dataTable thead>tr>td.sorting_desc,table.dataTable thead>tr>td.sorting_asc_disabled,table.dataTable thead>tr>td.sorting_desc_disabled{cursor:pointer;position:relative;padding-right:26px}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable 
thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after{position:absolute;display:block;opacity:.125;right:10px;line-height:9px;font-size:.8em}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:before{bottom:50%;content:"â–²";content:"â–²"/""}table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:after{top:50%;content:"â–¼";content:"â–¼"/""}table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:after{opacity:.6}table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting_asc_disabled:before{display:none}table.dataTable thead>tr>th:active,table.dataTable 
thead>tr>td:active{outline:none}div.dataTables_scrollBody>table.dataTable>thead>tr>th:before,div.dataTables_scrollBody>table.dataTable>thead>tr>th:after,div.dataTables_scrollBody>table.dataTable>thead>tr>td:before,div.dataTables_scrollBody>table.dataTable>thead>tr>td:after{display:none}div.dataTables_processing{position:absolute;top:50%;left:50%;width:200px;margin-left:-100px;margin-top:-26px;text-align:center;padding:2px;z-index:10}div.dataTables_processing>div:last-child{position:relative;width:80px;height:15px;margin:1em auto}div.dataTables_processing>div:last-child>div{position:absolute;top:0;width:13px;height:13px;border-radius:50%;background:rgb(13, 110, 253);background:rgb(var(--dt-row-selected));animation-timing-function:cubic-bezier(0, 1, 1, 0)}div.dataTables_processing>div:last-child>div:nth-child(1){left:8px;animation:datatables-loader-1 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(2){left:8px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(3){left:32px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(4){left:56px;animation:datatables-loader-3 .6s infinite}@keyframes datatables-loader-1{0%{transform:scale(0)}100%{transform:scale(1)}}@keyframes datatables-loader-3{0%{transform:scale(1)}100%{transform:scale(0)}}@keyframes datatables-loader-2{0%{transform:translate(0, 0)}100%{transform:translate(24px, 0)}}table.dataTable.nowrap th,table.dataTable.nowrap td{white-space:nowrap}table.dataTable th.dt-left,table.dataTable td.dt-left{text-align:left}table.dataTable th.dt-center,table.dataTable td.dt-center,table.dataTable td.dataTables_empty{text-align:center}table.dataTable th.dt-right,table.dataTable td.dt-right{text-align:right}table.dataTable th.dt-justify,table.dataTable td.dt-justify{text-align:justify}table.dataTable th.dt-nowrap,table.dataTable td.dt-nowrap{white-space:nowrap}table.dataTable thead th,table.dataTable thead 
td,table.dataTable tfoot th,table.dataTable tfoot td{text-align:left}table.dataTable thead th.dt-head-left,table.dataTable thead td.dt-head-left,table.dataTable tfoot th.dt-head-left,table.dataTable tfoot td.dt-head-left{text-align:left}table.dataTable thead th.dt-head-center,table.dataTable thead td.dt-head-center,table.dataTable tfoot th.dt-head-center,table.dataTable tfoot td.dt-head-center{text-align:center}table.dataTable thead th.dt-head-right,table.dataTable thead td.dt-head-right,table.dataTable tfoot th.dt-head-right,table.dataTable tfoot td.dt-head-right{text-align:right}table.dataTable thead th.dt-head-justify,table.dataTable thead td.dt-head-justify,table.dataTable tfoot th.dt-head-justify,table.dataTable tfoot td.dt-head-justify{text-align:justify}table.dataTable thead th.dt-head-nowrap,table.dataTable thead td.dt-head-nowrap,table.dataTable tfoot th.dt-head-nowrap,table.dataTable tfoot td.dt-head-nowrap{white-space:nowrap}table.dataTable tbody th.dt-body-left,table.dataTable tbody td.dt-body-left{text-align:left}table.dataTable tbody th.dt-body-center,table.dataTable tbody td.dt-body-center{text-align:center}table.dataTable tbody th.dt-body-right,table.dataTable tbody td.dt-body-right{text-align:right}table.dataTable tbody th.dt-body-justify,table.dataTable tbody td.dt-body-justify{text-align:justify}table.dataTable tbody th.dt-body-nowrap,table.dataTable tbody td.dt-body-nowrap{white-space:nowrap}table.dataTable{width:100%;margin:0 auto;clear:both;border-collapse:separate;border-spacing:0}table.dataTable thead th,table.dataTable tfoot th{font-weight:bold}table.dataTable>thead>tr>th,table.dataTable>thead>tr>td{padding:10px;border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable>thead>tr>th:active,table.dataTable>thead>tr>td:active{outline:none}table.dataTable>tfoot>tr>th,table.dataTable>tfoot>tr>td{padding:10px 10px 6px 10px;border-top:1px solid rgba(0, 0, 0, 0.3)}table.dataTable tbody tr{background-color:transparent}table.dataTable tbody 
tr.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.9);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.9);color:rgb(255, 255, 255);color:rgb(var(--dt-row-selected-text))}table.dataTable tbody tr.selected a{color:rgb(9, 10, 11);color:rgb(var(--dt-row-selected-link))}table.dataTable tbody th,table.dataTable tbody td{padding:8px 10px}table.dataTable.row-border>tbody>tr>th,table.dataTable.row-border>tbody>tr>td,table.dataTable.display>tbody>tr>th,table.dataTable.display>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.row-border>tbody>tr:first-child>th,table.dataTable.row-border>tbody>tr:first-child>td,table.dataTable.display>tbody>tr:first-child>th,table.dataTable.display>tbody>tr:first-child>td{border-top:none}table.dataTable.row-border>tbody>tr.selected+tr.selected>td,table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0262ef}table.dataTable.cell-border>tbody>tr>th,table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15);border-right:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr>th:first-child,table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr:first-child>th,table.dataTable.cell-border>tbody>tr:first-child>td{border-top:none}table.dataTable.stripe>tbody>tr.odd>*,table.dataTable.display>tbody>tr.odd>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.023);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-stripe), 0.023)}table.dataTable.stripe>tbody>tr.odd.selected>*,table.dataTable.display>tbody>tr.odd.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.923);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.923)}table.dataTable.hover>tbody>tr:hover>*,table.dataTable.display>tbody>tr:hover>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.035);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 
0.035)}table.dataTable.hover>tbody>tr.selected:hover>*,table.dataTable.display>tbody>tr.selected:hover>*{box-shadow:inset 0 0 0 9999px #0d6efd !important;box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 1) !important}table.dataTable.order-column>tbody tr>.sorting_1,table.dataTable.order-column>tbody tr>.sorting_2,table.dataTable.order-column>tbody tr>.sorting_3,table.dataTable.display>tbody tr>.sorting_1,table.dataTable.display>tbody tr>.sorting_2,table.dataTable.display>tbody tr>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.019)}table.dataTable.order-column>tbody tr.selected>.sorting_1,table.dataTable.order-column>tbody tr.selected>.sorting_2,table.dataTable.order-column>tbody tr.selected>.sorting_3,table.dataTable.display>tbody tr.selected>.sorting_1,table.dataTable.display>tbody tr.selected>.sorting_2,table.dataTable.display>tbody tr.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.odd>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.054);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.054)}table.dataTable.display>tbody>tr.odd>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.047);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.047)}table.dataTable.display>tbody>tr.odd>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.039);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.039)}table.dataTable.display>tbody>tr.odd.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.954);box-shadow:inset 0 0 0 9999px 
rgba(var(--dt-row-selected), 0.954)}table.dataTable.display>tbody>tr.odd.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.947);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.947)}table.dataTable.display>tbody>tr.odd.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.939);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.939)}table.dataTable.display>tbody>tr.even>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.019)}table.dataTable.display>tbody>tr.even>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.011);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.011)}table.dataTable.display>tbody>tr.even>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.003);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.003)}table.dataTable.display>tbody>tr.even.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.even.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.911);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.911)}table.dataTable.display>tbody>tr.even.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.903);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.903)}table.dataTable.display tbody 
tr:hover>.sorting_1,table.dataTable.order-column.hover tbody tr:hover>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.082);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.082)}table.dataTable.display tbody tr:hover>.sorting_2,table.dataTable.order-column.hover tbody tr:hover>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.074);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.074)}table.dataTable.display tbody tr:hover>.sorting_3,table.dataTable.order-column.hover tbody tr:hover>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.062);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.062)}table.dataTable.display tbody tr:hover.selected>.sorting_1,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.982);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.982)}table.dataTable.display tbody tr:hover.selected>.sorting_2,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.974);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.974)}table.dataTable.display tbody tr:hover.selected>.sorting_3,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.962);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.962)}table.dataTable.no-footer{border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable.compact thead th,table.dataTable.compact thead td,table.dataTable.compact tfoot th,table.dataTable.compact tfoot td,table.dataTable.compact tbody th,table.dataTable.compact tbody td{padding:4px}table.dataTable th,table.dataTable td{box-sizing:content-box}.dataTables_wrapper{position:relative;clear:both}.dataTables_wrapper .dataTables_length{float:left}.dataTables_wrapper .dataTables_length select{border:1px solid 
#aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;padding:4px}.dataTables_wrapper .dataTables_filter{float:right;text-align:right}.dataTables_wrapper .dataTables_filter input{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;margin-left:3px}.dataTables_wrapper .dataTables_info{clear:both;float:left;padding-top:.755em}.dataTables_wrapper .dataTables_paginate{float:right;text-align:right;padding-top:.25em}.dataTables_wrapper .dataTables_paginate .paginate_button{box-sizing:border-box;display:inline-block;min-width:1.5em;padding:.5em 1em;margin-left:2px;text-align:center;text-decoration:none !important;cursor:pointer;color:inherit !important;border:1px solid transparent;border-radius:2px;background:transparent}.dataTables_wrapper .dataTables_paginate .paginate_button.current,.dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{color:inherit !important;border:1px solid rgba(0, 0, 0, 0.3);background-color:rgba(0, 0, 0, 0.05);background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, rgba(230, 230, 230, 0.05)), color-stop(100%, rgba(0, 0, 0, 0.05)));background:-webkit-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-moz-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-ms-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-o-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:linear-gradient(to bottom, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button.disabled,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{cursor:default;color:#666 !important;border:1px solid transparent;background:transparent;box-shadow:none}.dataTables_wrapper .dataTables_paginate 
.paginate_button:hover{color:white !important;border:1px solid #111;background-color:#111;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111));background:-webkit-linear-gradient(top, #585858 0%, #111 100%);background:-moz-linear-gradient(top, #585858 0%, #111 100%);background:-ms-linear-gradient(top, #585858 0%, #111 100%);background:-o-linear-gradient(top, #585858 0%, #111 100%);background:linear-gradient(to bottom, #585858 0%, #111 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button:active{outline:none;background-color:#0c0c0c;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c));background:-webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%);box-shadow:inset 0 0 3px #111}.dataTables_wrapper .dataTables_paginate .ellipsis{padding:0 1em}.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter,.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_processing,.dataTables_wrapper .dataTables_paginate{color:inherit}.dataTables_wrapper .dataTables_scroll{clear:both}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody{-webkit-overflow-scrolling:touch}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td{vertical-align:middle}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll 
div.dataTables_scrollBody>table>thead>tr>td>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td>div.dataTables_sizing{height:0;overflow:hidden;margin:0 !important;padding:0 !important}.dataTables_wrapper.no-footer .dataTables_scrollBody{border-bottom:1px solid rgba(0, 0, 0, 0.3)}.dataTables_wrapper.no-footer div.dataTables_scrollHead table.dataTable,.dataTables_wrapper.no-footer div.dataTables_scrollBody>table{border-bottom:none}.dataTables_wrapper:after{visibility:hidden;display:block;content:"";clear:both;height:0}@media screen and (max-width: 767px){.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_paginate{float:none;text-align:center}.dataTables_wrapper .dataTables_paginate{margin-top:.5em}}@media screen and (max-width: 640px){.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter{float:none;text-align:center}.dataTables_wrapper .dataTables_filter{margin-top:.5em}}html.dark{--dt-row-hover: 255, 255, 255;--dt-row-stripe: 255, 255, 255;--dt-column-ordering: 255, 255, 255}html.dark table.dataTable>thead>tr>th,html.dark table.dataTable>thead>tr>td{border-bottom:1px solid rgb(89, 91, 94)}html.dark table.dataTable>thead>tr>th:active,html.dark table.dataTable>thead>tr>td:active{outline:none}html.dark table.dataTable>tfoot>tr>th,html.dark table.dataTable>tfoot>tr>td{border-top:1px solid rgb(89, 91, 94)}html.dark table.dataTable.row-border>tbody>tr>th,html.dark table.dataTable.row-border>tbody>tr>td,html.dark table.dataTable.display>tbody>tr>th,html.dark table.dataTable.display>tbody>tr>td{border-top:1px solid rgb(64, 67, 70)}html.dark table.dataTable.row-border>tbody>tr.selected+tr.selected>td,html.dark table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0257d5}html.dark table.dataTable.cell-border>tbody>tr>th,html.dark 
table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgb(64, 67, 70);border-right:1px solid rgb(64, 67, 70)}html.dark table.dataTable.cell-border>tbody>tr>th:first-child,html.dark table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgb(64, 67, 70)}html.dark .dataTables_wrapper .dataTables_filter input,html.dark .dataTables_wrapper .dataTables_length select{border:1px solid rgba(255, 255, 255, 0.2);background-color:var(--dt-html-background)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{border:1px solid rgb(89, 91, 94);background:rgba(255, 255, 255, 0.15)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{color:#666 !important}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:hover{border:1px solid rgb(53, 53, 53);background:rgb(53, 53, 53)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:active{background:#3a3a3a} | |||
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js index d47d640feb..170bd608f7 100644 --- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js +++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js | |||
@@ -1,6 +1,6 @@ | |||
1 | /*! | 1 | /*! |
2 | * Bootstrap v3.3.6 (http://getbootstrap.com) | 2 | * Bootstrap v3.4.1 (https://getbootstrap.com/) |
3 | * Copyright 2011-2016 Twitter, Inc. | 3 | * Copyright 2011-2019 Twitter, Inc. |
4 | * Licensed under the MIT license | 4 | * Licensed under the MIT license |
5 | */ | 5 | */ |
6 | 6 | ||
@@ -11,16 +11,16 @@ if (typeof jQuery === 'undefined') { | |||
11 | +function ($) { | 11 | +function ($) { |
12 | 'use strict'; | 12 | 'use strict'; |
13 | var version = $.fn.jquery.split(' ')[0].split('.') | 13 | var version = $.fn.jquery.split(' ')[0].split('.') |
14 | if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) { | 14 | if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) { |
15 | throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3') | 15 | throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4') |
16 | } | 16 | } |
17 | }(jQuery); | 17 | }(jQuery); |
18 | 18 | ||
19 | /* ======================================================================== | 19 | /* ======================================================================== |
20 | * Bootstrap: transition.js v3.3.6 | 20 | * Bootstrap: transition.js v3.4.1 |
21 | * http://getbootstrap.com/javascript/#transitions | 21 | * https://getbootstrap.com/docs/3.4/javascript/#transitions |
22 | * ======================================================================== | 22 | * ======================================================================== |
23 | * Copyright 2011-2015 Twitter, Inc. | 23 | * Copyright 2011-2019 Twitter, Inc. |
24 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 24 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
25 | * ======================================================================== */ | 25 | * ======================================================================== */ |
26 | 26 | ||
@@ -28,7 +28,7 @@ if (typeof jQuery === 'undefined') { | |||
28 | +function ($) { | 28 | +function ($) { |
29 | 'use strict'; | 29 | 'use strict'; |
30 | 30 | ||
31 | // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/) | 31 | // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/) |
32 | // ============================================================ | 32 | // ============================================================ |
33 | 33 | ||
34 | function transitionEnd() { | 34 | function transitionEnd() { |
@@ -50,7 +50,7 @@ if (typeof jQuery === 'undefined') { | |||
50 | return false // explicit for ie8 ( ._.) | 50 | return false // explicit for ie8 ( ._.) |
51 | } | 51 | } |
52 | 52 | ||
53 | // http://blog.alexmaccaw.com/css-transitions | 53 | // https://blog.alexmaccaw.com/css-transitions |
54 | $.fn.emulateTransitionEnd = function (duration) { | 54 | $.fn.emulateTransitionEnd = function (duration) { |
55 | var called = false | 55 | var called = false |
56 | var $el = this | 56 | var $el = this |
@@ -77,10 +77,10 @@ if (typeof jQuery === 'undefined') { | |||
77 | }(jQuery); | 77 | }(jQuery); |
78 | 78 | ||
79 | /* ======================================================================== | 79 | /* ======================================================================== |
80 | * Bootstrap: alert.js v3.3.6 | 80 | * Bootstrap: alert.js v3.4.1 |
81 | * http://getbootstrap.com/javascript/#alerts | 81 | * https://getbootstrap.com/docs/3.4/javascript/#alerts |
82 | * ======================================================================== | 82 | * ======================================================================== |
83 | * Copyright 2011-2015 Twitter, Inc. | 83 | * Copyright 2011-2019 Twitter, Inc. |
84 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 84 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
85 | * ======================================================================== */ | 85 | * ======================================================================== */ |
86 | 86 | ||
@@ -96,7 +96,7 @@ if (typeof jQuery === 'undefined') { | |||
96 | $(el).on('click', dismiss, this.close) | 96 | $(el).on('click', dismiss, this.close) |
97 | } | 97 | } |
98 | 98 | ||
99 | Alert.VERSION = '3.3.6' | 99 | Alert.VERSION = '3.4.1' |
100 | 100 | ||
101 | Alert.TRANSITION_DURATION = 150 | 101 | Alert.TRANSITION_DURATION = 150 |
102 | 102 | ||
@@ -109,7 +109,8 @@ if (typeof jQuery === 'undefined') { | |||
109 | selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 | 109 | selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 |
110 | } | 110 | } |
111 | 111 | ||
112 | var $parent = $(selector) | 112 | selector = selector === '#' ? [] : selector |
113 | var $parent = $(document).find(selector) | ||
113 | 114 | ||
114 | if (e) e.preventDefault() | 115 | if (e) e.preventDefault() |
115 | 116 | ||
@@ -172,10 +173,10 @@ if (typeof jQuery === 'undefined') { | |||
172 | }(jQuery); | 173 | }(jQuery); |
173 | 174 | ||
174 | /* ======================================================================== | 175 | /* ======================================================================== |
175 | * Bootstrap: button.js v3.3.6 | 176 | * Bootstrap: button.js v3.4.1 |
176 | * http://getbootstrap.com/javascript/#buttons | 177 | * https://getbootstrap.com/docs/3.4/javascript/#buttons |
177 | * ======================================================================== | 178 | * ======================================================================== |
178 | * Copyright 2011-2015 Twitter, Inc. | 179 | * Copyright 2011-2019 Twitter, Inc. |
179 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 180 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
180 | * ======================================================================== */ | 181 | * ======================================================================== */ |
181 | 182 | ||
@@ -192,7 +193,7 @@ if (typeof jQuery === 'undefined') { | |||
192 | this.isLoading = false | 193 | this.isLoading = false |
193 | } | 194 | } |
194 | 195 | ||
195 | Button.VERSION = '3.3.6' | 196 | Button.VERSION = '3.4.1' |
196 | 197 | ||
197 | Button.DEFAULTS = { | 198 | Button.DEFAULTS = { |
198 | loadingText: 'loading...' | 199 | loadingText: 'loading...' |
@@ -214,10 +215,10 @@ if (typeof jQuery === 'undefined') { | |||
214 | 215 | ||
215 | if (state == 'loadingText') { | 216 | if (state == 'loadingText') { |
216 | this.isLoading = true | 217 | this.isLoading = true |
217 | $el.addClass(d).attr(d, d) | 218 | $el.addClass(d).attr(d, d).prop(d, true) |
218 | } else if (this.isLoading) { | 219 | } else if (this.isLoading) { |
219 | this.isLoading = false | 220 | this.isLoading = false |
220 | $el.removeClass(d).removeAttr(d) | 221 | $el.removeClass(d).removeAttr(d).prop(d, false) |
221 | } | 222 | } |
222 | }, this), 0) | 223 | }, this), 0) |
223 | } | 224 | } |
@@ -281,10 +282,15 @@ if (typeof jQuery === 'undefined') { | |||
281 | 282 | ||
282 | $(document) | 283 | $(document) |
283 | .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { | 284 | .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { |
284 | var $btn = $(e.target) | 285 | var $btn = $(e.target).closest('.btn') |
285 | if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn') | ||
286 | Plugin.call($btn, 'toggle') | 286 | Plugin.call($btn, 'toggle') |
287 | if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault() | 287 | if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) { |
288 | // Prevent double click on radios, and the double selections (so cancellation) on checkboxes | ||
289 | e.preventDefault() | ||
290 | // The target component still receive the focus | ||
291 | if ($btn.is('input,button')) $btn.trigger('focus') | ||
292 | else $btn.find('input:visible,button:visible').first().trigger('focus') | ||
293 | } | ||
288 | }) | 294 | }) |
289 | .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { | 295 | .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { |
290 | $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) | 296 | $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) |
@@ -293,10 +299,10 @@ if (typeof jQuery === 'undefined') { | |||
293 | }(jQuery); | 299 | }(jQuery); |
294 | 300 | ||
295 | /* ======================================================================== | 301 | /* ======================================================================== |
296 | * Bootstrap: carousel.js v3.3.6 | 302 | * Bootstrap: carousel.js v3.4.1 |
297 | * http://getbootstrap.com/javascript/#carousel | 303 | * https://getbootstrap.com/docs/3.4/javascript/#carousel |
298 | * ======================================================================== | 304 | * ======================================================================== |
299 | * Copyright 2011-2015 Twitter, Inc. | 305 | * Copyright 2011-2019 Twitter, Inc. |
300 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 306 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
301 | * ======================================================================== */ | 307 | * ======================================================================== */ |
302 | 308 | ||
@@ -324,7 +330,7 @@ if (typeof jQuery === 'undefined') { | |||
324 | .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) | 330 | .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) |
325 | } | 331 | } |
326 | 332 | ||
327 | Carousel.VERSION = '3.3.6' | 333 | Carousel.VERSION = '3.4.1' |
328 | 334 | ||
329 | Carousel.TRANSITION_DURATION = 600 | 335 | Carousel.TRANSITION_DURATION = 600 |
330 | 336 | ||
@@ -438,7 +444,9 @@ if (typeof jQuery === 'undefined') { | |||
438 | var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" | 444 | var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" |
439 | if ($.support.transition && this.$element.hasClass('slide')) { | 445 | if ($.support.transition && this.$element.hasClass('slide')) { |
440 | $next.addClass(type) | 446 | $next.addClass(type) |
441 | $next[0].offsetWidth // force reflow | 447 | if (typeof $next === 'object' && $next.length) { |
448 | $next[0].offsetWidth // force reflow | ||
449 | } | ||
442 | $active.addClass(direction) | 450 | $active.addClass(direction) |
443 | $next.addClass(direction) | 451 | $next.addClass(direction) |
444 | $active | 452 | $active |
@@ -500,10 +508,17 @@ if (typeof jQuery === 'undefined') { | |||
500 | // ================= | 508 | // ================= |
501 | 509 | ||
502 | var clickHandler = function (e) { | 510 | var clickHandler = function (e) { |
503 | var href | ||
504 | var $this = $(this) | 511 | var $this = $(this) |
505 | var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 | 512 | var href = $this.attr('href') |
513 | if (href) { | ||
514 | href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 | ||
515 | } | ||
516 | |||
517 | var target = $this.attr('data-target') || href | ||
518 | var $target = $(document).find(target) | ||
519 | |||
506 | if (!$target.hasClass('carousel')) return | 520 | if (!$target.hasClass('carousel')) return |
521 | |||
507 | var options = $.extend({}, $target.data(), $this.data()) | 522 | var options = $.extend({}, $target.data(), $this.data()) |
508 | var slideIndex = $this.attr('data-slide-to') | 523 | var slideIndex = $this.attr('data-slide-to') |
509 | if (slideIndex) options.interval = false | 524 | if (slideIndex) options.interval = false |
@@ -531,13 +546,14 @@ if (typeof jQuery === 'undefined') { | |||
531 | }(jQuery); | 546 | }(jQuery); |
532 | 547 | ||
533 | /* ======================================================================== | 548 | /* ======================================================================== |
534 | * Bootstrap: collapse.js v3.3.6 | 549 | * Bootstrap: collapse.js v3.4.1 |
535 | * http://getbootstrap.com/javascript/#collapse | 550 | * https://getbootstrap.com/docs/3.4/javascript/#collapse |
536 | * ======================================================================== | 551 | * ======================================================================== |
537 | * Copyright 2011-2015 Twitter, Inc. | 552 | * Copyright 2011-2019 Twitter, Inc. |
538 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 553 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
539 | * ======================================================================== */ | 554 | * ======================================================================== */ |
540 | 555 | ||
556 | /* jshint latedef: false */ | ||
541 | 557 | ||
542 | +function ($) { | 558 | +function ($) { |
543 | 'use strict'; | 559 | 'use strict'; |
@@ -561,7 +577,7 @@ if (typeof jQuery === 'undefined') { | |||
561 | if (this.options.toggle) this.toggle() | 577 | if (this.options.toggle) this.toggle() |
562 | } | 578 | } |
563 | 579 | ||
564 | Collapse.VERSION = '3.3.6' | 580 | Collapse.VERSION = '3.4.1' |
565 | 581 | ||
566 | Collapse.TRANSITION_DURATION = 350 | 582 | Collapse.TRANSITION_DURATION = 350 |
567 | 583 | ||
@@ -668,7 +684,7 @@ if (typeof jQuery === 'undefined') { | |||
668 | } | 684 | } |
669 | 685 | ||
670 | Collapse.prototype.getParent = function () { | 686 | Collapse.prototype.getParent = function () { |
671 | return $(this.options.parent) | 687 | return $(document).find(this.options.parent) |
672 | .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') | 688 | .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') |
673 | .each($.proxy(function (i, element) { | 689 | .each($.proxy(function (i, element) { |
674 | var $element = $(element) | 690 | var $element = $(element) |
@@ -691,7 +707,7 @@ if (typeof jQuery === 'undefined') { | |||
691 | var target = $trigger.attr('data-target') | 707 | var target = $trigger.attr('data-target') |
692 | || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 | 708 | || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 |
693 | 709 | ||
694 | return $(target) | 710 | return $(document).find(target) |
695 | } | 711 | } |
696 | 712 | ||
697 | 713 | ||
@@ -743,10 +759,10 @@ if (typeof jQuery === 'undefined') { | |||
743 | }(jQuery); | 759 | }(jQuery); |
744 | 760 | ||
745 | /* ======================================================================== | 761 | /* ======================================================================== |
746 | * Bootstrap: dropdown.js v3.3.6 | 762 | * Bootstrap: dropdown.js v3.4.1 |
747 | * http://getbootstrap.com/javascript/#dropdowns | 763 | * https://getbootstrap.com/docs/3.4/javascript/#dropdowns |
748 | * ======================================================================== | 764 | * ======================================================================== |
749 | * Copyright 2011-2015 Twitter, Inc. | 765 | * Copyright 2011-2019 Twitter, Inc. |
750 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 766 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
751 | * ======================================================================== */ | 767 | * ======================================================================== */ |
752 | 768 | ||
@@ -763,7 +779,7 @@ if (typeof jQuery === 'undefined') { | |||
763 | $(element).on('click.bs.dropdown', this.toggle) | 779 | $(element).on('click.bs.dropdown', this.toggle) |
764 | } | 780 | } |
765 | 781 | ||
766 | Dropdown.VERSION = '3.3.6' | 782 | Dropdown.VERSION = '3.4.1' |
767 | 783 | ||
768 | function getParent($this) { | 784 | function getParent($this) { |
769 | var selector = $this.attr('data-target') | 785 | var selector = $this.attr('data-target') |
@@ -773,7 +789,7 @@ if (typeof jQuery === 'undefined') { | |||
773 | selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 | 789 | selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 |
774 | } | 790 | } |
775 | 791 | ||
776 | var $parent = selector && $(selector) | 792 | var $parent = selector !== '#' ? $(document).find(selector) : null |
777 | 793 | ||
778 | return $parent && $parent.length ? $parent : $this.parent() | 794 | return $parent && $parent.length ? $parent : $this.parent() |
779 | } | 795 | } |
@@ -909,10 +925,10 @@ if (typeof jQuery === 'undefined') { | |||
909 | }(jQuery); | 925 | }(jQuery); |
910 | 926 | ||
911 | /* ======================================================================== | 927 | /* ======================================================================== |
912 | * Bootstrap: modal.js v3.3.6 | 928 | * Bootstrap: modal.js v3.4.1 |
913 | * http://getbootstrap.com/javascript/#modals | 929 | * https://getbootstrap.com/docs/3.4/javascript/#modals |
914 | * ======================================================================== | 930 | * ======================================================================== |
915 | * Copyright 2011-2015 Twitter, Inc. | 931 | * Copyright 2011-2019 Twitter, Inc. |
916 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 932 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
917 | * ======================================================================== */ | 933 | * ======================================================================== */ |
918 | 934 | ||
@@ -924,15 +940,16 @@ if (typeof jQuery === 'undefined') { | |||
924 | // ====================== | 940 | // ====================== |
925 | 941 | ||
926 | var Modal = function (element, options) { | 942 | var Modal = function (element, options) { |
927 | this.options = options | 943 | this.options = options |
928 | this.$body = $(document.body) | 944 | this.$body = $(document.body) |
929 | this.$element = $(element) | 945 | this.$element = $(element) |
930 | this.$dialog = this.$element.find('.modal-dialog') | 946 | this.$dialog = this.$element.find('.modal-dialog') |
931 | this.$backdrop = null | 947 | this.$backdrop = null |
932 | this.isShown = null | 948 | this.isShown = null |
933 | this.originalBodyPad = null | 949 | this.originalBodyPad = null |
934 | this.scrollbarWidth = 0 | 950 | this.scrollbarWidth = 0 |
935 | this.ignoreBackdropClick = false | 951 | this.ignoreBackdropClick = false |
952 | this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom' | ||
936 | 953 | ||
937 | if (this.options.remote) { | 954 | if (this.options.remote) { |
938 | this.$element | 955 | this.$element |
@@ -943,7 +960,7 @@ if (typeof jQuery === 'undefined') { | |||
943 | } | 960 | } |
944 | } | 961 | } |
945 | 962 | ||
946 | Modal.VERSION = '3.3.6' | 963 | Modal.VERSION = '3.4.1' |
947 | 964 | ||
948 | Modal.TRANSITION_DURATION = 300 | 965 | Modal.TRANSITION_DURATION = 300 |
949 | Modal.BACKDROP_TRANSITION_DURATION = 150 | 966 | Modal.BACKDROP_TRANSITION_DURATION = 150 |
@@ -960,7 +977,7 @@ if (typeof jQuery === 'undefined') { | |||
960 | 977 | ||
961 | Modal.prototype.show = function (_relatedTarget) { | 978 | Modal.prototype.show = function (_relatedTarget) { |
962 | var that = this | 979 | var that = this |
963 | var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) | 980 | var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) |
964 | 981 | ||
965 | this.$element.trigger(e) | 982 | this.$element.trigger(e) |
966 | 983 | ||
@@ -1050,7 +1067,9 @@ if (typeof jQuery === 'undefined') { | |||
1050 | $(document) | 1067 | $(document) |
1051 | .off('focusin.bs.modal') // guard against infinite focus loop | 1068 | .off('focusin.bs.modal') // guard against infinite focus loop |
1052 | .on('focusin.bs.modal', $.proxy(function (e) { | 1069 | .on('focusin.bs.modal', $.proxy(function (e) { |
1053 | if (this.$element[0] !== e.target && !this.$element.has(e.target).length) { | 1070 | if (document !== e.target && |
1071 | this.$element[0] !== e.target && | ||
1072 | !this.$element.has(e.target).length) { | ||
1054 | this.$element.trigger('focus') | 1073 | this.$element.trigger('focus') |
1055 | } | 1074 | } |
1056 | }, this)) | 1075 | }, this)) |
@@ -1152,7 +1171,7 @@ if (typeof jQuery === 'undefined') { | |||
1152 | var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight | 1171 | var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight |
1153 | 1172 | ||
1154 | this.$element.css({ | 1173 | this.$element.css({ |
1155 | paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', | 1174 | paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', |
1156 | paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : '' | 1175 | paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : '' |
1157 | }) | 1176 | }) |
1158 | } | 1177 | } |
@@ -1177,11 +1196,26 @@ if (typeof jQuery === 'undefined') { | |||
1177 | Modal.prototype.setScrollbar = function () { | 1196 | Modal.prototype.setScrollbar = function () { |
1178 | var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) | 1197 | var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) |
1179 | this.originalBodyPad = document.body.style.paddingRight || '' | 1198 | this.originalBodyPad = document.body.style.paddingRight || '' |
1180 | if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) | 1199 | var scrollbarWidth = this.scrollbarWidth |
1200 | if (this.bodyIsOverflowing) { | ||
1201 | this.$body.css('padding-right', bodyPad + scrollbarWidth) | ||
1202 | $(this.fixedContent).each(function (index, element) { | ||
1203 | var actualPadding = element.style.paddingRight | ||
1204 | var calculatedPadding = $(element).css('padding-right') | ||
1205 | $(element) | ||
1206 | .data('padding-right', actualPadding) | ||
1207 | .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px') | ||
1208 | }) | ||
1209 | } | ||
1181 | } | 1210 | } |
1182 | 1211 | ||
1183 | Modal.prototype.resetScrollbar = function () { | 1212 | Modal.prototype.resetScrollbar = function () { |
1184 | this.$body.css('padding-right', this.originalBodyPad) | 1213 | this.$body.css('padding-right', this.originalBodyPad) |
1214 | $(this.fixedContent).each(function (index, element) { | ||
1215 | var padding = $(element).data('padding-right') | ||
1216 | $(element).removeData('padding-right') | ||
1217 | element.style.paddingRight = padding ? padding : '' | ||
1218 | }) | ||
1185 | } | 1219 | } |
1186 | 1220 | ||
1187 | Modal.prototype.measureScrollbar = function () { // thx walsh | 1221 | Modal.prototype.measureScrollbar = function () { // thx walsh |
@@ -1199,8 +1233,8 @@ if (typeof jQuery === 'undefined') { | |||
1199 | 1233 | ||
1200 | function Plugin(option, _relatedTarget) { | 1234 | function Plugin(option, _relatedTarget) { |
1201 | return this.each(function () { | 1235 | return this.each(function () { |
1202 | var $this = $(this) | 1236 | var $this = $(this) |
1203 | var data = $this.data('bs.modal') | 1237 | var data = $this.data('bs.modal') |
1204 | var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) | 1238 | var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) |
1205 | 1239 | ||
1206 | if (!data) $this.data('bs.modal', (data = new Modal(this, options))) | 1240 | if (!data) $this.data('bs.modal', (data = new Modal(this, options))) |
@@ -1211,7 +1245,7 @@ if (typeof jQuery === 'undefined') { | |||
1211 | 1245 | ||
1212 | var old = $.fn.modal | 1246 | var old = $.fn.modal |
1213 | 1247 | ||
1214 | $.fn.modal = Plugin | 1248 | $.fn.modal = Plugin |
1215 | $.fn.modal.Constructor = Modal | 1249 | $.fn.modal.Constructor = Modal |
1216 | 1250 | ||
1217 | 1251 | ||
@@ -1228,10 +1262,13 @@ if (typeof jQuery === 'undefined') { | |||
1228 | // ============== | 1262 | // ============== |
1229 | 1263 | ||
1230 | $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { | 1264 | $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { |
1231 | var $this = $(this) | 1265 | var $this = $(this) |
1232 | var href = $this.attr('href') | 1266 | var href = $this.attr('href') |
1233 | var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 | 1267 | var target = $this.attr('data-target') || |
1234 | var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) | 1268 | (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 |
1269 | |||
1270 | var $target = $(document).find(target) | ||
1271 | var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) | ||
1235 | 1272 | ||
1236 | if ($this.is('a')) e.preventDefault() | 1273 | if ($this.is('a')) e.preventDefault() |
1237 | 1274 | ||
@@ -1247,18 +1284,148 @@ if (typeof jQuery === 'undefined') { | |||
1247 | }(jQuery); | 1284 | }(jQuery); |
1248 | 1285 | ||
1249 | /* ======================================================================== | 1286 | /* ======================================================================== |
1250 | * Bootstrap: tooltip.js v3.3.6 | 1287 | * Bootstrap: tooltip.js v3.4.1 |
1251 | * http://getbootstrap.com/javascript/#tooltip | 1288 | * https://getbootstrap.com/docs/3.4/javascript/#tooltip |
1252 | * Inspired by the original jQuery.tipsy by Jason Frame | 1289 | * Inspired by the original jQuery.tipsy by Jason Frame |
1253 | * ======================================================================== | 1290 | * ======================================================================== |
1254 | * Copyright 2011-2015 Twitter, Inc. | 1291 | * Copyright 2011-2019 Twitter, Inc. |
1255 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 1292 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
1256 | * ======================================================================== */ | 1293 | * ======================================================================== */ |
1257 | 1294 | ||
1258 | |||
1259 | +function ($) { | 1295 | +function ($) { |
1260 | 'use strict'; | 1296 | 'use strict'; |
1261 | 1297 | ||
1298 | var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn'] | ||
1299 | |||
1300 | var uriAttrs = [ | ||
1301 | 'background', | ||
1302 | 'cite', | ||
1303 | 'href', | ||
1304 | 'itemtype', | ||
1305 | 'longdesc', | ||
1306 | 'poster', | ||
1307 | 'src', | ||
1308 | 'xlink:href' | ||
1309 | ] | ||
1310 | |||
1311 | var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i | ||
1312 | |||
1313 | var DefaultWhitelist = { | ||
1314 | // Global attributes allowed on any supplied element below. | ||
1315 | '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN], | ||
1316 | a: ['target', 'href', 'title', 'rel'], | ||
1317 | area: [], | ||
1318 | b: [], | ||
1319 | br: [], | ||
1320 | col: [], | ||
1321 | code: [], | ||
1322 | div: [], | ||
1323 | em: [], | ||
1324 | hr: [], | ||
1325 | h1: [], | ||
1326 | h2: [], | ||
1327 | h3: [], | ||
1328 | h4: [], | ||
1329 | h5: [], | ||
1330 | h6: [], | ||
1331 | i: [], | ||
1332 | img: ['src', 'alt', 'title', 'width', 'height'], | ||
1333 | li: [], | ||
1334 | ol: [], | ||
1335 | p: [], | ||
1336 | pre: [], | ||
1337 | s: [], | ||
1338 | small: [], | ||
1339 | span: [], | ||
1340 | sub: [], | ||
1341 | sup: [], | ||
1342 | strong: [], | ||
1343 | u: [], | ||
1344 | ul: [] | ||
1345 | } | ||
1346 | |||
1347 | /** | ||
1348 | * A pattern that recognizes a commonly useful subset of URLs that are safe. | ||
1349 | * | ||
1350 | * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts | ||
1351 | */ | ||
1352 | var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi | ||
1353 | |||
1354 | /** | ||
1355 | * A pattern that matches safe data URLs. Only matches image, video and audio types. | ||
1356 | * | ||
1357 | * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts | ||
1358 | */ | ||
1359 | var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i | ||
1360 | |||
1361 | function allowedAttribute(attr, allowedAttributeList) { | ||
1362 | var attrName = attr.nodeName.toLowerCase() | ||
1363 | |||
1364 | if ($.inArray(attrName, allowedAttributeList) !== -1) { | ||
1365 | if ($.inArray(attrName, uriAttrs) !== -1) { | ||
1366 | return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN)) | ||
1367 | } | ||
1368 | |||
1369 | return true | ||
1370 | } | ||
1371 | |||
1372 | var regExp = $(allowedAttributeList).filter(function (index, value) { | ||
1373 | return value instanceof RegExp | ||
1374 | }) | ||
1375 | |||
1376 | // Check if a regular expression validates the attribute. | ||
1377 | for (var i = 0, l = regExp.length; i < l; i++) { | ||
1378 | if (attrName.match(regExp[i])) { | ||
1379 | return true | ||
1380 | } | ||
1381 | } | ||
1382 | |||
1383 | return false | ||
1384 | } | ||
1385 | |||
1386 | function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) { | ||
1387 | if (unsafeHtml.length === 0) { | ||
1388 | return unsafeHtml | ||
1389 | } | ||
1390 | |||
1391 | if (sanitizeFn && typeof sanitizeFn === 'function') { | ||
1392 | return sanitizeFn(unsafeHtml) | ||
1393 | } | ||
1394 | |||
1395 | // IE 8 and below don't support createHTMLDocument | ||
1396 | if (!document.implementation || !document.implementation.createHTMLDocument) { | ||
1397 | return unsafeHtml | ||
1398 | } | ||
1399 | |||
1400 | var createdDocument = document.implementation.createHTMLDocument('sanitization') | ||
1401 | createdDocument.body.innerHTML = unsafeHtml | ||
1402 | |||
1403 | var whitelistKeys = $.map(whiteList, function (el, i) { return i }) | ||
1404 | var elements = $(createdDocument.body).find('*') | ||
1405 | |||
1406 | for (var i = 0, len = elements.length; i < len; i++) { | ||
1407 | var el = elements[i] | ||
1408 | var elName = el.nodeName.toLowerCase() | ||
1409 | |||
1410 | if ($.inArray(elName, whitelistKeys) === -1) { | ||
1411 | el.parentNode.removeChild(el) | ||
1412 | |||
1413 | continue | ||
1414 | } | ||
1415 | |||
1416 | var attributeList = $.map(el.attributes, function (el) { return el }) | ||
1417 | var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || []) | ||
1418 | |||
1419 | for (var j = 0, len2 = attributeList.length; j < len2; j++) { | ||
1420 | if (!allowedAttribute(attributeList[j], whitelistedAttributes)) { | ||
1421 | el.removeAttribute(attributeList[j].nodeName) | ||
1422 | } | ||
1423 | } | ||
1424 | } | ||
1425 | |||
1426 | return createdDocument.body.innerHTML | ||
1427 | } | ||
1428 | |||
1262 | // TOOLTIP PUBLIC CLASS DEFINITION | 1429 | // TOOLTIP PUBLIC CLASS DEFINITION |
1263 | // =============================== | 1430 | // =============================== |
1264 | 1431 | ||
@@ -1274,7 +1441,7 @@ if (typeof jQuery === 'undefined') { | |||
1274 | this.init('tooltip', element, options) | 1441 | this.init('tooltip', element, options) |
1275 | } | 1442 | } |
1276 | 1443 | ||
1277 | Tooltip.VERSION = '3.3.6' | 1444 | Tooltip.VERSION = '3.4.1' |
1278 | 1445 | ||
1279 | Tooltip.TRANSITION_DURATION = 150 | 1446 | Tooltip.TRANSITION_DURATION = 150 |
1280 | 1447 | ||
@@ -1291,7 +1458,10 @@ if (typeof jQuery === 'undefined') { | |||
1291 | viewport: { | 1458 | viewport: { |
1292 | selector: 'body', | 1459 | selector: 'body', |
1293 | padding: 0 | 1460 | padding: 0 |
1294 | } | 1461 | }, |
1462 | sanitize : true, | ||
1463 | sanitizeFn : null, | ||
1464 | whiteList : DefaultWhitelist | ||
1295 | } | 1465 | } |
1296 | 1466 | ||
1297 | Tooltip.prototype.init = function (type, element, options) { | 1467 | Tooltip.prototype.init = function (type, element, options) { |
@@ -1299,7 +1469,7 @@ if (typeof jQuery === 'undefined') { | |||
1299 | this.type = type | 1469 | this.type = type |
1300 | this.$element = $(element) | 1470 | this.$element = $(element) |
1301 | this.options = this.getOptions(options) | 1471 | this.options = this.getOptions(options) |
1302 | this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) | 1472 | this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) |
1303 | this.inState = { click: false, hover: false, focus: false } | 1473 | this.inState = { click: false, hover: false, focus: false } |
1304 | 1474 | ||
1305 | if (this.$element[0] instanceof document.constructor && !this.options.selector) { | 1475 | if (this.$element[0] instanceof document.constructor && !this.options.selector) { |
@@ -1332,7 +1502,15 @@ if (typeof jQuery === 'undefined') { | |||
1332 | } | 1502 | } |
1333 | 1503 | ||
1334 | Tooltip.prototype.getOptions = function (options) { | 1504 | Tooltip.prototype.getOptions = function (options) { |
1335 | options = $.extend({}, this.getDefaults(), this.$element.data(), options) | 1505 | var dataAttributes = this.$element.data() |
1506 | |||
1507 | for (var dataAttr in dataAttributes) { | ||
1508 | if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) { | ||
1509 | delete dataAttributes[dataAttr] | ||
1510 | } | ||
1511 | } | ||
1512 | |||
1513 | options = $.extend({}, this.getDefaults(), dataAttributes, options) | ||
1336 | 1514 | ||
1337 | if (options.delay && typeof options.delay == 'number') { | 1515 | if (options.delay && typeof options.delay == 'number') { |
1338 | options.delay = { | 1516 | options.delay = { |
@@ -1341,6 +1519,10 @@ if (typeof jQuery === 'undefined') { | |||
1341 | } | 1519 | } |
1342 | } | 1520 | } |
1343 | 1521 | ||
1522 | if (options.sanitize) { | ||
1523 | options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn) | ||
1524 | } | ||
1525 | |||
1344 | return options | 1526 | return options |
1345 | } | 1527 | } |
1346 | 1528 | ||
@@ -1452,7 +1634,7 @@ if (typeof jQuery === 'undefined') { | |||
1452 | .addClass(placement) | 1634 | .addClass(placement) |
1453 | .data('bs.' + this.type, this) | 1635 | .data('bs.' + this.type, this) |
1454 | 1636 | ||
1455 | this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element) | 1637 | this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element) |
1456 | this.$element.trigger('inserted.bs.' + this.type) | 1638 | this.$element.trigger('inserted.bs.' + this.type) |
1457 | 1639 | ||
1458 | var pos = this.getPosition() | 1640 | var pos = this.getPosition() |
@@ -1554,7 +1736,16 @@ if (typeof jQuery === 'undefined') { | |||
1554 | var $tip = this.tip() | 1736 | var $tip = this.tip() |
1555 | var title = this.getTitle() | 1737 | var title = this.getTitle() |
1556 | 1738 | ||
1557 | $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title) | 1739 | if (this.options.html) { |
1740 | if (this.options.sanitize) { | ||
1741 | title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn) | ||
1742 | } | ||
1743 | |||
1744 | $tip.find('.tooltip-inner').html(title) | ||
1745 | } else { | ||
1746 | $tip.find('.tooltip-inner').text(title) | ||
1747 | } | ||
1748 | |||
1558 | $tip.removeClass('fade in top bottom left right') | 1749 | $tip.removeClass('fade in top bottom left right') |
1559 | } | 1750 | } |
1560 | 1751 | ||
@@ -1565,9 +1756,11 @@ if (typeof jQuery === 'undefined') { | |||
1565 | 1756 | ||
1566 | function complete() { | 1757 | function complete() { |
1567 | if (that.hoverState != 'in') $tip.detach() | 1758 | if (that.hoverState != 'in') $tip.detach() |
1568 | that.$element | 1759 | if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary. |
1569 | .removeAttr('aria-describedby') | 1760 | that.$element |
1570 | .trigger('hidden.bs.' + that.type) | 1761 | .removeAttr('aria-describedby') |
1762 | .trigger('hidden.bs.' + that.type) | ||
1763 | } | ||
1571 | callback && callback() | 1764 | callback && callback() |
1572 | } | 1765 | } |
1573 | 1766 | ||
@@ -1610,7 +1803,10 @@ if (typeof jQuery === 'undefined') { | |||
1610 | // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 | 1803 | // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 |
1611 | elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) | 1804 | elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) |
1612 | } | 1805 | } |
1613 | var elOffset = isBody ? { top: 0, left: 0 } : $element.offset() | 1806 | var isSvg = window.SVGElement && el instanceof window.SVGElement |
1807 | // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3. | ||
1808 | // See https://github.com/twbs/bootstrap/issues/20280 | ||
1809 | var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset()) | ||
1614 | var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } | 1810 | var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } |
1615 | var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null | 1811 | var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null |
1616 | 1812 | ||
@@ -1726,9 +1922,13 @@ if (typeof jQuery === 'undefined') { | |||
1726 | that.$tip = null | 1922 | that.$tip = null |
1727 | that.$arrow = null | 1923 | that.$arrow = null |
1728 | that.$viewport = null | 1924 | that.$viewport = null |
1925 | that.$element = null | ||
1729 | }) | 1926 | }) |
1730 | } | 1927 | } |
1731 | 1928 | ||
1929 | Tooltip.prototype.sanitizeHtml = function (unsafeHtml) { | ||
1930 | return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn) | ||
1931 | } | ||
1732 | 1932 | ||
1733 | // TOOLTIP PLUGIN DEFINITION | 1933 | // TOOLTIP PLUGIN DEFINITION |
1734 | // ========================= | 1934 | // ========================= |
@@ -1762,10 +1962,10 @@ if (typeof jQuery === 'undefined') { | |||
1762 | }(jQuery); | 1962 | }(jQuery); |
1763 | 1963 | ||
1764 | /* ======================================================================== | 1964 | /* ======================================================================== |
1765 | * Bootstrap: popover.js v3.3.6 | 1965 | * Bootstrap: popover.js v3.4.1 |
1766 | * http://getbootstrap.com/javascript/#popovers | 1966 | * https://getbootstrap.com/docs/3.4/javascript/#popovers |
1767 | * ======================================================================== | 1967 | * ======================================================================== |
1768 | * Copyright 2011-2015 Twitter, Inc. | 1968 | * Copyright 2011-2019 Twitter, Inc. |
1769 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 1969 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
1770 | * ======================================================================== */ | 1970 | * ======================================================================== */ |
1771 | 1971 | ||
@@ -1782,7 +1982,7 @@ if (typeof jQuery === 'undefined') { | |||
1782 | 1982 | ||
1783 | if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') | 1983 | if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') |
1784 | 1984 | ||
1785 | Popover.VERSION = '3.3.6' | 1985 | Popover.VERSION = '3.4.1' |
1786 | 1986 | ||
1787 | Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { | 1987 | Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { |
1788 | placement: 'right', | 1988 | placement: 'right', |
@@ -1808,10 +2008,25 @@ if (typeof jQuery === 'undefined') { | |||
1808 | var title = this.getTitle() | 2008 | var title = this.getTitle() |
1809 | var content = this.getContent() | 2009 | var content = this.getContent() |
1810 | 2010 | ||
1811 | $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title) | 2011 | if (this.options.html) { |
1812 | $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events | 2012 | var typeContent = typeof content |
1813 | this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text' | 2013 | |
1814 | ](content) | 2014 | if (this.options.sanitize) { |
2015 | title = this.sanitizeHtml(title) | ||
2016 | |||
2017 | if (typeContent === 'string') { | ||
2018 | content = this.sanitizeHtml(content) | ||
2019 | } | ||
2020 | } | ||
2021 | |||
2022 | $tip.find('.popover-title').html(title) | ||
2023 | $tip.find('.popover-content').children().detach().end()[ | ||
2024 | typeContent === 'string' ? 'html' : 'append' | ||
2025 | ](content) | ||
2026 | } else { | ||
2027 | $tip.find('.popover-title').text(title) | ||
2028 | $tip.find('.popover-content').children().detach().end().text(content) | ||
2029 | } | ||
1815 | 2030 | ||
1816 | $tip.removeClass('fade top bottom left right in') | 2031 | $tip.removeClass('fade top bottom left right in') |
1817 | 2032 | ||
@@ -1830,8 +2045,8 @@ if (typeof jQuery === 'undefined') { | |||
1830 | 2045 | ||
1831 | return $e.attr('data-content') | 2046 | return $e.attr('data-content') |
1832 | || (typeof o.content == 'function' ? | 2047 | || (typeof o.content == 'function' ? |
1833 | o.content.call($e[0]) : | 2048 | o.content.call($e[0]) : |
1834 | o.content) | 2049 | o.content) |
1835 | } | 2050 | } |
1836 | 2051 | ||
1837 | Popover.prototype.arrow = function () { | 2052 | Popover.prototype.arrow = function () { |
@@ -1871,10 +2086,10 @@ if (typeof jQuery === 'undefined') { | |||
1871 | }(jQuery); | 2086 | }(jQuery); |
1872 | 2087 | ||
1873 | /* ======================================================================== | 2088 | /* ======================================================================== |
1874 | * Bootstrap: scrollspy.js v3.3.6 | 2089 | * Bootstrap: scrollspy.js v3.4.1 |
1875 | * http://getbootstrap.com/javascript/#scrollspy | 2090 | * https://getbootstrap.com/docs/3.4/javascript/#scrollspy |
1876 | * ======================================================================== | 2091 | * ======================================================================== |
1877 | * Copyright 2011-2015 Twitter, Inc. | 2092 | * Copyright 2011-2019 Twitter, Inc. |
1878 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 2093 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
1879 | * ======================================================================== */ | 2094 | * ======================================================================== */ |
1880 | 2095 | ||
@@ -1900,7 +2115,7 @@ if (typeof jQuery === 'undefined') { | |||
1900 | this.process() | 2115 | this.process() |
1901 | } | 2116 | } |
1902 | 2117 | ||
1903 | ScrollSpy.VERSION = '3.3.6' | 2118 | ScrollSpy.VERSION = '3.4.1' |
1904 | 2119 | ||
1905 | ScrollSpy.DEFAULTS = { | 2120 | ScrollSpy.DEFAULTS = { |
1906 | offset: 10 | 2121 | offset: 10 |
@@ -2044,10 +2259,10 @@ if (typeof jQuery === 'undefined') { | |||
2044 | }(jQuery); | 2259 | }(jQuery); |
2045 | 2260 | ||
2046 | /* ======================================================================== | 2261 | /* ======================================================================== |
2047 | * Bootstrap: tab.js v3.3.6 | 2262 | * Bootstrap: tab.js v3.4.1 |
2048 | * http://getbootstrap.com/javascript/#tabs | 2263 | * https://getbootstrap.com/docs/3.4/javascript/#tabs |
2049 | * ======================================================================== | 2264 | * ======================================================================== |
2050 | * Copyright 2011-2015 Twitter, Inc. | 2265 | * Copyright 2011-2019 Twitter, Inc. |
2051 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 2266 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
2052 | * ======================================================================== */ | 2267 | * ======================================================================== */ |
2053 | 2268 | ||
@@ -2064,7 +2279,7 @@ if (typeof jQuery === 'undefined') { | |||
2064 | // jscs:enable requireDollarBeforejQueryAssignment | 2279 | // jscs:enable requireDollarBeforejQueryAssignment |
2065 | } | 2280 | } |
2066 | 2281 | ||
2067 | Tab.VERSION = '3.3.6' | 2282 | Tab.VERSION = '3.4.1' |
2068 | 2283 | ||
2069 | Tab.TRANSITION_DURATION = 150 | 2284 | Tab.TRANSITION_DURATION = 150 |
2070 | 2285 | ||
@@ -2093,7 +2308,7 @@ if (typeof jQuery === 'undefined') { | |||
2093 | 2308 | ||
2094 | if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return | 2309 | if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return |
2095 | 2310 | ||
2096 | var $target = $(selector) | 2311 | var $target = $(document).find(selector) |
2097 | 2312 | ||
2098 | this.activate($this.closest('li'), $ul) | 2313 | this.activate($this.closest('li'), $ul) |
2099 | this.activate($target, $target.parent(), function () { | 2314 | this.activate($target, $target.parent(), function () { |
@@ -2118,15 +2333,15 @@ if (typeof jQuery === 'undefined') { | |||
2118 | $active | 2333 | $active |
2119 | .removeClass('active') | 2334 | .removeClass('active') |
2120 | .find('> .dropdown-menu > .active') | 2335 | .find('> .dropdown-menu > .active') |
2121 | .removeClass('active') | 2336 | .removeClass('active') |
2122 | .end() | 2337 | .end() |
2123 | .find('[data-toggle="tab"]') | 2338 | .find('[data-toggle="tab"]') |
2124 | .attr('aria-expanded', false) | 2339 | .attr('aria-expanded', false) |
2125 | 2340 | ||
2126 | element | 2341 | element |
2127 | .addClass('active') | 2342 | .addClass('active') |
2128 | .find('[data-toggle="tab"]') | 2343 | .find('[data-toggle="tab"]') |
2129 | .attr('aria-expanded', true) | 2344 | .attr('aria-expanded', true) |
2130 | 2345 | ||
2131 | if (transition) { | 2346 | if (transition) { |
2132 | element[0].offsetWidth // reflow for transition | 2347 | element[0].offsetWidth // reflow for transition |
@@ -2138,10 +2353,10 @@ if (typeof jQuery === 'undefined') { | |||
2138 | if (element.parent('.dropdown-menu').length) { | 2353 | if (element.parent('.dropdown-menu').length) { |
2139 | element | 2354 | element |
2140 | .closest('li.dropdown') | 2355 | .closest('li.dropdown') |
2141 | .addClass('active') | 2356 | .addClass('active') |
2142 | .end() | 2357 | .end() |
2143 | .find('[data-toggle="tab"]') | 2358 | .find('[data-toggle="tab"]') |
2144 | .attr('aria-expanded', true) | 2359 | .attr('aria-expanded', true) |
2145 | } | 2360 | } |
2146 | 2361 | ||
2147 | callback && callback() | 2362 | callback && callback() |
@@ -2200,10 +2415,10 @@ if (typeof jQuery === 'undefined') { | |||
2200 | }(jQuery); | 2415 | }(jQuery); |
2201 | 2416 | ||
2202 | /* ======================================================================== | 2417 | /* ======================================================================== |
2203 | * Bootstrap: affix.js v3.3.6 | 2418 | * Bootstrap: affix.js v3.4.1 |
2204 | * http://getbootstrap.com/javascript/#affix | 2419 | * https://getbootstrap.com/docs/3.4/javascript/#affix |
2205 | * ======================================================================== | 2420 | * ======================================================================== |
2206 | * Copyright 2011-2015 Twitter, Inc. | 2421 | * Copyright 2011-2019 Twitter, Inc. |
2207 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) | 2422 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) |
2208 | * ======================================================================== */ | 2423 | * ======================================================================== */ |
2209 | 2424 | ||
@@ -2217,7 +2432,9 @@ if (typeof jQuery === 'undefined') { | |||
2217 | var Affix = function (element, options) { | 2432 | var Affix = function (element, options) { |
2218 | this.options = $.extend({}, Affix.DEFAULTS, options) | 2433 | this.options = $.extend({}, Affix.DEFAULTS, options) |
2219 | 2434 | ||
2220 | this.$target = $(this.options.target) | 2435 | var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target) |
2436 | |||
2437 | this.$target = target | ||
2221 | .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) | 2438 | .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) |
2222 | .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) | 2439 | .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) |
2223 | 2440 | ||
@@ -2229,7 +2446,7 @@ if (typeof jQuery === 'undefined') { | |||
2229 | this.checkPosition() | 2446 | this.checkPosition() |
2230 | } | 2447 | } |
2231 | 2448 | ||
2232 | Affix.VERSION = '3.3.6' | 2449 | Affix.VERSION = '3.4.1' |
2233 | 2450 | ||
2234 | Affix.RESET = 'affix affix-top affix-bottom' | 2451 | Affix.RESET = 'affix affix-top affix-bottom' |
2235 | 2452 | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js new file mode 100644 index 0000000000..eb0a8b410f --- /dev/null +++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js | |||
@@ -0,0 +1,6 @@ | |||
1 | /*! | ||
2 | * Bootstrap v3.4.1 (https://getbootstrap.com/) | ||
3 | * Copyright 2011-2019 Twitter, Inc. | ||
4 | * Licensed under the MIT license | ||
5 | */ | ||
6 | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");!function(t){"use strict";var e=jQuery.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||3<e[0])throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(),function(n){"use strict";n.fn.emulateTransitionEnd=function(t){var e=!1,i=this;n(this).one("bsTransitionEnd",function(){e=!0});return setTimeout(function(){e||n(i).trigger(n.support.transition.end)},t),this},n(function(){n.support.transition=function o(){var t=document.createElement("bootstrap"),e={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var i in e)if(t.style[i]!==undefined)return{end:e[i]};return!1}(),n.support.transition&&(n.event.special.bsTransitionEnd={bindType:n.support.transition.end,delegateType:n.support.transition.end,handle:function(t){if(n(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}})})}(jQuery),function(s){"use strict";var e='[data-dismiss="alert"]',a=function(t){s(t).on("click",e,this.close)};a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.close=function(t){var e=s(this),i=e.attr("data-target");i||(i=(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),i="#"===i?[]:i;var o=s(document).find(i);function n(){o.detach().trigger("closed.bs.alert").remove()}t&&t.preventDefault(),o.length||(o=e.closest(".alert")),o.trigger(t=s.Event("close.bs.alert")),t.isDefaultPrevented()||(o.removeClass("in"),s.support.transition&&o.hasClass("fade")?o.one("bsTransitionEnd",n).emulateTransitionEnd(a.TRANSITION_DURATION):n())};var t=s.fn.alert;s.fn.alert=function o(i){return this.each(function(){var t=s(this),e=t.data("bs.alert");e||t.data("bs.alert",e=new a(this)),"string"==typeof i&&e[i].call(t)})},s.fn.alert.Constructor=a,s.fn.alert.noConflict=function(){return 
s.fn.alert=t,this},s(document).on("click.bs.alert.data-api",e,a.prototype.close)}(jQuery),function(s){"use strict";var n=function(t,e){this.$element=s(t),this.options=s.extend({},n.DEFAULTS,e),this.isLoading=!1};function i(o){return this.each(function(){var t=s(this),e=t.data("bs.button"),i="object"==typeof o&&o;e||t.data("bs.button",e=new n(this,i)),"toggle"==o?e.toggle():o&&e.setState(o)})}n.VERSION="3.4.1",n.DEFAULTS={loadingText:"loading..."},n.prototype.setState=function(t){var e="disabled",i=this.$element,o=i.is("input")?"val":"html",n=i.data();t+="Text",null==n.resetText&&i.data("resetText",i[o]()),setTimeout(s.proxy(function(){i[o](null==n[t]?this.options[t]:n[t]),"loadingText"==t?(this.isLoading=!0,i.addClass(e).attr(e,e).prop(e,!0)):this.isLoading&&(this.isLoading=!1,i.removeClass(e).removeAttr(e).prop(e,!1))},this),0)},n.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var t=s.fn.button;s.fn.button=i,s.fn.button.Constructor=n,s.fn.button.noConflict=function(){return s.fn.button=t,this},s(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(t){var e=s(t.target).closest(".btn");i.call(e,"toggle"),s(t.target).is('input[type="radio"], input[type="checkbox"]')||(t.preventDefault(),e.is("input,button")?e.trigger("focus"):e.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api 
blur.bs.button.data-api",'[data-toggle^="button"]',function(t){s(t.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(t.type))})}(jQuery),function(p){"use strict";var c=function(t,e){this.$element=p(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=e,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",p.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",p.proxy(this.pause,this)).on("mouseleave.bs.carousel",p.proxy(this.cycle,this))};function r(n){return this.each(function(){var t=p(this),e=t.data("bs.carousel"),i=p.extend({},c.DEFAULTS,t.data(),"object"==typeof n&&n),o="string"==typeof n?n:i.slide;e||t.data("bs.carousel",e=new c(this,i)),"number"==typeof n?e.to(n):o?e[o]():i.interval&&e.pause().cycle()})}c.VERSION="3.4.1",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},c.prototype.cycle=function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(p.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},c.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e);if(("prev"==t&&0===i||"next"==t&&i==this.$items.length-1)&&!this.options.wrap)return e;var o=(i+("prev"==t?-1:1))%this.$items.length;return this.$items.eq(o)},c.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(t>this.$items.length-1||t<0))return 
this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(i<t?"next":"prev",this.$items.eq(t))},c.prototype.pause=function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&p.support.transition&&(this.$element.trigger(p.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(t,e){var i=this.$element.find(".item.active"),o=e||this.getItemForDirection(t,i),n=this.interval,s="next"==t?"left":"right",a=this;if(o.hasClass("active"))return this.sliding=!1;var r=o[0],l=p.Event("slide.bs.carousel",{relatedTarget:r,direction:s});if(this.$element.trigger(l),!l.isDefaultPrevented()){if(this.sliding=!0,n&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var h=p(this.$indicators.children()[this.getItemIndex(o)]);h&&h.addClass("active")}var d=p.Event("slid.bs.carousel",{relatedTarget:r,direction:s});return p.support.transition&&this.$element.hasClass("slide")?(o.addClass(t),"object"==typeof o&&o.length&&o[0].offsetWidth,i.addClass(s),o.addClass(s),i.one("bsTransitionEnd",function(){o.removeClass([t,s].join(" ")).addClass("active"),i.removeClass(["active",s].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger(d)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(i.removeClass("active"),o.addClass("active"),this.sliding=!1,this.$element.trigger(d)),n&&this.cycle(),this}};var t=p.fn.carousel;p.fn.carousel=r,p.fn.carousel.Constructor=c,p.fn.carousel.noConflict=function(){return p.fn.carousel=t,this};var e=function(t){var e=p(this),i=e.attr("href");i&&(i=i.replace(/.*(?=#[^\s]+$)/,""));var o=e.attr("data-target")||i,n=p(document).find(o);if(n.hasClass("carousel")){var 
s=p.extend({},n.data(),e.data()),a=e.attr("data-slide-to");a&&(s.interval=!1),r.call(n,s),a&&n.data("bs.carousel").to(a),t.preventDefault()}};p(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),p(window).on("load",function(){p('[data-ride="carousel"]').each(function(){var t=p(this);r.call(t,t.data())})})}(jQuery),function(a){"use strict";var r=function(t,e){this.$element=a(t),this.options=a.extend({},r.DEFAULTS,e),this.$trigger=a('[data-toggle="collapse"][href="#'+t.id+'"],[data-toggle="collapse"][data-target="#'+t.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};function n(t){var e,i=t.attr("data-target")||(e=t.attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"");return a(document).find(i)}function l(o){return this.each(function(){var t=a(this),e=t.data("bs.collapse"),i=a.extend({},r.DEFAULTS,t.data(),"object"==typeof o&&o);!e&&i.toggle&&/show|hide/.test(o)&&(i.toggle=!1),e||t.data("bs.collapse",e=new r(this,i)),"string"==typeof o&&e[o]()})}r.VERSION="3.4.1",r.TRANSITION_DURATION=350,r.DEFAULTS={toggle:!0},r.prototype.dimension=function(){return this.$element.hasClass("width")?"width":"height"},r.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var t,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(t=e.data("bs.collapse"))&&t.transitioning)){var i=a.Event("show.bs.collapse");if(this.$element.trigger(i),!i.isDefaultPrevented()){e&&e.length&&(l.call(e,"hide"),t||e.data("bs.collapse",null));var o=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[o](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var n=function(){this.$element.removeClass("collapsing").addClass("collapse 
in")[o](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return n.call(this);var s=a.camelCase(["scroll",o].join("-"));this.$element.one("bsTransitionEnd",a.proxy(n,this)).emulateTransitionEnd(r.TRANSITION_DURATION)[o](this.$element[0][s])}}}},r.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var t=a.Event("hide.bs.collapse");if(this.$element.trigger(t),!t.isDefaultPrevented()){var e=this.dimension();this.$element[e](this.$element[e]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var i=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};if(!a.support.transition)return i.call(this);this.$element[e](0).one("bsTransitionEnd",a.proxy(i,this)).emulateTransitionEnd(r.TRANSITION_DURATION)}}},r.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},r.prototype.getParent=function(){return a(document).find(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(t,e){var i=a(e);this.addAriaAndCollapsedClass(n(i),i)},this)).end()},r.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var t=a.fn.collapse;a.fn.collapse=l,a.fn.collapse.Constructor=r,a.fn.collapse.noConflict=function(){return a.fn.collapse=t,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(t){var e=a(this);e.attr("data-target")||t.preventDefault();var i=n(e),o=i.data("bs.collapse")?"toggle":e.data();l.call(i,o)})}(jQuery),function(a){"use strict";var r='[data-toggle="dropdown"]',o=function(t){a(t).on("click.bs.dropdown",this.toggle)};function l(t){var 
e=t.attr("data-target");e||(e=(e=t.attr("href"))&&/#[A-Za-z]/.test(e)&&e.replace(/.*(?=#[^\s]*$)/,""));var i="#"!==e?a(document).find(e):null;return i&&i.length?i:t.parent()}function s(o){o&&3===o.which||(a(".dropdown-backdrop").remove(),a(r).each(function(){var t=a(this),e=l(t),i={relatedTarget:this};e.hasClass("open")&&(o&&"click"==o.type&&/input|textarea/i.test(o.target.tagName)&&a.contains(e[0],o.target)||(e.trigger(o=a.Event("hide.bs.dropdown",i)),o.isDefaultPrevented()||(t.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",i)))))}))}o.VERSION="3.4.1",o.prototype.toggle=function(t){var e=a(this);if(!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(s(),!o){"ontouchstart"in document.documentElement&&!i.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",s);var n={relatedTarget:this};if(i.trigger(t=a.Event("show.bs.dropdown",n)),t.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),i.toggleClass("open").trigger(a.Event("shown.bs.dropdown",n))}return!1}},o.prototype.keydown=function(t){if(/(38|40|27|32)/.test(t.which)&&!/input|textarea/i.test(t.target.tagName)){var e=a(this);if(t.preventDefault(),t.stopPropagation(),!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(!o&&27!=t.which||o&&27==t.which)return 27==t.which&&i.find(r).trigger("focus"),e.trigger("click");var n=i.find(".dropdown-menu li:not(.disabled):visible a");if(n.length){var s=n.index(t.target);38==t.which&&0<s&&s--,40==t.which&&s<n.length-1&&s++,~s||(s=0),n.eq(s).trigger("focus")}}}};var t=a.fn.dropdown;a.fn.dropdown=function e(i){return this.each(function(){var t=a(this),e=t.data("bs.dropdown");e||t.data("bs.dropdown",e=new o(this)),"string"==typeof i&&e[i].call(t)})},a.fn.dropdown.Constructor=o,a.fn.dropdown.noConflict=function(){return 
a.fn.dropdown=t,this},a(document).on("click.bs.dropdown.data-api",s).on("click.bs.dropdown.data-api",".dropdown form",function(t){t.stopPropagation()}).on("click.bs.dropdown.data-api",r,o.prototype.toggle).on("keydown.bs.dropdown.data-api",r,o.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",o.prototype.keydown)}(jQuery),function(a){"use strict";var s=function(t,e){this.options=e,this.$body=a(document.body),this.$element=a(t),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.fixedContent=".navbar-fixed-top, .navbar-fixed-bottom",this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};function r(o,n){return this.each(function(){var t=a(this),e=t.data("bs.modal"),i=a.extend({},s.DEFAULTS,t.data(),"object"==typeof o&&o);e||t.data("bs.modal",e=new s(this,i)),"string"==typeof o?e[o](n):i.show&&e.show(n)})}s.VERSION="3.4.1",s.TRANSITION_DURATION=300,s.BACKDROP_TRANSITION_DURATION=150,s.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},s.prototype.toggle=function(t){return this.isShown?this.hide():this.show(t)},s.prototype.show=function(i){var o=this,t=a.Event("show.bs.modal",{relatedTarget:i});this.$element.trigger(t),this.isShown||t.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){o.$element.one("mouseup.dismiss.bs.modal",function(t){a(t.target).is(o.$element)&&(o.ignoreBackdropClick=!0)})}),this.backdrop(function(){var 
t=a.support.transition&&o.$element.hasClass("fade");o.$element.parent().length||o.$element.appendTo(o.$body),o.$element.show().scrollTop(0),o.adjustDialog(),t&&o.$element[0].offsetWidth,o.$element.addClass("in"),o.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:i});t?o.$dialog.one("bsTransitionEnd",function(){o.$element.trigger("focus").trigger(e)}).emulateTransitionEnd(s.TRANSITION_DURATION):o.$element.trigger("focus").trigger(e)}))},s.prototype.hide=function(t){t&&t.preventDefault(),t=a.Event("hide.bs.modal"),this.$element.trigger(t),this.isShown&&!t.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(s.TRANSITION_DURATION):this.hideModal())},s.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(t){document===t.target||this.$element[0]===t.target||this.$element.has(t.target).length||this.$element.trigger("focus")},this))},s.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(t){27==t.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},s.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},s.prototype.hideModal=function(){var t=this;this.$element.hide(),this.backdrop(function(){t.$body.removeClass("modal-open"),t.resetAdjustments(),t.resetScrollbar(),t.$element.trigger("hidden.bs.modal")})},s.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},s.prototype.backdrop=function(t){var 
e=this,i=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var o=a.support.transition&&i;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+i).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(t){this.ignoreBackdropClick?this.ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide())},this)),o&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!t)return;o?this.$backdrop.one("bsTransitionEnd",t).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):t()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var n=function(){e.removeBackdrop(),t&&t()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",n).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):n()}else t&&t()},s.prototype.handleUpdate=function(){this.adjustDialog()},s.prototype.adjustDialog=function(){var t=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},s.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},s.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth<t,this.scrollbarWidth=this.measureScrollbar()},s.prototype.setScrollbar=function(){var t=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"";var n=this.scrollbarWidth;this.bodyIsOverflowing&&(this.$body.css("padding-right",t+n),a(this.fixedContent).each(function(t,e){var 
i=e.style.paddingRight,o=a(e).css("padding-right");a(e).data("padding-right",i).css("padding-right",parseFloat(o)+n+"px")}))},s.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad),a(this.fixedContent).each(function(t,e){var i=a(e).data("padding-right");a(e).removeData("padding-right"),e.style.paddingRight=i||""})},s.prototype.measureScrollbar=function(){var t=document.createElement("div");t.className="modal-scrollbar-measure",this.$body.append(t);var e=t.offsetWidth-t.clientWidth;return this.$body[0].removeChild(t),e};var t=a.fn.modal;a.fn.modal=r,a.fn.modal.Constructor=s,a.fn.modal.noConflict=function(){return a.fn.modal=t,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(t){var e=a(this),i=e.attr("href"),o=e.attr("data-target")||i&&i.replace(/.*(?=#[^\s]+$)/,""),n=a(document).find(o),s=n.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(i)&&i},n.data(),e.data());e.is("a")&&t.preventDefault(),n.one("show.bs.modal",function(t){t.isDefaultPrevented()||n.one("hidden.bs.modal",function(){e.is(":visible")&&e.trigger("focus")})}),r.call(n,s,this)})}(jQuery),function(g){"use strict";var o=["sanitize","whiteList","sanitizeFn"],a=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],t={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},r=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,l=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function u(t,e){var i=t.nodeName.toLowerCase();if(-1!==g.inArray(i,e))return-1===g.inArray(i,a)||Boolean(t.nodeValue.match(r)||t.nodeValue.match(l));for(var o=g(e).filter(function(t,e){return e 
instanceof RegExp}),n=0,s=o.length;n<s;n++)if(i.match(o[n]))return!0;return!1}function n(t,e,i){if(0===t.length)return t;if(i&&"function"==typeof i)return i(t);if(!document.implementation||!document.implementation.createHTMLDocument)return t;var o=document.implementation.createHTMLDocument("sanitization");o.body.innerHTML=t;for(var n=g.map(e,function(t,e){return e}),s=g(o.body).find("*"),a=0,r=s.length;a<r;a++){var l=s[a],h=l.nodeName.toLowerCase();if(-1!==g.inArray(h,n))for(var d=g.map(l.attributes,function(t){return t}),p=[].concat(e["*"]||[],e[h]||[]),c=0,f=d.length;c<f;c++)u(d[c],p)||l.removeAttribute(d[c].nodeName);else l.parentNode.removeChild(l)}return o.body.innerHTML}var m=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};m.VERSION="3.4.1",m.TRANSITION_DURATION=150,m.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0},sanitize:!0,sanitizeFn:null,whiteList:t},m.prototype.init=function(t,e,i){if(this.enabled=!0,this.type=t,this.$element=g(e),this.options=this.getOptions(i),this.$viewport=this.options.viewport&&g(document).find(g.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var o=this.options.trigger.split(" "),n=o.length;n--;){var s=o[n];if("click"==s)this.$element.on("click."+this.type,this.options.selector,g.proxy(this.toggle,this));else if("manual"!=s){var 
a="hover"==s?"mouseenter":"focusin",r="hover"==s?"mouseleave":"focusout";this.$element.on(a+"."+this.type,this.options.selector,g.proxy(this.enter,this)),this.$element.on(r+"."+this.type,this.options.selector,g.proxy(this.leave,this))}}this.options.selector?this._options=g.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},m.prototype.getDefaults=function(){return m.DEFAULTS},m.prototype.getOptions=function(t){var e=this.$element.data();for(var i in e)e.hasOwnProperty(i)&&-1!==g.inArray(i,o)&&delete e[i];return(t=g.extend({},this.getDefaults(),e,t)).delay&&"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),t.sanitize&&(t.template=n(t.template,t.whiteList,t.sanitizeFn)),t},m.prototype.getDelegateOptions=function(){var i={},o=this.getDefaults();return this._options&&g.each(this._options,function(t,e){o[t]!=e&&(i[t]=e)}),i},m.prototype.enter=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusin"==t.type?"focus":"hover"]=!0),e.tip().hasClass("in")||"in"==e.hoverState)e.hoverState="in";else{if(clearTimeout(e.timeout),e.hoverState="in",!e.options.delay||!e.options.delay.show)return e.show();e.timeout=setTimeout(function(){"in"==e.hoverState&&e.show()},e.options.delay.show)}},m.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},m.prototype.leave=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusout"==t.type?"focus":"hover"]=!1),!e.isInStateTrue()){if(clearTimeout(e.timeout),e.hoverState="out",!e.options.delay||!e.options.delay.hide)return 
e.hide();e.timeout=setTimeout(function(){"out"==e.hoverState&&e.hide()},e.options.delay.hide)}},m.prototype.show=function(){var t=g.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(t);var e=g.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(t.isDefaultPrevented()||!e)return;var i=this,o=this.tip(),n=this.getUID(this.type);this.setContent(),o.attr("id",n),this.$element.attr("aria-describedby",n),this.options.animation&&o.addClass("fade");var s="function"==typeof this.options.placement?this.options.placement.call(this,o[0],this.$element[0]):this.options.placement,a=/\s?auto?\s?/i,r=a.test(s);r&&(s=s.replace(a,"")||"top"),o.detach().css({top:0,left:0,display:"block"}).addClass(s).data("bs."+this.type,this),this.options.container?o.appendTo(g(document).find(this.options.container)):o.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var l=this.getPosition(),h=o[0].offsetWidth,d=o[0].offsetHeight;if(r){var p=s,c=this.getPosition(this.$viewport);s="bottom"==s&&l.bottom+d>c.bottom?"top":"top"==s&&l.top-d<c.top?"bottom":"right"==s&&l.right+h>c.width?"left":"left"==s&&l.left-h<c.left?"right":s,o.removeClass(p).addClass(s)}var f=this.getCalculatedOffset(s,l,h,d);this.applyPlacement(f,s);var u=function(){var t=i.hoverState;i.$element.trigger("shown.bs."+i.type),i.hoverState=null,"out"==t&&i.leave(i)};g.support.transition&&this.$tip.hasClass("fade")?o.one("bsTransitionEnd",u).emulateTransitionEnd(m.TRANSITION_DURATION):u()}},m.prototype.applyPlacement=function(t,e){var i=this.tip(),o=i[0].offsetWidth,n=i[0].offsetHeight,s=parseInt(i.css("margin-top"),10),a=parseInt(i.css("margin-left"),10);isNaN(s)&&(s=0),isNaN(a)&&(a=0),t.top+=s,t.left+=a,g.offset.setOffset(i[0],g.extend({using:function(t){i.css({top:Math.round(t.top),left:Math.round(t.left)})}},t),0),i.addClass("in");var r=i[0].offsetWidth,l=i[0].offsetHeight;"top"==e&&l!=n&&(t.top=t.top+n-l);var 
h=this.getViewportAdjustedDelta(e,t,r,l);h.left?t.left+=h.left:t.top+=h.top;var d=/top|bottom/.test(e),p=d?2*h.left-o+r:2*h.top-n+l,c=d?"offsetWidth":"offsetHeight";i.offset(t),this.replaceArrow(p,i[0][c],d)},m.prototype.replaceArrow=function(t,e,i){this.arrow().css(i?"left":"top",50*(1-t/e)+"%").css(i?"top":"left","")},m.prototype.setContent=function(){var t=this.tip(),e=this.getTitle();this.options.html?(this.options.sanitize&&(e=n(e,this.options.whiteList,this.options.sanitizeFn)),t.find(".tooltip-inner").html(e)):t.find(".tooltip-inner").text(e),t.removeClass("fade in top bottom left right")},m.prototype.hide=function(t){var e=this,i=g(this.$tip),o=g.Event("hide.bs."+this.type);function n(){"in"!=e.hoverState&&i.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),t&&t()}if(this.$element.trigger(o),!o.isDefaultPrevented())return i.removeClass("in"),g.support.transition&&i.hasClass("fade")?i.one("bsTransitionEnd",n).emulateTransitionEnd(m.TRANSITION_DURATION):n(),this.hoverState=null,this},m.prototype.fixTitle=function(){var t=this.$element;(t.attr("title")||"string"!=typeof t.attr("data-original-title"))&&t.attr("data-original-title",t.attr("title")||"").attr("title","")},m.prototype.hasContent=function(){return this.getTitle()},m.prototype.getPosition=function(t){var e=(t=t||this.$element)[0],i="BODY"==e.tagName,o=e.getBoundingClientRect();null==o.width&&(o=g.extend({},o,{width:o.right-o.left,height:o.bottom-o.top}));var n=window.SVGElement&&e instanceof window.SVGElement,s=i?{top:0,left:0}:n?null:t.offset(),a={scroll:i?document.documentElement.scrollTop||document.body.scrollTop:t.scrollTop()},r=i?{width:g(window).width(),height:g(window).height()}:null;return 
g.extend({},o,a,r,s)},m.prototype.getCalculatedOffset=function(t,e,i,o){return"bottom"==t?{top:e.top+e.height,left:e.left+e.width/2-i/2}:"top"==t?{top:e.top-o,left:e.left+e.width/2-i/2}:"left"==t?{top:e.top+e.height/2-o/2,left:e.left-i}:{top:e.top+e.height/2-o/2,left:e.left+e.width}},m.prototype.getViewportAdjustedDelta=function(t,e,i,o){var n={top:0,left:0};if(!this.$viewport)return n;var s=this.options.viewport&&this.options.viewport.padding||0,a=this.getPosition(this.$viewport);if(/right|left/.test(t)){var r=e.top-s-a.scroll,l=e.top+s-a.scroll+o;r<a.top?n.top=a.top-r:l>a.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;h<a.left?n.left=a.left-h:d>a.right&&(n.left=a.left+a.width-d)}return n},m.prototype.getTitle=function(){var t=this.$element,e=this.options;return t.attr("data-original-title")||("function"==typeof e.title?e.title.call(t[0]):e.title)},m.prototype.getUID=function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},m.prototype.tip=function(){if(!this.$tip&&(this.$tip=g(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},m.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},m.prototype.enable=function(){this.enabled=!0},m.prototype.disable=function(){this.enabled=!1},m.prototype.toggleEnabled=function(){this.enabled=!this.enabled},m.prototype.toggle=function(t){var e=this;t&&((e=g(t.currentTarget).data("bs."+this.type))||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e))),t?(e.inState.click=!e.inState.click,e.isInStateTrue()?e.enter(e):e.leave(e)):e.tip().hasClass("in")?e.leave(e):e.enter(e)},m.prototype.destroy=function(){var 
t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})},m.prototype.sanitizeHtml=function(t){return n(t,this.options.whiteList,this.options.sanitizeFn)};var e=g.fn.tooltip;g.fn.tooltip=function i(o){return this.each(function(){var t=g(this),e=t.data("bs.tooltip"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.tooltip",e=new m(this,i)),"string"==typeof o&&e[o]())})},g.fn.tooltip.Constructor=m,g.fn.tooltip.noConflict=function(){return g.fn.tooltip=e,this}}(jQuery),function(n){"use strict";var s=function(t,e){this.init("popover",t,e)};if(!n.fn.tooltip)throw new Error("Popover requires tooltip.js");s.VERSION="3.4.1",s.DEFAULTS=n.extend({},n.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),((s.prototype=n.extend({},n.fn.tooltip.Constructor.prototype)).constructor=s).prototype.getDefaults=function(){return s.DEFAULTS},s.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();if(this.options.html){var o=typeof i;this.options.sanitize&&(e=this.sanitizeHtml(e),"string"===o&&(i=this.sanitizeHtml(i))),t.find(".popover-title").html(e),t.find(".popover-content").children().detach().end()["string"===o?"html":"append"](i)}else t.find(".popover-title").text(e),t.find(".popover-content").children().detach().end().text(i);t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},s.prototype.hasContent=function(){return this.getTitle()||this.getContent()},s.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},s.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".arrow")};var t=n.fn.popover;n.fn.popover=function e(o){return this.each(function(){var t=n(this),e=t.data("bs.popover"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.popover",e=new s(this,i)),"string"==typeof o&&e[o]())})},n.fn.popover.Constructor=s,n.fn.popover.noConflict=function(){return n.fn.popover=t,this}}(jQuery),function(s){"use strict";function n(t,e){this.$body=s(document.body),this.$scrollElement=s(t).is(document.body)?s(window):s(t),this.options=s.extend({},n.DEFAULTS,e),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",s.proxy(this.process,this)),this.refresh(),this.process()}function e(o){return this.each(function(){var t=s(this),e=t.data("bs.scrollspy"),i="object"==typeof o&&o;e||t.data("bs.scrollspy",e=new n(this,i)),"string"==typeof o&&e[o]()})}n.VERSION="3.4.1",n.DEFAULTS={offset:10},n.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},n.prototype.refresh=function(){var t=this,o="offset",n=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),s.isWindow(this.$scrollElement[0])||(o="position",n=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var t=s(this),e=t.data("target")||t.attr("href"),i=/^#./.test(e)&&s(e);return i&&i.length&&i.is(":visible")&&[[i[o]().top+n,e]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},n.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),o<=e)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e<n[0])return 
this.activeTarget=null,this.clear();for(t=n.length;t--;)a!=s[t]&&e>=n[t]&&(n[t+1]===undefined||e<n[t+1])&&this.activate(s[t])},n.prototype.activate=function(t){this.activeTarget=t,this.clear();var e=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',i=s(e).parents("li").addClass("active");i.parent(".dropdown-menu").length&&(i=i.closest("li.dropdown").addClass("active")),i.trigger("activate.bs.scrollspy")},n.prototype.clear=function(){s(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var t=s.fn.scrollspy;s.fn.scrollspy=e,s.fn.scrollspy.Constructor=n,s.fn.scrollspy.noConflict=function(){return s.fn.scrollspy=t,this},s(window).on("load.bs.scrollspy.data-api",function(){s('[data-spy="scroll"]').each(function(){var t=s(this);e.call(t,t.data())})})}(jQuery),function(r){"use strict";var a=function(t){this.element=r(t)};function e(i){return this.each(function(){var t=r(this),e=t.data("bs.tab");e||t.data("bs.tab",e=new a(this)),"string"==typeof i&&e[i]()})}a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.show=function(){var t=this.element,e=t.closest("ul:not(.dropdown-menu)"),i=t.data("target");if(i||(i=(i=t.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),!t.parent("li").hasClass("active")){var o=e.find(".active:last a"),n=r.Event("hide.bs.tab",{relatedTarget:t[0]}),s=r.Event("show.bs.tab",{relatedTarget:o[0]});if(o.trigger(n),t.trigger(s),!s.isDefaultPrevented()&&!n.isDefaultPrevented()){var a=r(document).find(i);this.activate(t.closest("li"),e),this.activate(a,a.parent(),function(){o.trigger({type:"hidden.bs.tab",relatedTarget:t[0]}),t.trigger({type:"shown.bs.tab",relatedTarget:o[0]})})}}},a.prototype.activate=function(t,e,i){var o=e.find("> .active"),n=i&&r.support.transition&&(o.length&&o.hasClass("fade")||!!e.find("> .fade").length);function s(){o.removeClass("active").find("> .dropdown-menu > 
.active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),t.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),n?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu").length&&t.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),i&&i()}o.length&&n?o.one("bsTransitionEnd",s).emulateTransitionEnd(a.TRANSITION_DURATION):s(),o.removeClass("in")};var t=r.fn.tab;r.fn.tab=e,r.fn.tab.Constructor=a,r.fn.tab.noConflict=function(){return r.fn.tab=t,this};var i=function(t){t.preventDefault(),e.call(r(this),"show")};r(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',i).on("click.bs.tab.data-api",'[data-toggle="pill"]',i)}(jQuery),function(l){"use strict";var h=function(t,e){this.options=l.extend({},h.DEFAULTS,e);var i=this.options.target===h.DEFAULTS.target?l(this.options.target):l(document).find(this.options.target);this.$target=i.on("scroll.bs.affix.data-api",l.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",l.proxy(this.checkPositionWithEventLoop,this)),this.$element=l(t),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};function i(o){return this.each(function(){var t=l(this),e=t.data("bs.affix"),i="object"==typeof o&&o;e||t.data("bs.affix",e=new h(this,i)),"string"==typeof o&&e[o]()})}h.VERSION="3.4.1",h.RESET="affix affix-top affix-bottom",h.DEFAULTS={offset:0,target:window},h.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return n<i&&"top";if("bottom"==this.affixed)return null!=i?!(n+this.unpin<=s.top)&&"bottom":!(n+a<=t-o)&&"bottom";var r=null==this.affixed,l=r?n:s.top;return null!=i&&n<=i?"top":null!=o&&t-o<=l+(r?a:e)&&"bottom"},h.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(h.RESET).addClass("affix");var 
t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},h.prototype.checkPositionWithEventLoop=function(){setTimeout(l.proxy(this.checkPosition,this),1)},h.prototype.checkPosition=function(){if(this.$element.is(":visible")){var t=this.$element.height(),e=this.options.offset,i=e.top,o=e.bottom,n=Math.max(l(document).height(),l(document.body).height());"object"!=typeof e&&(o=i=e),"function"==typeof i&&(i=e.top(this.$element)),"function"==typeof o&&(o=e.bottom(this.$element));var s=this.getState(n,t,i,o);if(this.affixed!=s){null!=this.unpin&&this.$element.css("top","");var a="affix"+(s?"-"+s:""),r=l.Event(a+".bs.affix");if(this.$element.trigger(r),r.isDefaultPrevented())return;this.affixed=s,this.unpin="bottom"==s?this.getPinnedOffset():null,this.$element.removeClass(h.RESET).addClass(a).trigger(a.replace("affix","affixed")+".bs.affix")}"bottom"==s&&this.$element.offset({top:n-t-o})}};var t=l.fn.affix;l.fn.affix=i,l.fn.affix.Constructor=h,l.fn.affix.noConflict=function(){return l.fn.affix=t,this},l(window).on("load",function(){l('[data-spy="affix"]').each(function(){var t=l(this),e=t.data();e.offset=e.offset||{},null!=e.offsetBottom&&(e.offset.bottom=e.offsetBottom),null!=e.offsetTop&&(e.offset.top=e.offsetTop),i.call(t,e)})})}(jQuery); \ No newline at end of file | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js deleted file mode 100644 index c4a924160d..0000000000 --- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js +++ /dev/null | |||
@@ -1,7 +0,0 @@ | |||
1 | /*! | ||
2 | * Bootstrap v3.3.6 (http://getbootstrap.com) | ||
3 | * Copyright 2011-2016 Twitter, Inc. | ||
4 | * Licensed under the MIT license | ||
5 | */ | ||
6 | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return 
a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function 
b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, 
.prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var 
c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var 
b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var 
d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown 
form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var 
f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return 
this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var 
d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var 
h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var 
b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var 
l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);return this.$element.trigger(g),g.isDefaultPrevented()?void 0:(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this)},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=d?{top:0,left:0}:b.offset(),g={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},h=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,g,h,f)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var 
h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires 
tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return 
this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")), | ||
7 | d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var 
d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var 
h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); \ No newline at end of file | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js new file mode 100644 index 0000000000..7f37b5d991 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js | |||
@@ -0,0 +1,2 @@ | |||
1 | /*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */ | ||
2 | !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}function fe(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}ce.fn=ce.prototype={jquery:t,constructor:ce,length:0,toArray:function(){return ae.call(this)},get:function(e){return null==e?ae.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=ce.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return ce.each(this,e)},map:function(n){return this.pushStack(ce.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(ae.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(ce.grep(this,function(e,t){return(t+1)%2}))},odd:function(){return this.pushStack(ce.grep(this,function(e,t){return 
t%2}))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:oe.sort,splice:oe.splice},ce.extend=ce.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||v(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(ce.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||ce.isPlainObject(n)?n:{},i=!1,a[t]=ce.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},ce.extend({expando:"jQuery"+(t+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==i.call(e))&&(!(t=r(e))||"function"==typeof(n=ue.call(t,"constructor")&&t.constructor)&&o.call(n)===a)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t,n){m(e,{nonce:t&&t.nonce},n)},each:function(e,t){var n,r=0;if(c(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},text:function(e){var t,n="",r=0,i=e.nodeType;if(!i)while(t=e[r++])n+=ce.text(t);return 1===i||11===i?e.textContent:9===i?e.documentElement.textContent:3===i||4===i?e.nodeValue:n},makeArray:function(e,t){var n=t||[];return null!=e&&(c(Object(e))?ce.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:se.call(t,e,n)},isXMLDoc:function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!l.test(t||n&&n.nodeName||"HTML")},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var 
r,i,o=0,a=[];if(c(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g(a)},guid:1,support:le}),"function"==typeof Symbol&&(ce.fn[Symbol.iterator]=oe[Symbol.iterator]),ce.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var pe=oe.pop,de=oe.sort,he=oe.splice,ge="[\\x20\\t\\r\\n\\f]",ve=new RegExp("^"+ge+"+|((?:^|[^\\\\])(?:\\\\.)*)"+ge+"+$","g");ce.contains=function(e,t){var n=t&&t.parentNode;return e===n||!(!n||1!==n.nodeType||!(e.contains?e.contains(n):e.compareDocumentPosition&&16&e.compareDocumentPosition(n)))};var f=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\x80-\uFFFF\w-]/g;function p(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e}ce.escapeSelector=function(e){return(e+"").replace(f,p)};var ye=C,me=s;!function(){var e,b,w,o,a,T,r,C,d,i,k=me,S=ce.expando,E=0,n=0,s=W(),c=W(),u=W(),h=W(),l=function(e,t){return e===t&&(a=!0),0},f="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",t="(?:\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+",p="\\["+ge+"*("+t+")(?:"+ge+"*([*^$|!~]?=)"+ge+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+t+"))|)"+ge+"*\\]",g=":("+t+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+p+")*)|.*)\\)|)",v=new RegExp(ge+"+","g"),y=new RegExp("^"+ge+"*,"+ge+"*"),m=new RegExp("^"+ge+"*([>+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new 
RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return 
fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof 
t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var t;r.appendChild(e).innerHTML="<a id='"+S+"' href='' disabled='disabled'></a><select id='"+S+"-\r\\' disabled='disabled'><option selected=''></option></select>",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0<I(t,T,null,[e]).length},I.contains=function(e,t){return(e.ownerDocument||e)!=T&&V(e),ce.contains(e,t)},I.attr=function(e,t){(e.ownerDocument||e)!=T&&V(e);var n=b.attrHandle[t.toLowerCase()],r=n&&ue.call(b.attrHandle,t.toLowerCase())?n(e,t,!C):void 0;return void 0!==r?r:e.getAttribute(t)},I.error=function(e){throw new 
Error("Syntax error, unrecognized expression: "+e)},ce.uniqueSort=function(e){var t,n=[],r=0,i=0;if(a=!le.sortStable,o=!le.sortStable&&ae.call(e,0),de.call(e,l),a){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)he.call(e,n[r],1)}return o=null,e},ce.fn.uniqueSort=function(){return this.pushStack(ce.uniqueSort(ae.apply(this)))},(b=ce.expr={cacheLength:50,createPseudo:F,match:D,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(v," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(d,e,t,h,g){var v="nth"!==d.slice(0,3),y="last"!==d.slice(-4),m="of-type"===e;return 1===h&&0===g?function(e){return!!e.parentNode}:function(e,t,n){var 
r,i,o,a,s,u=v!==y?"nextSibling":"previousSibling",l=e.parentNode,c=m&&e.nodeName.toLowerCase(),f=!n&&!m,p=!1;if(l){if(v){while(u){o=e;while(o=o[u])if(m?fe(o,c):1===o.nodeType)return!1;s=u="only"===d&&!s&&"nextSibling"}return!0}if(s=[y?l.firstChild:l.lastChild],y&&f){p=(a=(r=(i=l[S]||(l[S]={}))[d]||[])[0]===E&&r[1])&&r[2],o=a&&l.childNodes[a];while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if(1===o.nodeType&&++p&&o===e){i[d]=[E,a,p];break}}else if(f&&(p=a=(r=(i=e[S]||(e[S]={}))[d]||[])[0]===E&&r[1]),!1===p)while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if((m?fe(o,c):1===o.nodeType)&&++p&&(f&&((i=o[S]||(o[S]={}))[d]=[E,p]),o===e))break;return(p-=g)===h||p%h==0&&0<=p/h}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||I.error("unsupported pseudo: "+e);return a[S]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?F(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=se.call(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:F(function(e){var r=[],i=[],s=ne(e.replace(ve,"$1"));return s[S]?F(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:F(function(t){return function(e){return 0<I(t,e).length}}),contains:F(function(t){return t=t.replace(O,P),function(e){return-1<(e.textContent||ce.text(e)).indexOf(t)}}),lang:F(function(n){return A.test(n||"")||I.error("unsupported lang: "+n),n=n.replace(O,P).toLowerCase(),function(e){var t;do{if(t=C?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=ie.location&&ie.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===r},focus:function(e){return e===function(){try{return T.activeElement}catch(e){}}()&&T.hasFocus()&&!!(e.type||e.href||~e.tabIndex)},enabled:z(!1),disabled:z(!0),checked:function(e){return 
fe(e,"input")&&!!e.checked||fe(e,"option")&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return q.test(e.nodeName)},input:function(e){return N.test(e.nodeName)},button:function(e){return fe(e,"input")&&"button"===e.type||fe(e,"button")},text:function(e){var t;return fe(e,"input")&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:X(function(){return[0]}),last:X(function(e,t){return[t-1]}),eq:X(function(e,t,n){return[n<0?n+t:n]}),even:X(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:X(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:X(function(e,t,n){var r;for(r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:X(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=B(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=_(e);function G(){}function Y(e,t){var n,r,i,o,a,s,u,l=c[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=y.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=m.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(ve," ")}),a=a.slice(n.length)),b.filter)!(r=D[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?I.error(e):c(e,s).slice(0)}function Q(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function J(a,e,t){var s=e.dir,u=e.next,l=u||s,c=t&&"parentNode"===l,f=n++;return e.first?function(e,t,n){while(e=e[s])if(1===e.nodeType||c)return a(e,t,n);return!1}:function(e,t,n){var r,i,o=[E,f];if(n){while(e=e[s])if((1===e.nodeType||c)&&a(e,t,n))return!0}else 
while(e=e[s])if(1===e.nodeType||c)if(i=e[S]||(e[S]={}),u&&fe(e,u))e=e[s]||e;else{if((r=i[l])&&r[0]===E&&r[1]===f)return o[2]=r[2];if((i[l]=o)[2]=a(e,t,n))return!0}return!1}}function K(i){return 1<i.length?function(e,t,n){var r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Z(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function ee(d,h,g,v,y,e){return v&&!v[S]&&(v=ee(v)),y&&!y[S]&&(y=ee(y,e)),F(function(e,t,n,r){var i,o,a,s,u=[],l=[],c=t.length,f=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)I(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),p=!d||!e&&h?f:Z(f,u,d,n,r);if(g?g(p,s=y||(e?d:c||v)?[]:t,n,r):s=p,v){i=Z(s,l),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(s[l[o]]=!(p[l[o]]=a))}if(e){if(y||d){if(y){i=[],o=s.length;while(o--)(a=s[o])&&i.push(p[o]=a);y(null,s=[],i,r)}o=s.length;while(o--)(a=s[o])&&-1<(i=y?se.call(e,a):u[o])&&(e[i]=!(t[i]=a))}}else s=Z(s===t?s.splice(c,s.length):s),y?y(null,t,s,r):k.apply(t,s)})}function te(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=J(function(e){return e===i},a,!0),l=J(function(e){return-1<se.call(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!=w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[J(K(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[S]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return ee(1<s&&K(c),1<s&&Q(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(ve,"$1"),t,s<n&&te(e.slice(s,n)),n<r&&te(e=e.slice(n)),n<r&&Q(e))}c.push(t)}return K(c)}function ne(e,t){var n,v,y,m,x,r,i=[],o=[],a=u[e+" "];if(!a){t||(t=Y(e)),n=t.length;while(n--)(a=te(t[n]))[S]?i.push(a):o.push(a);(a=u(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var 
o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=E+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t==T||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument==T||(V(o),n=!C);while(s=v[a++])if(s(o,t||T,n)){k.call(r,o);break}i&&(E=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=pe.call(r));f=Z(f)}k.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&ce.uniqueSort(r)}return i&&(E=h,w=p),c},m?F(r):r))).selector=e}return a}function re(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&Y(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&C&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(O,P),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=D.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(O,P),H.test(o[0].type)&&U(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&Q(o)))return k.apply(n,r),n;break}}}return(l||ne(e,c))(r,t,!C,n,!t||H.test(e)&&U(t.parentNode)||t),n}G.prototype=b.filters=b.pseudos,b.setFilters=new G,le.sortStable=S.split("").sort(l).join("")===S,V(),le.sortDetached=$(function(e){return 1&e.compareDocumentPosition(T.createElement("fieldset"))}),ce.find=I,ce.expr[":"]=ce.expr.pseudos,ce.unique=ce.uniqueSort,I.compile=ne,I.select=re,I.setDocument=V,I.tokenize=Y,I.escape=ce.escapeSelector,I.getText=ce.text,I.isXML=ce.isXMLDoc,I.selectors=ce.expr,I.support=ce.support,I.uniqueSort=ce.uniqueSort}();var d=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&ce(e).is(n))break;r.push(e)}return r},h=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},b=ce.expr.match.needsContext,w=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return 
v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1<se.call(n,e)!==r}):ce.filter(n,e,r)}ce.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?ce.find.matchesSelector(r,e)?[r]:[]:ce.find.matches(e,ce.grep(t,function(e){return 1===e.nodeType}))},ce.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(ce(e).filter(function(){for(t=0;t<r;t++)if(ce.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)ce.find(e,i[t],n);return 1<r?ce.uniqueSort(n):n},filter:function(e){return this.pushStack(T(this,e||[],!1))},not:function(e){return this.pushStack(T(this,e||[],!0))},is:function(e){return!!T(this,"string"==typeof e&&b.test(e)?ce(e):e||[],!1).length}});var k,S=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(ce.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof 
e&&ce(e);if(!b.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&ce.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?ce.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?se.call(ce(e),this[0]):se.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(ce.uniqueSort(ce.merge(this.get(),ce(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),ce.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return d(e,"parentNode")},parentsUntil:function(e,t,n){return d(e,"parentNode",n)},next:function(e){return A(e,"nextSibling")},prev:function(e){return A(e,"previousSibling")},nextAll:function(e){return d(e,"nextSibling")},prevAll:function(e){return d(e,"previousSibling")},nextUntil:function(e,t,n){return d(e,"nextSibling",n)},prevUntil:function(e,t,n){return d(e,"previousSibling",n)},siblings:function(e){return h((e.parentNode||{}).firstChild,e)},children:function(e){return h(e.firstChild)},contents:function(e){return null!=e.contentDocument&&r(e.contentDocument)?e.contentDocument:(fe(e,"template")&&(e=e.content||e),ce.merge([],e.childNodes))}},function(r,i){ce.fn[r]=function(e,t){var n=ce.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=ce.filter(t,n)),1<this.length&&(j[r]||ce.uniqueSort(n),E.test(r)&&n.reverse()),this.pushStack(n)}});var D=/[^\x20\t\r\n\f]+/g;function N(e){return e}function q(e){throw e}function L(e,t,n,r){var i;try{e&&v(i=e.promise)?i.call(e).done(t).fail(n):e&&v(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}ce.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},ce.each(e.match(D)||[],function(e,t){n[t]=!0}),n):ce.extend({},r);var 
i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){ce.each(e,function(e,t){v(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==x(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return ce.each(arguments,function(e,t){var n;while(-1<(n=ce.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<ce.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},ce.extend({Deferred:function(e){var o=[["notify","progress",ce.Callbacks("memory"),ce.Callbacks("memory"),2],["resolve","done",ce.Callbacks("once memory"),ce.Callbacks("once memory"),0,"resolved"],["reject","fail",ce.Callbacks("once memory"),ce.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return ce.Deferred(function(r){ce.each(o,function(e,t){var n=v(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&v(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof 
e)&&e.then,v(t)?s?t.call(e,l(u,o,N,s),l(u,o,q,s)):(u++,t.call(e,l(u,o,N,s),l(u,o,q,s),l(u,o,N,o.notifyWith))):(a!==N&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){ce.Deferred.exceptionHook&&ce.Deferred.exceptionHook(e,t.error),u<=i+1&&(a!==q&&(n=void 0,r=[e]),o.rejectWith(n,r))}};i?t():(ce.Deferred.getErrorHook?t.error=ce.Deferred.getErrorHook():ce.Deferred.getStackHook&&(t.error=ce.Deferred.getStackHook()),ie.setTimeout(t))}}return ce.Deferred(function(e){o[0][3].add(l(0,e,v(r)?r:N,e.notifyWith)),o[1][3].add(l(0,e,v(t)?t:N)),o[2][3].add(l(0,e,v(n)?n:q))}).promise()},promise:function(e){return null!=e?ce.extend(e,a):a}},s={};return ce.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=ae.call(arguments),o=ce.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?ae.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(L(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||v(i[t]&&i[t].then)))return o.then();while(t--)L(i[t],a(t),o.reject);return o.promise()}});var H=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;ce.Deferred.exceptionHook=function(e,t){ie.console&&ie.console.warn&&e&&H.test(e.name)&&ie.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},ce.readyException=function(e){ie.setTimeout(function(){throw e})};var O=ce.Deferred();function P(){C.removeEventListener("DOMContentLoaded",P),ie.removeEventListener("load",P),ce.ready()}ce.fn.ready=function(e){return 
O.then(e)["catch"](function(e){ce.readyException(e)}),this},ce.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--ce.readyWait:ce.isReady)||(ce.isReady=!0)!==e&&0<--ce.readyWait||O.resolveWith(C,[ce])}}),ce.ready.then=O.then,"complete"===C.readyState||"loading"!==C.readyState&&!C.documentElement.doScroll?ie.setTimeout(ce.ready):(C.addEventListener("DOMContentLoaded",P),ie.addEventListener("load",P));var M=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n))for(s in i=!0,n)M(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,v(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(ce(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},R=/^-ms-/,I=/-([a-z])/g;function W(e,t){return t.toUpperCase()}function F(e){return e.replace(R,"ms-").replace(I,W)}var $=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function B(){this.expando=ce.expando+B.uid++}B.uid=1,B.prototype={cache:function(e){var t=e[this.expando];return t||(t={},$(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[F(t)]=n;else for(r in t)i[F(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][F(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(F):(t=F(t))in r?[t]:t.match(D)||[]).length;while(n--)delete r[t[n]]}(void 0===t||ce.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!ce.isEmptyObject(t)}};var _=new B,z=new B,X=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,U=/[A-Z]/g;function V(e,t,n){var r,i;if(void 
0===n&&1===e.nodeType)if(r="data-"+t.replace(U,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:X.test(i)?JSON.parse(i):i)}catch(e){}z.set(e,t,n)}else n=void 0;return n}ce.extend({hasData:function(e){return z.hasData(e)||_.hasData(e)},data:function(e,t,n){return z.access(e,t,n)},removeData:function(e,t){z.remove(e,t)},_data:function(e,t,n){return _.access(e,t,n)},_removeData:function(e,t){_.remove(e,t)}}),ce.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=z.get(o),1===o.nodeType&&!_.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=F(r.slice(5)),V(o,r,i[r]));_.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){z.set(this,n)}):M(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=z.get(o,n))?t:void 0!==(t=V(o,n))?t:void 0;this.each(function(){z.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){z.remove(this,e)})}}),ce.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=_.get(e,t),n&&(!r||Array.isArray(n)?r=_.access(e,t,ce.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=ce.queue(e,t),r=n.length,i=n.shift(),o=ce._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){ce.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return _.get(e,n)||_.access(e,n,{empty:ce.Callbacks("once memory").add(function(){_.remove(e,[t+"queue",n])})})}}),ce.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?ce.queue(this[0],t):void 0===n?this:this.each(function(){var e=ce.queue(this,t,n);ce._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&ce.dequeue(this,t)})},dequeue:function(e){return 
this.each(function(){ce.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=ce.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=_.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var G=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,Y=new RegExp("^(?:([+-])=|)("+G+")([a-z%]*)$","i"),Q=["Top","Right","Bottom","Left"],J=C.documentElement,K=function(e){return ce.contains(e.ownerDocument,e)},Z={composed:!0};J.getRootNode&&(K=function(e){return ce.contains(e.ownerDocument,e)||e.getRootNode(Z)===e.ownerDocument});var ee=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&K(e)&&"none"===ce.css(e,"display")};function te(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return ce.css(e,t,"")},u=s(),l=n&&n[3]||(ce.cssNumber[t]?"":"px"),c=e.nodeType&&(ce.cssNumber[t]||"px"!==l&&+u)&&Y.exec(ce.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)ce.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,ce.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ne={};function re(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=_.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&ee(r)&&(l[c]=(u=a=o=void 0,a=(i=r).ownerDocument,s=i.nodeName,(u=ne[s])||(o=a.body.appendChild(a.createElement(s)),u=ce.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ne[s]=u)))):"none"!==n&&(l[c]="none",_.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}ce.fn.extend({show:function(){return re(this,!0)},hide:function(){return re(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){ee(this)?ce(this).show():ce(this).hide()})}});var 
xe,be,we=/^(?:checkbox|radio)$/i,Te=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="<textarea>x</textarea>",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="<option></option>",le.option=!!xe.lastChild;var ke={thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n<r;n++)_.set(e[n],"globalEval",!t||_.get(t[n],"globalEval"))}ke.tbody=ke.tfoot=ke.colgroup=ke.caption=ke.thead,ke.th=ke.td,le.option||(ke.optgroup=ke.option=[1,"<select multiple='multiple'>","</select>"]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===x(o))ce.merge(p,o.nodeType?[o]:o);else if(je.test(o)){a=a||f.appendChild(t.createElement("div")),s=(Te.exec(o)||["",""])[1].toLowerCase(),u=ke[s]||ke._default,a.innerHTML=u[1]+ce.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;ce.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<ce.inArray(o,r))i&&i.push(o);else if(l=K(o),a=Se(f.appendChild(o),"script"),l&&Ee(a),n){c=0;while(o=a[c++])Ce.test(o.type||"")&&n.push(o)}return f}var De=/^([^.]*)(?:\.(.+)|)/;function Ne(){return!0}function qe(){return!1}function Le(e,t,n,r,i,o){var a,s;if("object"==typeof 
t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Le(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=qe;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return ce().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=ce.guid++)),e.each(function(){ce.event.add(this,t,i,r,n)})}function He(e,r,t){t?(_.set(e,r,!1),ce.event.add(e,r,{namespace:!1,handler:function(e){var t,n=_.get(this,r);if(1&e.isTrigger&&this[r]){if(n)(ce.event.special[r]||{}).delegateType&&e.stopPropagation();else if(n=ae.call(arguments),_.set(this,r,n),this[r](),t=_.get(this,r),_.set(this,r,!1),n!==t)return e.stopImmediatePropagation(),e.preventDefault(),t}else n&&(_.set(this,r,ce.event.trigger(n[0],n.slice(1),this)),e.stopPropagation(),e.isImmediatePropagationStopped=Ne)}})):void 0===_.get(e,r)&&ce.event.add(e,r,Ne)}ce.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=_.get(t);if($(t)){n.handler&&(n=(o=n).handler,i=o.selector),i&&ce.find.matchesSelector(J,i),n.guid||(n.guid=ce.guid++),(u=v.events)||(u=v.events=Object.create(null)),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof ce&&ce.event.triggered!==e.type?ce.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(D)||[""]).length;while(l--)d=g=(s=De.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=ce.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=ce.event.special[d]||{},c=ce.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&ce.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),ce.event.global[d]=!0)}},remove:function(e,t,n,r,i){var 
o,a,s,u,l,c,f,p,d,h,g,v=_.hasData(e)&&_.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(D)||[""]).length;while(l--)if(d=g=(s=De.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=ce.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||ce.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)ce.event.remove(e,d+t[l],n,r,!0);ce.isEmptyObject(u)&&_.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=new Array(arguments.length),u=ce.event.fix(e),l=(_.get(this,"events")||Object.create(null))[u.type]||[],c=ce.event.special[u.type]||{};for(s[0]=u,t=1;t<arguments.length;t++)s[t]=arguments[t];if(u.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,u)){a=ce.event.handlers.call(this,u,l),t=0;while((i=a[t++])&&!u.isPropagationStopped()){u.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!u.isImmediatePropagationStopped())u.rnamespace&&!1!==o.namespace&&!u.rnamespace.test(o.namespace)||(u.handleObj=o,u.data=o.data,void 0!==(r=((ce.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,s))&&!1===(u.result=r)&&(u.preventDefault(),u.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,u),u.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<ce(i,this).index(l):ce.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return 
l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(ce.Event.prototype,t,{enumerable:!0,configurable:!0,get:v(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[ce.expando]?e:new ce.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click",!0),!1},trigger:function(e){var t=this||e;return we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click"),!0},_default:function(e){var t=e.target;return we.test(t.type)&&t.click&&fe(t,"input")&&_.get(t,"click")||fe(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},ce.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},ce.Event=function(e,t){if(!(this instanceof ce.Event))return new ce.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?Ne:qe,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&ce.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[ce.expando]=!0},ce.Event.prototype={constructor:ce.Event,isDefaultPrevented:qe,isPropagationStopped:qe,isImmediatePropagationStopped:qe,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=Ne,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=Ne,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var 
e=this.originalEvent;this.isImmediatePropagationStopped=Ne,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},ce.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:!0},ce.event.addProp),ce.each({focus:"focusin",blur:"focusout"},function(r,i){function o(e){if(C.documentMode){var t=_.get(this,"handle"),n=ce.event.fix(e);n.type="focusin"===e.type?"focus":"blur",n.isSimulated=!0,t(e),n.target===n.currentTarget&&t(n)}else ce.event.simulate(i,e.target,ce.event.fix(e))}ce.event.special[r]={setup:function(){var e;if(He(this,r,!0),!C.documentMode)return!1;(e=_.get(this,i))||this.addEventListener(i,o),_.set(this,i,(e||0)+1)},trigger:function(){return He(this,r),!0},teardown:function(){var e;if(!C.documentMode)return!1;(e=_.get(this,i)-1)?_.set(this,i,e):(this.removeEventListener(i,o),_.remove(this,i))},_default:function(e){return _.get(e.target,r)},delegateType:i},ce.event.special[i]={setup:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i);n||(C.documentMode?this.addEventListener(i,o):e.addEventListener(r,o,!0)),_.set(t,i,(n||0)+1)},teardown:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i)-1;n?_.set(t,i,n):(C.documentMode?this.removeEventListener(i,o):e.removeEventListener(r,o,!0),_.remove(t,i))}}}),ce.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){ce.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return 
n&&(n===this||ce.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),ce.fn.extend({on:function(e,t,n,r){return Le(this,e,t,n,r)},one:function(e,t,n,r){return Le(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,ce(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=qe),this.each(function(){ce.event.remove(this,e,n,t)})}});var Oe=/<script|<style|<link/i,Pe=/checked\s*(?:[^=]|=\s*.checked.)/i,Me=/^\s*<!\[CDATA\[|\]\]>\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n<r;n++)ce.event.add(t,i,s[i][n]);z.hasData(e)&&(o=z.access(e),a=ce.extend({},o),z.set(t,a))}}function $e(n,r,i,o){r=g(r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=v(d);if(h||1<f&&"string"==typeof d&&!le.checkClone&&Pe.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),$e(t,r,i,o)});if(f&&(t=(e=Ae(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=ce.map(Se(e,"script"),Ie)).length;c<f;c++)u=e,c!==p&&(u=ce.clone(u,!0,!0),s&&ce.merge(a,Se(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,ce.map(a,We),c=0;c<s;c++)u=a[c],Ce.test(u.type||"")&&!_.access(u,"globalEval")&&ce.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?ce._evalUrl&&!u.noModule&&ce._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")},l):m(u.textContent.replace(Me,""),u,l))}return n}function 
Be(e,t,n){for(var r,i=t?ce.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||ce.cleanData(Se(r)),r.parentNode&&(n&&K(r)&&Ee(Se(r,"script")),r.parentNode.removeChild(r));return e}ce.extend({htmlPrefilter:function(e){return e},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=K(e);if(!(le.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||ce.isXMLDoc(e)))for(a=Se(c),r=0,i=(o=Se(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&we.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||Se(e),a=a||Se(c),r=0,i=o.length;r<i;r++)Fe(o[r],a[r]);else Fe(e,c);return 0<(a=Se(c,"script")).length&&Ee(a,!f&&Se(e,"script")),c},cleanData:function(e){for(var t,n,r,i=ce.event.special,o=0;void 0!==(n=e[o]);o++)if($(n)){if(t=n[_.expando]){if(t.events)for(r in t.events)i[r]?ce.event.remove(n,r):ce.removeEvent(n,r,t.handle);n[_.expando]=void 0}n[z.expando]&&(n[z.expando]=void 0)}}}),ce.fn.extend({detach:function(e){return Be(this,e,!0)},remove:function(e){return Be(this,e)},text:function(e){return M(this,function(e){return void 0===e?ce.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return $e(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Re(this,e).appendChild(e)})},prepend:function(){return $e(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Re(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(ce.cleanData(Se(e,!1)),e.textContent="");return this},clone:function(e,t){return 
e=null!=e&&e,t=null==t?e:t,this.map(function(){return ce.clone(this,e,t)})},html:function(e){return M(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Oe.test(e)&&!ke[(Te.exec(e)||["",""])[1].toLowerCase()]){e=ce.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(ce.cleanData(Se(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return $e(this,arguments,function(e){var t=this.parentNode;ce.inArray(this,n)<0&&(ce.cleanData(Se(this)),t&&t.replaceChild(e,this))},n)}}),ce.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){ce.fn[e]=function(e){for(var t,n=[],r=ce(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),ce(r[o])[a](t),s.apply(n,t.get());return this.pushStack(n)}});var _e=new RegExp("^("+G+")(?!px)[a-z%]+$","i"),ze=/^--/,Xe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=ie),t.getComputedStyle(e)},Ue=function(e,t,n){var r,i,o={};for(i in t)o[i]=e.style[i],e.style[i]=t[i];for(i in r=n.call(e),t)e.style[i]=o[i];return r},Ve=new RegExp(Q.join("|"),"i");function Ge(e,t,n){var r,i,o,a,s=ze.test(t),u=e.style;return(n=n||Xe(e))&&(a=n.getPropertyValue(t)||n[t],s&&a&&(a=a.replace(ve,"$1")||void 0),""!==a||K(e)||(a=ce.style(e,t)),!le.pixelBoxStyles()&&_e.test(a)&&Ve.test(t)&&(r=u.width,i=u.minWidth,o=u.maxWidth,u.minWidth=u.maxWidth=u.width=a,a=n.width,u.width=r,u.minWidth=i,u.maxWidth=o)),void 0!==a?a+"":a}function Ye(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(l){u.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",l.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",J.appendChild(u).appendChild(l);var 
e=ie.getComputedStyle(l);n="1%"!==e.top,s=12===t(e.marginLeft),l.style.right="60%",o=36===t(e.right),r=36===t(e.width),l.style.position="absolute",i=12===t(l.offsetWidth/3),J.removeChild(u),l=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s,u=C.createElement("div"),l=C.createElement("div");l.style&&(l.style.backgroundClip="content-box",l.cloneNode(!0).style.backgroundClip="",le.clearCloneStyle="content-box"===l.style.backgroundClip,ce.extend(le,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),s},scrollboxSize:function(){return e(),i},reliableTrDimensions:function(){var e,t,n,r;return null==a&&(e=C.createElement("table"),t=C.createElement("tr"),n=C.createElement("div"),e.style.cssText="position:absolute;left:-11111px;border-collapse:separate",t.style.cssText="box-sizing:content-box;border:1px solid",t.style.height="1px",n.style.height="9px",n.style.display="block",J.appendChild(e).appendChild(t).appendChild(n),r=ie.getComputedStyle(t),a=parseInt(r.height,10)+parseInt(r.borderTopWidth,10)+parseInt(r.borderBottomWidth,10)===t.offsetHeight,J.removeChild(e)),a}}))}();var Qe=["Webkit","Moz","ms"],Je=C.createElement("div").style,Ke={};function Ze(e){var t=ce.cssProps[e]||Ke[e];return t||(e in Je?e:Ke[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Qe.length;while(n--)if((e=Qe[n]+t)in Je)return e}(e)||e)}var et=/^(none|table(?!-c[ea]).+)/,tt={position:"absolute",visibility:"hidden",display:"block"},nt={letterSpacing:"0",fontWeight:"400"};function rt(e,t,n){var r=Y.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function it(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0,l=0;if(n===(r?"border":"content"))return 
0;for(;a<4;a+=2)"margin"===n&&(l+=ce.css(e,n+Q[a],!0,i)),r?("content"===n&&(u-=ce.css(e,"padding"+Q[a],!0,i)),"margin"!==n&&(u-=ce.css(e,"border"+Q[a]+"Width",!0,i))):(u+=ce.css(e,"padding"+Q[a],!0,i),"padding"!==n?u+=ce.css(e,"border"+Q[a]+"Width",!0,i):s+=ce.css(e,"border"+Q[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u+l}function ot(e,t,n){var r=Xe(e),i=(!le.boxSizingReliable()||n)&&"border-box"===ce.css(e,"boxSizing",!1,r),o=i,a=Ge(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if(_e.test(a)){if(!n)return a;a="auto"}return(!le.boxSizingReliable()&&i||!le.reliableTrDimensions()&&fe(e,"tr")||"auto"===a||!parseFloat(a)&&"inline"===ce.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===ce.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+it(e,t,n||(i?"border":"content"),o,r,a)+"px"}function at(e,t,n,r,i){return new at.prototype.init(e,t,n,r,i)}ce.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Ge(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,aspectRatio:!0,borderImageSlice:!0,columnCount:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,scale:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeMiterlimit:!0,strokeOpacity:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=F(t),u=ze.test(t),l=e.style;if(u||(t=Ze(s)),a=ce.cssHooks[t]||ce.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=Y.exec(n))&&i[1]&&(n=te(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(ce.cssNumber[s]?"":"px")),le.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 
0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=F(t);return ze.test(t)||(t=Ze(s)),(a=ce.cssHooks[t]||ce.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Ge(e,t,r)),"normal"===i&&t in nt&&(i=nt[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),ce.each(["height","width"],function(e,u){ce.cssHooks[u]={get:function(e,t,n){if(t)return!et.test(ce.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?ot(e,u,n):Ue(e,tt,function(){return ot(e,u,n)})},set:function(e,t,n){var r,i=Xe(e),o=!le.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===ce.css(e,"boxSizing",!1,i),s=n?it(e,u,n,a,i):0;return a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-it(e,u,"border",!1,i)-.5)),s&&(r=Y.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=ce.css(e,u)),rt(0,t,s)}}}),ce.cssHooks.marginLeft=Ye(le.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Ge(e,"marginLeft"))||e.getBoundingClientRect().left-Ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),ce.each({margin:"",padding:"",border:"Width"},function(i,o){ce.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+Q[t]+o]=r[t]||r[t-2]||r[0];return n}},"margin"!==i&&(ce.cssHooks[i+o].set=rt)}),ce.fn.extend({css:function(e,t){return M(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Xe(e),i=t.length;a<i;a++)o[t[a]]=ce.css(e,t[a],!1,r);return o}return void 0!==n?ce.style(e,t,n):ce.css(e,t)},e,t,1<arguments.length)}}),((ce.Tween=at).prototype={constructor:at,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||ce.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(ce.cssNumber[n]?"":"px")},cur:function(){var e=at.propHooks[this.prop];return e&&e.get?e.get(this):at.propHooks._default.get(this)},run:function(e){var t,n=at.propHooks[this.prop];return 
this.options.duration?this.pos=t=ce.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):at.propHooks._default.set(this),this}}).init.prototype=at.prototype,(at.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=ce.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){ce.fx.step[e.prop]?ce.fx.step[e.prop](e):1!==e.elem.nodeType||!ce.cssHooks[e.prop]&&null==e.elem.style[Ze(e.prop)]?e.elem[e.prop]=e.now:ce.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=at.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},ce.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},ce.fx=at.prototype.init,ce.fx.step={};var st,ut,lt,ct,ft=/^(?:toggle|show|hide)$/,pt=/queueHooks$/;function dt(){ut&&(!1===C.hidden&&ie.requestAnimationFrame?ie.requestAnimationFrame(dt):ie.setTimeout(dt,ce.fx.interval),ce.fx.tick())}function ht(){return ie.setTimeout(function(){st=void 0}),st=Date.now()}function gt(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=Q[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function vt(e,t,n){for(var r,i=(yt.tweeners[t]||[]).concat(yt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function yt(o,e,t){var n,a,r=0,i=yt.prefilters.length,s=ce.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=st||ht(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return 
s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:ce.extend({},e),opts:ce.extend(!0,{specialEasing:{},easing:ce.easing._default},t),originalProperties:e,originalOptions:t,startTime:st||ht(),duration:t.duration,tweens:[],createTween:function(e,t){var n=ce.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var n,r,i,o,a;for(n in e)if(i=t[r=F(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=ce.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=yt.prefilters[r].call(l,o,c,l.opts))return v(n.stop)&&(ce._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return ce.map(c,vt,l),v(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),ce.fx.timer(ce.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}ce.Animation=ce.extend(yt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return te(n.elem,e,Y.exec(t),n),n}]},tweener:function(e,t){v(e)?(t=e,e=["*"]):e=e.match(D);for(var n,r=0,i=e.length;r<i;r++)n=e[r],yt.tweeners[n]=yt.tweeners[n]||[],yt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&ee(e),v=_.get(e,"fxshow");for(r in n.queue||(null==(a=ce._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,ce.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],ft.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 
0===v[r])continue;g=!0}d[r]=v&&v[r]||ce.style(e,r)}if((u=!ce.isEmptyObject(t))||!ce.isEmptyObject(d))for(r in f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=_.get(e,"display")),"none"===(c=ce.css(e,"display"))&&(l?c=l:(re([e],!0),l=e.style.display||l,c=ce.css(e,"display"),re([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===ce.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=_.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&re([e],!0),p.done(function(){for(r in g||re([e]),_.remove(e,"fxshow"),d)ce.style(e,r,d[r])})),u=vt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?yt.prefilters.unshift(e):yt.prefilters.push(e)}}),ce.speed=function(e,t,n){var r=e&&"object"==typeof e?ce.extend({},e):{complete:n||!n&&t||v(e)&&e,duration:e,easing:n&&t||t&&!v(t)&&t};return ce.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in ce.fx.speeds?r.duration=ce.fx.speeds[r.duration]:r.duration=ce.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){v(r.old)&&r.old.call(this),r.queue&&ce.dequeue(this,r.queue)},r},ce.fn.extend({fadeTo:function(e,t,n,r){return this.filter(ee).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=ce.isEmptyObject(t),o=ce.speed(e,n,r),a=function(){var e=yt(this,ce.extend({},t),o);(i||_.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 0),e&&this.queue(i||"fx",[]),this.each(function(){var 
e=!0,t=null!=i&&i+"queueHooks",n=ce.timers,r=_.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in r)r[t]&&r[t].stop&&pt.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||ce.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=_.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=ce.timers,o=n?n.length:0;for(t.finish=!0,ce.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete t.finish})}}),ce.each(["toggle","show","hide"],function(e,r){var i=ce.fn[r];ce.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(gt(r,!0),e,t,n)}}),ce.each({slideDown:gt("show"),slideUp:gt("hide"),slideToggle:gt("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){ce.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),ce.timers=[],ce.fx.tick=function(){var e,t=0,n=ce.timers;for(st=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||ce.fx.stop(),st=void 0},ce.fx.timer=function(e){ce.timers.push(e),ce.fx.start()},ce.fx.interval=13,ce.fx.start=function(){ut||(ut=!0,dt())},ce.fx.stop=function(){ut=null},ce.fx.speeds={slow:600,fast:200,_default:400},ce.fn.delay=function(r,e){return r=ce.fx&&ce.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var n=ie.setTimeout(e,r);t.stop=function(){ie.clearTimeout(n)}})},lt=C.createElement("input"),ct=C.createElement("select").appendChild(C.createElement("option")),lt.type="checkbox",le.checkOn=""!==lt.value,le.optSelected=ct.selected,(lt=C.createElement("input")).value="t",lt.type="radio",le.radioValue="t"===lt.value;var mt,xt=ce.expr.attrHandle;ce.fn.extend({attr:function(e,t){return M(this,ce.attr,e,t,1<arguments.length)},removeAttr:function(e){return 
this.each(function(){ce.removeAttr(this,e)})}}),ce.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?ce.prop(e,t,n):(1===o&&ce.isXMLDoc(e)||(i=ce.attrHooks[t.toLowerCase()]||(ce.expr.match.bool.test(t)?mt:void 0)),void 0!==n?null===n?void ce.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=ce.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!le.radioValue&&"radio"===t&&fe(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(D);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),mt={set:function(e,t,n){return!1===t?ce.removeAttr(e,n):e.setAttribute(n,n),n}},ce.each(ce.expr.match.bool.source.match(/\w+/g),function(e,t){var a=xt[t]||ce.find.attr;xt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=xt[o],xt[o]=r,r=null!=a(e,t,n)?o:null,xt[o]=i),r}});var bt=/^(?:input|select|textarea|button)$/i,wt=/^(?:a|area)$/i;function Tt(e){return(e.match(D)||[]).join(" ")}function Ct(e){return e.getAttribute&&e.getAttribute("class")||""}function kt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(D)||[]}ce.fn.extend({prop:function(e,t){return M(this,ce.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[ce.propFix[e]||e]})}}),ce.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&ce.isXMLDoc(e)||(t=ce.propFix[t]||t,i=ce.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=ce.find.attr(e,"tabindex");return t?parseInt(t,10):bt.test(e.nodeName)||wt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),le.optSelected||(ce.propHooks.selected={get:function(e){var t=e.parentNode;return 
t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),ce.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){ce.propFix[this.toLowerCase()]=this}),ce.fn.extend({addClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).addClass(t.call(this,e,Ct(this)))}):(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++)i=e[o],n.indexOf(" "+i+" ")<0&&(n+=i+" ");a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this},removeClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).removeClass(t.call(this,e,Ct(this)))}):arguments.length?(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++){i=e[o];while(-1<n.indexOf(" "+i+" "))n=n.replace(" "+i+" "," ")}a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this:this.attr("class","")},toggleClass:function(t,n){var e,r,i,o,a=typeof t,s="string"===a||Array.isArray(t);return v(t)?this.each(function(e){ce(this).toggleClass(t.call(this,e,Ct(this),n),n)}):"boolean"==typeof n&&s?n?this.addClass(t):this.removeClass(t):(e=kt(t),this.each(function(){if(s)for(o=ce(this),i=0;i<e.length;i++)r=e[i],o.hasClass(r)?o.removeClass(r):o.addClass(r);else void 0!==t&&"boolean"!==a||((r=Ct(this))&&_.set(this,"__className__",r),this.setAttribute&&this.setAttribute("class",r||!1===t?"":_.get(this,"__className__")||""))}))},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+Tt(Ct(n))+" ").indexOf(t))return!0;return!1}});var St=/\r/g;ce.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=v(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,ce(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=ce.map(t,function(e){return 
null==e?"":e+""})),(r=ce.valHooks[this.type]||ce.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 0!==r.set(this,t,"value")||(this.value=t))})):t?(r=ce.valHooks[t.type]||ce.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(St,""):null==e?"":e:void 0}}),ce.extend({valHooks:{option:{get:function(e){var t=ce.find.attr(e,"value");return null!=t?t:Tt(ce.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!fe(n.parentNode,"optgroup"))){if(t=ce(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=ce.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<ce.inArray(ce.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),ce.each(["radio","checkbox"],function(){ce.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<ce.inArray(ce(e).val(),t)}},le.checkOn||(ce.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})});var Et=ie.location,jt={guid:Date.now()},At=/\?/;ce.parseXML=function(e){var t,n;if(!e||"string"!=typeof e)return null;try{t=(new ie.DOMParser).parseFromString(e,"text/xml")}catch(e){}return n=t&&t.getElementsByTagName("parsererror")[0],t&&!n||ce.error("Invalid XML: "+(n?ce.map(n.childNodes,function(e){return e.textContent}).join("\n"):e)),t};var Dt=/^(?:focusinfocus|focusoutblur)$/,Nt=function(e){e.stopPropagation()};ce.extend(ce.event,{trigger:function(e,t,n,r){var i,o,a,s,u,l,c,f,p=[n||C],d=ue.call(e,"type")?e.type:e,h=ue.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||C,3!==n.nodeType&&8!==n.nodeType&&!Dt.test(d+ce.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[ce.expando]?e:new ce.Event(d,"object"==typeof 
e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 0,e.target||(e.target=n),t=null==t?[e]:ce.makeArray(t,[e]),c=ce.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!y(n)){for(s=c.delegateType||d,Dt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||C)&&p.push(a.defaultView||a.parentWindow||ie)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(_.get(o,"events")||Object.create(null))[e.type]&&_.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&$(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!$(n)||u&&v(n[d])&&!y(n)&&((a=n[u])&&(n[u]=null),ce.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Nt),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Nt),ce.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=ce.extend(new ce.Event,n,{type:e,isSimulated:!0});ce.event.trigger(r,null,t)}}),ce.fn.extend({trigger:function(e,t){return this.each(function(){ce.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return ce.event.trigger(e,t,n,!0)}});var qt=/\[\]$/,Lt=/\r?\n/g,Ht=/^(?:submit|button|image|reset|file)$/i,Ot=/^(?:input|select|textarea|keygen)/i;function Pt(n,e,r,i){var t;if(Array.isArray(e))ce.each(e,function(e,t){r||qt.test(n)?i(n,t):Pt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==x(e))i(n,e);else for(t in e)Pt(n+"["+t+"]",e[t],r,i)}ce.param=function(e,t){var n,r=[],i=function(e,t){var n=v(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!ce.isPlainObject(e))ce.each(e,function(){i(this.name,this.value)});else for(n in e)Pt(n,e[n],t,i);return 
r.join("&")},ce.fn.extend({serialize:function(){return ce.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=ce.prop(this,"elements");return e?ce.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!ce(this).is(":disabled")&&Ot.test(this.nodeName)&&!Ht.test(e)&&(this.checked||!we.test(e))}).map(function(e,t){var n=ce(this).val();return null==n?null:Array.isArray(n)?ce.map(n,function(e){return{name:t.name,value:e.replace(Lt,"\r\n")}}):{name:t.name,value:n.replace(Lt,"\r\n")}}).get()}});var Mt=/%20/g,Rt=/#.*$/,It=/([?&])_=[^&]*/,Wt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Ft=/^(?:GET|HEAD)$/,$t=/^\/\//,Bt={},_t={},zt="*/".concat("*"),Xt=C.createElement("a");function Ut(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(D)||[];if(v(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function Vt(t,i,o,a){var s={},u=t===_t;function l(e){var r;return s[e]=!0,ce.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function Gt(e,t){var n,r,i=ce.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&ce.extend(!0,e,r),e}Xt.href=Et.href,ce.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":zt,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text 
xml":ce.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?Gt(Gt(e,ce.ajaxSettings),t):Gt(ce.ajaxSettings,e)},ajaxPrefilter:Ut(Bt),ajaxTransport:Ut(_t),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=ce.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?ce(y):ce.event,x=ce.Deferred(),b=ce.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Wt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace($t,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(D)||[""],null==v.crossDomain){r=C.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Xt.protocol+"//"+Xt.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=ce.param(v.data,v.traditional)),Vt(Bt,v,t,T),h)return T;for(i in(g=ce.event&&v.global)&&0==ce.active++&&ce.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Ft.test(v.type),f=v.url.replace(Rt,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Mt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(At.test(f)?"&":"?")+v.data,delete 
v.data),!1===v.cache&&(f=f.replace(It,"$1"),o=(At.test(f)?"&":"?")+"_="+jt.guid+++o),v.url=f+o),v.ifModified&&(ce.lastModified[f]&&T.setRequestHeader("If-Modified-Since",ce.lastModified[f]),ce.etag[f]&&T.setRequestHeader("If-None-Match",ce.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+zt+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=Vt(_t,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=ie.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var i,o,a,s,u,l=t;h||(h=!0,d&&ie.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),!i&&-1<ce.inArray("script",v.dataTypes)&&ce.inArray("json",v.dataTypes)<0&&(v.converters["text script"]=function(){}),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* 
"+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(ce.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(ce.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--ce.active||ce.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return ce.get(e,t,n,"json")},getScript:function(e,t){return ce.get(e,void 0,t,"script")}}),ce.each(["get","post"],function(e,i){ce[i]=function(e,t,n,r){return v(t)&&(r=r||n,n=t,t=void 0),ce.ajax(ce.extend({url:e,type:i,dataType:r,data:t,success:n},ce.isPlainObject(e)&&e))}}),ce.ajaxPrefilter(function(e){var t;for(t in e.headers)"content-type"===t.toLowerCase()&&(e.contentType=e.headers[t]||"")}),ce._evalUrl=function(e,t,n){return ce.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){ce.globalEval(e,t,n)}})},ce.fn.extend({wrapAll:function(e){var t;return this[0]&&(v(e)&&(e=e.call(this[0])),t=ce(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return v(n)?this.each(function(e){ce(this).wrapInner(n.call(this,e))}):this.each(function(){var e=ce(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=v(t);return this.each(function(e){ce(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return 
this.parent(e).not("body").each(function(){ce(this).replaceWith(this.childNodes)}),this}}),ce.expr.pseudos.hidden=function(e){return!ce.expr.pseudos.visible(e)},ce.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},ce.ajaxSettings.xhr=function(){try{return new ie.XMLHttpRequest}catch(e){}};var Yt={0:200,1223:204},Qt=ce.ajaxSettings.xhr();le.cors=!!Qt&&"withCredentials"in Qt,le.ajax=Qt=!!Qt,ce.ajaxTransport(function(i){var o,a;if(le.cors||Qt&&!i.crossDomain)return{send:function(e,t){var n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Yt[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&ie.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),ce.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),ce.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return ce.globalEval(e),e}}}),ce.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),ce.ajaxTransport("script",function(n){var 
r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=ce("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=Tt(e.slice(s)),e=e.slice(0,s)),v(t)?(n=t,t=void 0):t&&"object"==typeof 
t&&(i="POST"),0<a.length&&ce.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?ce("<div>").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var 
r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}});var en=/^[\s\uFEFF\xA0]+|([^\s\uFEFF\xA0])[\s\uFEFF\xA0]+$/g;ce.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),v(e))return r=ae.call(arguments,2),(i=function(){return 
e.apply(t||this,r.concat(ae.call(arguments)))}).guid=e.guid=e.guid||ce.guid++,i},ce.holdReady=function(e){e?ce.readyWait++:ce.ready(!0)},ce.isArray=Array.isArray,ce.parseJSON=JSON.parse,ce.nodeName=fe,ce.isFunction=v,ce.isWindow=y,ce.camelCase=F,ce.type=x,ce.now=Date.now,ce.isNumeric=function(e){var t=ce.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},ce.trim=function(e){return null==e?"":(e+"").replace(en,"$1")},"function"==typeof define&&define.amd&&define("jquery",[],function(){return ce});var tn=ie.jQuery,nn=ie.$;return ce.noConflict=function(e){return ie.$===ce&&(ie.$=nn),e&&ie.jQuery===ce&&(ie.jQuery=tn),ce},"undefined"==typeof e&&(ie.jQuery=ie.$=ce),ce}); | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map new file mode 100644 index 0000000000..db38af5893 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map | |||
@@ -0,0 +1 @@ | |||
{"version":3,"sources":["jquery-3.7.1.js"],"names":["global","factory","module","exports","document","w","Error","window","this","noGlobal","arr","getProto","Object","getPrototypeOf","slice","flat","array","call","concat","apply","push","indexOf","class2type","toString","hasOwn","hasOwnProperty","fnToString","ObjectFunctionString","support","isFunction","obj","nodeType","item","isWindow","preservedScriptAttributes","type","src","nonce","noModule","DOMEval","code","node","doc","i","val","script","createElement","text","getAttribute","setAttribute","head","appendChild","parentNode","removeChild","toType","version","rhtmlSuffix","jQuery","selector","context","fn","init","isArrayLike","length","nodeName","elem","name","toLowerCase","prototype","jquery","constructor","toArray","get","num","pushStack","elems","ret","merge","prevObject","each","callback","map","arguments","first","eq","last","even","grep","_elem","odd","len","j","end","sort","splice","extend","options","copy","copyIsArray","clone","target","deep","isPlainObject","Array","isArray","undefined","expando","Math","random","replace","isReady","error","msg","noop","proto","Ctor","isEmptyObject","globalEval","textContent","documentElement","nodeValue","makeArray","results","inArray","isXMLDoc","namespace","namespaceURI","docElem","ownerDocument","test","second","invert","matches","callbackExpect","arg","value","guid","Symbol","iterator","split","_i","pop","whitespace","rtrimCSS","RegExp","contains","a","b","bup","compareDocumentPosition","rcssescape","fcssescape","ch","asCodePoint","charCodeAt","escapeSelector","sel","preferredDoc","pushNative","Expr","outermostContext","sortInput","hasDuplicate","documentIsHTML","rbuggyQSA","dirruns","done","classCache","createCache","tokenCache","compilerCache","nonnativeSelectorCache","sortOrder","booleans","identifier","attributes","pseudos","rwhitespace","rcomma","rleadingCombinator","rdescend","rpseudo","ridentifier","matchExpr","ID","CLASS","TAG","ATTR","PSEUDO","CHILD","bo
ol","needsContext","rinputs","rheader","rquickExpr","rsibling","runescape","funescape","escape","nonHex","high","String","fromCharCode","unloadHandler","setDocument","inDisabledFieldset","addCombinator","disabled","dir","next","childNodes","e","els","find","seed","m","nid","match","groups","newSelector","newContext","exec","getElementById","id","getElementsByTagName","getElementsByClassName","testContext","scope","tokenize","toSelector","join","querySelectorAll","qsaError","removeAttribute","select","keys","cache","key","cacheLength","shift","markFunction","assert","el","createInputPseudo","createButtonPseudo","createDisabledPseudo","isDisabled","createPositionalPseudo","argument","matchIndexes","subWindow","webkitMatchesSelector","msMatchesSelector","defaultView","top","addEventListener","getById","getElementsByName","disconnectedMatch","cssHas","querySelector","filter","attrId","getAttributeNode","tag","className","input","innerHTML","compare","sortDetached","expr","elements","matchesSelector","attr","attrHandle","uniqueSort","duplicates","sortStable","createPseudo","relative",">"," 
","+","~","preFilter","excess","unquoted","nodeNameSelector","expectedNodeName","pattern","operator","check","result","what","_argument","simple","forward","ofType","_context","xml","outerCache","nodeIndex","start","parent","useCache","diff","firstChild","lastChild","pseudo","args","setFilters","idx","matched","not","matcher","compile","unmatched","has","lang","elemLang","hash","location","root","focus","activeElement","err","safeActiveElement","hasFocus","href","tabIndex","enabled","checked","selected","selectedIndex","empty","nextSibling","header","button","_matchIndexes","lt","gt","nth","radio","checkbox","file","password","image","submit","reset","parseOnly","tokens","soFar","preFilters","cached","combinator","base","skip","checkNonElements","doneName","oldCache","newCache","elementMatcher","matchers","condense","newUnmatched","mapped","setMatcher","postFilter","postFinder","postSelector","temp","matcherOut","preMap","postMap","preexisting","contexts","multipleContexts","matcherIn","matcherFromTokens","checkContext","leadingRelative","implicitRelative","matchContext","matchAnyContext","elementMatchers","setMatchers","bySet","byElement","superMatcher","outermost","matchedCount","setMatched","contextBackup","dirrunsUnique","token","compiled","filters","unique","getText","isXML","selectors","until","truncate","is","siblings","n","rneedsContext","rsingleTag","winnow","qualifier","self","rootjQuery","parseHTML","ready","rparentsprev","guaranteedUnique","children","contents","prev","sibling","cur","targets","l","closest","index","prevAll","add","addBack","parents","parentsUntil","nextAll","nextUntil","prevUntil","contentDocument","content","reverse","rnothtmlwhite","Identity","v","Thrower","ex","adoptValue","resolve","reject","noValue","method","promise","fail","then","Callbacks","object","_","flag","firing","memory","fired","locked","list","queue","firingIndex","fire","once","stopOnFalse","remove","disable","lock","fireWith","Deferred","func","tuples","state","always
","deferred","catch","pipe","fns","newDefer","tuple","returned","progress","notify","onFulfilled","onRejected","onProgress","maxDepth","depth","handler","special","that","mightThrow","TypeError","notifyWith","resolveWith","process","exceptionHook","rejectWith","getErrorHook","getStackHook","setTimeout","stateString","when","singleValue","remaining","resolveContexts","resolveValues","primary","updateFunc","rerrorNames","asyncError","console","warn","message","stack","readyException","readyList","completed","removeEventListener","readyWait","wait","readyState","doScroll","access","chainable","emptyGet","raw","bulk","_key","rmsPrefix","rdashAlpha","fcamelCase","_all","letter","toUpperCase","camelCase","string","acceptData","owner","Data","uid","defineProperty","configurable","set","data","prop","hasData","dataPriv","dataUser","rbrace","rmultiDash","dataAttr","JSON","parse","removeData","_data","_removeData","attrs","dequeue","startLength","hooks","_queueHooks","unshift","stop","setter","clearQueue","tmp","count","defer","pnum","source","rcssNum","cssExpand","isAttached","composed","getRootNode","isHiddenWithinTree","style","display","css","adjustCSS","valueParts","tween","adjusted","scale","maxIterations","currentValue","initial","unit","cssNumber","initialInUnit","defaultDisplayMap","showHide","show","values","body","hide","toggle","div","rcheckableType","rtagName","rscriptType","createDocumentFragment","checkClone","cloneNode","noCloneChecked","defaultValue","option","wrapMap","thead","col","tr","td","_default","getAll","setGlobalEval","refElements","tbody","tfoot","colgroup","caption","th","optgroup","rhtml","buildFragment","scripts","selection","ignored","wrap","attached","fragment","nodes","htmlPrefilter","createTextNode","rtypenamespace","returnTrue","returnFalse","on","types","one","origFn","event","off","leverageNative","isSetup","saved","isTrigger","delegateType","stopPropagation","stopImmediatePropagation","preventDefault","trigger","isImmediatePropagationSto
pped","handleObjIn","eventHandle","events","t","handleObj","handlers","namespaces","origType","elemData","create","handle","triggered","dispatch","bindType","delegateCount","setup","mappedTypes","origCount","teardown","removeEvent","nativeEvent","handlerQueue","fix","delegateTarget","preDispatch","isPropagationStopped","currentTarget","rnamespace","postDispatch","matchedHandlers","matchedSelectors","addProp","hook","Event","enumerable","originalEvent","writable","load","noBubble","click","beforeunload","returnValue","props","isDefaultPrevented","defaultPrevented","relatedTarget","timeStamp","Date","now","isSimulated","altKey","bubbles","cancelable","changedTouches","ctrlKey","detail","eventPhase","metaKey","pageX","pageY","shiftKey","view","char","charCode","keyCode","buttons","clientX","clientY","offsetX","offsetY","pointerId","pointerType","screenX","screenY","targetTouches","toElement","touches","which","blur","focusMappedHandler","documentMode","simulate","attaches","dataHolder","mouseenter","mouseleave","pointerenter","pointerleave","orig","related","rnoInnerhtml","rchecked","rcleanScript","manipulationTarget","disableScript","restoreScript","cloneCopyEvent","dest","udataOld","udataCur","domManip","collection","hasScripts","iNoClone","valueIsFunction","html","_evalUrl","keepData","cleanData","dataAndEvents","deepDataAndEvents","srcElements","destElements","inPage","detach","append","prepend","insertBefore","before","after","replaceWith","replaceChild","appendTo","prependTo","insertAfter","replaceAll","original","insert","rnumnonpx","rcustomProp","getStyles","opener","getComputedStyle","swap","old","rboxStyle","curCSS","computed","width","minWidth","maxWidth","isCustomProp","getPropertyValue","pixelBoxStyles","addGetHookIf","conditionFn","hookFn","computeStyleTests","container","cssText","divStyle","pixelPositionVal","reliableMarginLeftVal","roundPixelMeasures","marginLeft","right","pixelBoxStylesVal","boxSizingReliableVal","position","scrollboxSizeVal","offsetW
idth","measure","round","parseFloat","reliableTrDimensionsVal","backgroundClip","clearCloneStyle","boxSizingReliable","pixelPosition","reliableMarginLeft","scrollboxSize","reliableTrDimensions","table","trChild","trStyle","height","parseInt","borderTopWidth","borderBottomWidth","offsetHeight","cssPrefixes","emptyStyle","vendorProps","finalPropName","final","cssProps","capName","vendorPropName","rdisplayswap","cssShow","visibility","cssNormalTransform","letterSpacing","fontWeight","setPositiveNumber","subtract","max","boxModelAdjustment","dimension","box","isBorderBox","styles","computedVal","extra","delta","marginDelta","ceil","getWidthOrHeight","valueIsBorderBox","offsetProp","getClientRects","Tween","easing","cssHooks","opacity","animationIterationCount","aspectRatio","borderImageSlice","columnCount","flexGrow","flexShrink","gridArea","gridColumn","gridColumnEnd","gridColumnStart","gridRow","gridRowEnd","gridRowStart","lineHeight","order","orphans","widows","zIndex","zoom","fillOpacity","floodOpacity","stopOpacity","strokeMiterlimit","strokeOpacity","origName","setProperty","isFinite","getBoundingClientRect","scrollboxSizeBuggy","left","margin","padding","border","prefix","suffix","expand","expanded","parts","propHooks","run","percent","eased","duration","pos","step","fx","scrollTop","scrollLeft","linear","p","swing","cos","PI","fxNow","inProgress","opt","rfxtypes","rrun","schedule","hidden","requestAnimationFrame","interval","tick","createFxNow","genFx","includeWidth","createTween","animation","Animation","tweeners","properties","stopped","prefilters","currentTime","startTime","tweens","opts","specialEasing","originalProperties","originalOptions","gotoEnd","propFilter","bind","complete","timer","anim","*","tweener","oldfire","propTween","restoreDisplay","isBox","dataShow","unqueued","overflow","overflowX","overflowY","prefilter","speed","speeds","fadeTo","to","animate","optall","doAnimation","finish","stopQueue","timers","cssFn","slideDown","slideUp","slideToggle
","fadeIn","fadeOut","fadeToggle","slow","fast","delay","time","timeout","clearTimeout","checkOn","optSelected","radioValue","boolHook","removeAttr","nType","attrHooks","attrNames","getter","lowercaseName","rfocusable","rclickable","stripAndCollapse","getClass","classesToArray","removeProp","propFix","tabindex","for","class","addClass","classNames","curValue","finalValue","removeClass","toggleClass","stateVal","isValidValue","hasClass","rreturn","valHooks","optionSet","rquery","parseXML","parserErrorElem","DOMParser","parseFromString","rfocusMorph","stopPropagationCallback","onlyHandlers","bubbleType","ontype","lastElement","eventPath","parentWindow","triggerHandler","rbracket","rCRLF","rsubmitterTypes","rsubmittable","buildParams","traditional","param","s","valueOrFunction","encodeURIComponent","serialize","serializeArray","r20","rhash","rantiCache","rheaders","rnoContent","rprotocol","transports","allTypes","originAnchor","addToPrefiltersOrTransports","structure","dataTypeExpression","dataType","dataTypes","inspectPrefiltersOrTransports","jqXHR","inspected","seekingTransport","inspect","prefilterOrFactory","dataTypeOrTransport","ajaxExtend","flatOptions","ajaxSettings","active","lastModified","etag","url","isLocal","protocol","processData","async","contentType","accepts","json","responseFields","converters","* text","text html","text json","text 
xml","ajaxSetup","settings","ajaxPrefilter","ajaxTransport","ajax","transport","cacheURL","responseHeadersString","responseHeaders","timeoutTimer","urlAnchor","fireGlobals","uncached","callbackContext","globalEventContext","completeDeferred","statusCode","requestHeaders","requestHeadersNames","strAbort","getResponseHeader","getAllResponseHeaders","setRequestHeader","overrideMimeType","mimeType","status","abort","statusText","finalText","crossDomain","host","hasContent","ifModified","headers","beforeSend","success","send","nativeStatusText","responses","isSuccess","response","modified","ct","finalDataType","firstDataType","ajaxHandleResponses","conv2","current","conv","dataFilter","throws","ajaxConvert","getJSON","getScript","text script","wrapAll","firstElementChild","wrapInner","htmlIsFunction","unwrap","visible","xhr","XMLHttpRequest","xhrSuccessStatus","0","1223","xhrSupported","cors","errorCallback","open","username","xhrFields","onload","onerror","onabort","ontimeout","onreadystatechange","responseType","responseText","binary","scriptAttrs","charset","scriptCharset","evt","oldCallbacks","rjsonp","jsonp","jsonpCallback","originalSettings","callbackName","overwritten","responseContainer","jsonProp","createHTMLDocument","implementation","keepScripts","parsed","params","animated","offset","setOffset","curPosition","curLeft","curCSSTop","curTop","curOffset","curCSSLeft","curElem","using","rect","win","pageYOffset","pageXOffset","offsetParent","parentOffset","scrollTo","Height","Width","","defaultExtra","funcName","unbind","delegate","undelegate","hover","fnOver","fnOut","rtrim","proxy","holdReady","hold","parseJSON","isNumeric","isNaN","trim","define","amd","_jQuery","_$","$","noConflict"],"mappings":";CAUA,SAAYA,EAAQC,GAEnB,aAEuB,iBAAXC,QAAiD,iBAAnBA,OAAOC,QAShDD,OAAOC,QAAUH,EAAOI,SACvBH,EAASD,GAAQ,GACjB,SAAUK,GACT,IAAMA,EAAED,SACP,MAAM,IAAIE,MAAO,4CAElB,OAAOL,EAASI,IAGlBJ,EAASD,GAtBX,CA0BuB,oBAAXO,OAAyBA,OAASC,KAAM,SAAUD,GAAQE,GAMtE,aAEA,IAAIC,GAAM,GAENC,EAAWC,OAA
OC,eAElBC,GAAQJ,GAAII,MAEZC,EAAOL,GAAIK,KAAO,SAAUC,GAC/B,OAAON,GAAIK,KAAKE,KAAMD,IACnB,SAAUA,GACb,OAAON,GAAIQ,OAAOC,MAAO,GAAIH,IAI1BI,EAAOV,GAAIU,KAEXC,GAAUX,GAAIW,QAEdC,EAAa,GAEbC,EAAWD,EAAWC,SAEtBC,GAASF,EAAWG,eAEpBC,EAAaF,GAAOD,SAEpBI,EAAuBD,EAAWT,KAAML,QAExCgB,GAAU,GAEVC,EAAa,SAAqBC,GASpC,MAAsB,mBAARA,GAA8C,iBAAjBA,EAAIC,UAC1B,mBAAbD,EAAIE,MAIVC,EAAW,SAAmBH,GAChC,OAAc,MAAPA,GAAeA,IAAQA,EAAIvB,QAIhCH,EAAWG,GAAOH,SAIjB8B,EAA4B,CAC/BC,MAAM,EACNC,KAAK,EACLC,OAAO,EACPC,UAAU,GAGX,SAASC,EAASC,EAAMC,EAAMC,GAG7B,IAAIC,EAAGC,EACNC,GAHDH,EAAMA,GAAOtC,GAGC0C,cAAe,UAG7B,GADAD,EAAOE,KAAOP,EACTC,EACJ,IAAME,KAAKT,GAYVU,EAAMH,EAAME,IAAOF,EAAKO,cAAgBP,EAAKO,aAAcL,KAE1DE,EAAOI,aAAcN,EAAGC,GAI3BF,EAAIQ,KAAKC,YAAaN,GAASO,WAAWC,YAAaR,GAIzD,SAASS,EAAQxB,GAChB,OAAY,MAAPA,EACGA,EAAM,GAIQ,iBAARA,GAAmC,mBAARA,EACxCR,EAAYC,EAASN,KAAMa,KAAW,gBAC/BA,EAQT,IAAIyB,EAAU,QAEbC,EAAc,SAGdC,GAAS,SAAUC,EAAUC,GAI5B,OAAO,IAAIF,GAAOG,GAAGC,KAAMH,EAAUC,IAmYvC,SAASG,EAAahC,GAMrB,IAAIiC,IAAWjC,GAAO,WAAYA,GAAOA,EAAIiC,OAC5C5B,EAAOmB,EAAQxB,GAEhB,OAAKD,EAAYC,KAASG,EAAUH,KAIpB,UAATK,GAA+B,IAAX4B,GACR,iBAAXA,GAAgC,EAATA,GAAgBA,EAAS,KAAOjC,GAIhE,SAASkC,GAAUC,EAAMC,GAExB,OAAOD,EAAKD,UAAYC,EAAKD,SAASG,gBAAkBD,EAAKC,cApZ9DV,GAAOG,GAAKH,GAAOW,UAAY,CAG9BC,OAAQd,EAERe,YAAab,GAGbM,OAAQ,EAERQ,QAAS,WACR,OAAOzD,GAAMG,KAAMT,OAKpBgE,IAAK,SAAUC,GAGd,OAAY,MAAPA,EACG3D,GAAMG,KAAMT,MAIbiE,EAAM,EAAIjE,KAAMiE,EAAMjE,KAAKuD,QAAWvD,KAAMiE,IAKpDC,UAAW,SAAUC,GAGpB,IAAIC,EAAMnB,GAAOoB,MAAOrE,KAAK8D,cAAeK,GAM5C,OAHAC,EAAIE,WAAatE,KAGVoE,GAIRG,KAAM,SAAUC,GACf,OAAOvB,GAAOsB,KAAMvE,KAAMwE,IAG3BC,IAAK,SAAUD,GACd,OAAOxE,KAAKkE,UAAWjB,GAAOwB,IAAKzE,KAAM,SAAUyD,EAAMtB,GACxD,OAAOqC,EAAS/D,KAAMgD,EAAMtB,EAAGsB,OAIjCnD,MAAO,WACN,OAAON,KAAKkE,UAAW5D,GAAMK,MAAOX,KAAM0E,aAG3CC,MAAO,WACN,OAAO3E,KAAK4E,GAAI,IAGjBC,KAAM,WACL,OAAO7E,KAAK4E,IAAK,IAGlBE,KAAM,WACL,OAAO9E,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAASA,EAAI,GAAM,MAIrB8C,IAAK,WACJ,OAAOjF,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAAOA,EAAI,MAIbyC,GAAI,SAAUzC,GACb,IAAI
+C,EAAMlF,KAAKuD,OACd4B,GAAKhD,GAAMA,EAAI,EAAI+C,EAAM,GAC1B,OAAOlF,KAAKkE,UAAgB,GAALiB,GAAUA,EAAID,EAAM,CAAElF,KAAMmF,IAAQ,KAG5DC,IAAK,WACJ,OAAOpF,KAAKsE,YAActE,KAAK8D,eAKhClD,KAAMA,EACNyE,KAAMnF,GAAImF,KACVC,OAAQpF,GAAIoF,QAGbrC,GAAOsC,OAAStC,GAAOG,GAAGmC,OAAS,WAClC,IAAIC,EAAS9B,EAAM9B,EAAK6D,EAAMC,EAAaC,EAC1CC,EAASlB,UAAW,IAAO,GAC3BvC,EAAI,EACJoB,EAASmB,UAAUnB,OACnBsC,GAAO,EAsBR,IAnBuB,kBAAXD,IACXC,EAAOD,EAGPA,EAASlB,UAAWvC,IAAO,GAC3BA,KAIsB,iBAAXyD,GAAwBvE,EAAYuE,KAC/CA,EAAS,IAILzD,IAAMoB,IACVqC,EAAS5F,KACTmC,KAGOA,EAAIoB,EAAQpB,IAGnB,GAAqC,OAA9BqD,EAAUd,UAAWvC,IAG3B,IAAMuB,KAAQ8B,EACbC,EAAOD,EAAS9B,GAIF,cAATA,GAAwBkC,IAAWH,IAKnCI,GAAQJ,IAAUxC,GAAO6C,cAAeL,KAC1CC,EAAcK,MAAMC,QAASP,MAC/B7D,EAAMgE,EAAQlC,GAIbiC,EADID,IAAgBK,MAAMC,QAASpE,GAC3B,GACI8D,GAAgBzC,GAAO6C,cAAelE,GAG1CA,EAFA,GAIT8D,GAAc,EAGdE,EAAQlC,GAAST,GAAOsC,OAAQM,EAAMF,EAAOF,SAGzBQ,IAATR,IACXG,EAAQlC,GAAS+B,IAOrB,OAAOG,GAGR3C,GAAOsC,OAAQ,CAGdW,QAAS,UAAanD,EAAUoD,KAAKC,UAAWC,QAAS,MAAO,IAGhEC,SAAS,EAETC,MAAO,SAAUC,GAChB,MAAM,IAAI1G,MAAO0G,IAGlBC,KAAM,aAENX,cAAe,SAAUxE,GACxB,IAAIoF,EAAOC,EAIX,SAAMrF,GAAgC,oBAAzBP,EAASN,KAAMa,QAI5BoF,EAAQvG,EAAUmB,KASK,mBADvBqF,EAAO3F,GAAOP,KAAMiG,EAAO,gBAAmBA,EAAM5C,cACf5C,EAAWT,KAAMkG,KAAWxF,IAGlEyF,cAAe,SAAUtF,GACxB,IAAIoC,EAEJ,IAAMA,KAAQpC,EACb,OAAO,EAER,OAAO,GAKRuF,WAAY,SAAU7E,EAAMwD,EAAStD,GACpCH,EAASC,EAAM,CAAEH,MAAO2D,GAAWA,EAAQ3D,OAASK,IAGrDqC,KAAM,SAAUjD,EAAKkD,GACpB,IAAIjB,EAAQpB,EAAI,EAEhB,GAAKmB,EAAahC,IAEjB,IADAiC,EAASjC,EAAIiC,OACLpB,EAAIoB,EAAQpB,IACnB,IAAgD,IAA3CqC,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,WAIF,IAAMA,KAAKb,EACV,IAAgD,IAA3CkD,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,MAKH,OAAOb,GAKRiB,KAAM,SAAUkB,GACf,IAAIxB,EACHmC,EAAM,GACNjC,EAAI,EACJZ,EAAWkC,EAAKlC,SAEjB,IAAMA,EAGL,MAAUU,EAAOwB,EAAMtB,KAGtBiC,GAAOnB,GAAOV,KAAMN,GAGtB,OAAkB,IAAbV,GAA+B,KAAbA,EACfkC,EAAKqD,YAEK,IAAbvF,EACGkC,EAAKsD,gBAAgBD,YAEX,IAAbvF,GAA+B,IAAbA,EACfkC,EAAKuD,UAKN5C,GAIR6C,UAAW,SAAU/G,EAAKgH,GACzB,IAAI9C,EAAM8C,GAAW,GAarB,OAXY,MAAPhH,IACCoD,EAAalD,OAAQF,IACzB+C,GAAOoB,M
AAOD,EACE,iBAARlE,EACN,CAAEA,GAAQA,GAGZU,EAAKH,KAAM2D,EAAKlE,IAIXkE,GAGR+C,QAAS,SAAU1D,EAAMvD,EAAKiC,GAC7B,OAAc,MAAPjC,GAAe,EAAIW,GAAQJ,KAAMP,EAAKuD,EAAMtB,IAGpDiF,SAAU,SAAU3D,GACnB,IAAI4D,EAAY5D,GAAQA,EAAK6D,aAC5BC,EAAU9D,IAAUA,EAAK+D,eAAiB/D,GAAOsD,gBAIlD,OAAQ/D,EAAYyE,KAAMJ,GAAaE,GAAWA,EAAQ/D,UAAY,SAKvEa,MAAO,SAAUM,EAAO+C,GAKvB,IAJA,IAAIxC,GAAOwC,EAAOnE,OACjB4B,EAAI,EACJhD,EAAIwC,EAAMpB,OAEH4B,EAAID,EAAKC,IAChBR,EAAOxC,KAAQuF,EAAQvC,GAKxB,OAFAR,EAAMpB,OAASpB,EAERwC,GAGRI,KAAM,SAAUZ,EAAOK,EAAUmD,GAShC,IARA,IACCC,EAAU,GACVzF,EAAI,EACJoB,EAASY,EAAMZ,OACfsE,GAAkBF,EAIXxF,EAAIoB,EAAQpB,KACAqC,EAAUL,EAAOhC,GAAKA,KAChB0F,GACxBD,EAAQhH,KAAMuD,EAAOhC,IAIvB,OAAOyF,GAIRnD,IAAK,SAAUN,EAAOK,EAAUsD,GAC/B,IAAIvE,EAAQwE,EACX5F,EAAI,EACJiC,EAAM,GAGP,GAAKd,EAAaa,GAEjB,IADAZ,EAASY,EAAMZ,OACPpB,EAAIoB,EAAQpB,IAGL,OAFd4F,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,QAMZ,IAAM5F,KAAKgC,EAGI,OAFd4D,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,GAMb,OAAOxH,EAAM6D,IAId4D,KAAM,EAIN5G,QAASA,KAGa,mBAAX6G,SACXhF,GAAOG,GAAI6E,OAAOC,UAAahI,GAAK+H,OAAOC,WAI5CjF,GAAOsB,KAAM,uEAAuE4D,MAAO,KAC1F,SAAUC,EAAI1E,GACb5C,EAAY,WAAa4C,EAAO,KAAQA,EAAKC,gBA0B/C,IAAI0E,GAAMnI,GAAImI,IAGVhD,GAAOnF,GAAImF,KAGXC,GAASpF,GAAIoF,OAGbgD,GAAa,sBAGbC,GAAW,IAAIC,OAClB,IAAMF,GAAa,8BAAgCA,GAAa,KAChE,KAODrF,GAAOwF,SAAW,SAAUC,EAAGC,GAC9B,IAAIC,EAAMD,GAAKA,EAAE/F,WAEjB,OAAO8F,IAAME,MAAWA,GAAwB,IAAjBA,EAAIrH,YAIlCmH,EAAED,SACDC,EAAED,SAAUG,GACZF,EAAEG,yBAA8D,GAAnCH,EAAEG,wBAAyBD,MAS3D,IAAIE,EAAa,+CAEjB,SAASC,EAAYC,EAAIC,GACxB,OAAKA,EAGQ,OAAPD,EACG,SAIDA,EAAG1I,MAAO,GAAI,GAAM,KAAO0I,EAAGE,WAAYF,EAAGzF,OAAS,GAAIxC,SAAU,IAAO,IAI5E,KAAOiI,EAGf/F,GAAOkG,eAAiB,SAAUC,GACjC,OAASA,EAAM,IAAK/C,QAASyC,EAAYC,IAM1C,IAAIM,GAAezJ,EAClB0J,GAAa1I,GAEd,WAEA,IAAIuB,EACHoH,EACAC,EACAC,EACAC,EAIA9J,EACAmH,EACA4C,EACAC,EACAhC,EAPAhH,EAAO0I,GAUPpD,EAAUjD,GAAOiD,QACjB2D,EAAU,EACVC,EAAO,EACPC,EAAaC,IACbC,EAAaD,IACbE,EAAgBF,IAChBG,EAAyBH,IACzBI,EAAY,SAAU1B,EAAGC,GAIxB,OAHKD,IAAMC,IACVe,GAAe,GAET,GAGRW,EAAW,6HAMXC,EAAa,0BAA4BhC,GACx
C,0CAGDiC,EAAa,MAAQjC,GAAa,KAAOgC,EAAa,OAAShC,GAG9D,gBAAkBA,GAGlB,2DAA6DgC,EAAa,OAC1EhC,GAAa,OAEdkC,EAAU,KAAOF,EAAa,wFAOAC,EAAa,eAO3CE,EAAc,IAAIjC,OAAQF,GAAa,IAAK,KAE5CoC,EAAS,IAAIlC,OAAQ,IAAMF,GAAa,KAAOA,GAAa,KAC5DqC,EAAqB,IAAInC,OAAQ,IAAMF,GAAa,WAAaA,GAAa,IAC7EA,GAAa,KACdsC,EAAW,IAAIpC,OAAQF,GAAa,MAEpCuC,EAAU,IAAIrC,OAAQgC,GACtBM,EAAc,IAAItC,OAAQ,IAAM8B,EAAa,KAE7CS,EAAY,CACXC,GAAI,IAAIxC,OAAQ,MAAQ8B,EAAa,KACrCW,MAAO,IAAIzC,OAAQ,QAAU8B,EAAa,KAC1CY,IAAK,IAAI1C,OAAQ,KAAO8B,EAAa,SACrCa,KAAM,IAAI3C,OAAQ,IAAM+B,GACxBa,OAAQ,IAAI5C,OAAQ,IAAMgC,GAC1Ba,MAAO,IAAI7C,OACV,yDACCF,GAAa,+BAAiCA,GAAa,cAC3DA,GAAa,aAAeA,GAAa,SAAU,KACrDgD,KAAM,IAAI9C,OAAQ,OAAS6B,EAAW,KAAM,KAI5CkB,aAAc,IAAI/C,OAAQ,IAAMF,GAC/B,mDAAqDA,GACrD,mBAAqBA,GAAa,mBAAoB,MAGxDkD,EAAU,sCACVC,EAAU,SAGVC,EAAa,mCAEbC,EAAW,OAIXC,EAAY,IAAIpD,OAAQ,uBAAyBF,GAChD,uBAAwB,KACzBuD,EAAY,SAAUC,EAAQC,GAC7B,IAAIC,EAAO,KAAOF,EAAOxL,MAAO,GAAM,MAEtC,OAAKyL,IAUEC,EAAO,EACbC,OAAOC,aAAcF,EAAO,OAC5BC,OAAOC,aAAcF,GAAQ,GAAK,MAAe,KAAPA,EAAe,SAO3DG,EAAgB,WACfC,KAGDC,EAAqBC,EACpB,SAAU7I,GACT,OAAyB,IAAlBA,EAAK8I,UAAqB/I,GAAUC,EAAM,aAElD,CAAE+I,IAAK,aAAcC,KAAM,WAa7B,IACC7L,EAAKD,MACFT,GAAMI,GAAMG,KAAM4I,GAAaqD,YACjCrD,GAAaqD,YAMdxM,GAAKmJ,GAAaqD,WAAWnJ,QAAShC,SACrC,MAAQoL,GACT/L,EAAO,CACND,MAAO,SAAUiF,EAAQgH,GACxBtD,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMmM,KAEvCnM,KAAM,SAAUmF,GACf0D,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMiE,UAAW,MAKpD,SAASmI,EAAM3J,EAAUC,EAAS+D,EAAS4F,GAC1C,IAAIC,EAAG5K,EAAGsB,EAAMuJ,EAAKC,EAAOC,EAAQC,EACnCC,EAAajK,GAAWA,EAAQqE,cAGhCjG,EAAW4B,EAAUA,EAAQ5B,SAAW,EAKzC,GAHA2F,EAAUA,GAAW,GAGI,iBAAbhE,IAA0BA,GACxB,IAAb3B,GAA+B,IAAbA,GAA+B,KAAbA,EAEpC,OAAO2F,EAIR,IAAM4F,IACLV,EAAajJ,GACbA,EAAUA,GAAWvD,EAEhB+J,GAAiB,CAIrB,GAAkB,KAAbpI,IAAqB0L,EAAQvB,EAAW2B,KAAMnK,IAGlD,GAAO6J,EAAIE,EAAO,IAGjB,GAAkB,IAAb1L,EAAiB,CACrB,KAAOkC,EAAON,EAAQmK,eAAgBP,IASrC,OAAO7F,EALP,GAAKzD,EAAK8J,KAAOR,EAEhB,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,OAWT,GAAKkG,IAAgB3J,EAAO2J,EAAWE,eAAgBP,KACtDF,EAAKpE,SAAUtF,EAASM,IACxBA,EAAK8J,KAAOR,EAGZ,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,MAK
H,CAAA,GAAK+F,EAAO,GAElB,OADArM,EAAKD,MAAOuG,EAAS/D,EAAQqK,qBAAsBtK,IAC5CgE,EAGD,IAAO6F,EAAIE,EAAO,KAAS9J,EAAQsK,uBAEzC,OADA7M,EAAKD,MAAOuG,EAAS/D,EAAQsK,uBAAwBV,IAC9C7F,EAKT,KAAMiD,EAAwBjH,EAAW,MACrC0G,GAAcA,EAAUnC,KAAMvE,IAAe,CAYhD,GAVAiK,EAAcjK,EACdkK,EAAajK,EASK,IAAb5B,IACFqJ,EAASnD,KAAMvE,IAAcyH,EAAmBlD,KAAMvE,IAAe,EAGvEkK,EAAazB,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAC9DO,IAQkBA,GAAY/B,GAAQuM,SAG/BX,EAAM7J,EAAQX,aAAc,OAClCwK,EAAM/J,GAAOkG,eAAgB6D,GAE7B7J,EAAQV,aAAc,KAAQuK,EAAM9G,IAMtC/D,GADA+K,EAASU,EAAU1K,IACRK,OACX,MAAQpB,IACP+K,EAAQ/K,IAAQ6K,EAAM,IAAMA,EAAM,UAAa,IAC9Ca,EAAYX,EAAQ/K,IAEtBgL,EAAcD,EAAOY,KAAM,KAG5B,IAIC,OAHAlN,EAAKD,MAAOuG,EACXkG,EAAWW,iBAAkBZ,IAEvBjG,EACN,MAAQ8G,GACT7D,EAAwBjH,GAAU,GACjC,QACI8J,IAAQ9G,GACZ/C,EAAQ8K,gBAAiB,QAQ9B,OAAOC,GAAQhL,EAASmD,QAASkC,GAAU,MAAQpF,EAAS+D,EAAS4F,GAStE,SAAS9C,IACR,IAAImE,EAAO,GAaX,OAXA,SAASC,EAAOC,EAAKtG,GASpB,OALKoG,EAAKvN,KAAMyN,EAAM,KAAQ9E,EAAK+E,oBAG3BF,EAAOD,EAAKI,SAEXH,EAAOC,EAAM,KAAQtG,GAShC,SAASyG,EAAcpL,GAEtB,OADAA,EAAI8C,IAAY,EACT9C,EAOR,SAASqL,EAAQrL,GAChB,IAAIsL,EAAK9O,EAAS0C,cAAe,YAEjC,IACC,QAASc,EAAIsL,GACZ,MAAQ/B,GACT,OAAO,EACN,QAGI+B,EAAG9L,YACP8L,EAAG9L,WAAWC,YAAa6L,GAI5BA,EAAK,MAQP,SAASC,EAAmBhN,GAC3B,OAAO,SAAU8B,GAChB,OAAOD,GAAUC,EAAM,UAAaA,EAAK9B,OAASA,GAQpD,SAASiN,EAAoBjN,GAC5B,OAAO,SAAU8B,GAChB,OAASD,GAAUC,EAAM,UAAaD,GAAUC,EAAM,YACrDA,EAAK9B,OAASA,GAQjB,SAASkN,EAAsBtC,GAG9B,OAAO,SAAU9I,GAKhB,MAAK,SAAUA,EASTA,EAAKb,aAAgC,IAAlBa,EAAK8I,SAGvB,UAAW9I,EACV,UAAWA,EAAKb,WACba,EAAKb,WAAW2J,WAAaA,EAE7B9I,EAAK8I,WAAaA,EAMpB9I,EAAKqL,aAAevC,GAG1B9I,EAAKqL,cAAgBvC,GACpBF,EAAoB5I,KAAW8I,EAG3B9I,EAAK8I,WAAaA,EAKd,UAAW9I,GACfA,EAAK8I,WAAaA,GAY5B,SAASwC,EAAwB3L,GAChC,OAAOoL,EAAc,SAAUQ,GAE9B,OADAA,GAAYA,EACLR,EAAc,SAAU1B,EAAMlF,GACpC,IAAIzC,EACH8J,EAAe7L,EAAI,GAAI0J,EAAKvJ,OAAQyL,GACpC7M,EAAI8M,EAAa1L,OAGlB,MAAQpB,IACF2K,EAAQ3H,EAAI8J,EAAc9M,MAC9B2K,EAAM3H,KAASyC,EAASzC,GAAM2H,EAAM3H,SAYzC,SAASuI,EAAavK,GACrB,OAAOA,GAAmD,oBAAjCA,EAAQqK,sBAAwCrK,EAQ1E,SAASiJ,EAAanK,GACrB,IAAIiN,EACHhN,EAAMD,EAAOA,EAAKuF,eAAi
BvF,EAAOoH,GAO3C,OAAKnH,GAAOtC,GAA6B,IAAjBsC,EAAIX,UAAmBW,EAAI6E,kBAMnDA,GADAnH,EAAWsC,GACgB6E,gBAC3B4C,GAAkB1G,GAAOmE,SAAUxH,GAInCgI,EAAUb,EAAgBa,SACzBb,EAAgBoI,uBAChBpI,EAAgBqI,kBAOZrI,EAAgBqI,mBAMpB/F,IAAgBzJ,IACdsP,EAAYtP,EAASyP,cAAiBH,EAAUI,MAAQJ,GAG1DA,EAAUK,iBAAkB,SAAUpD,GAOvC/K,GAAQoO,QAAUf,EAAQ,SAAUC,GAEnC,OADA3H,EAAgBpE,YAAa+L,GAAKnB,GAAKtK,GAAOiD,SACtCtG,EAAS6P,oBACf7P,EAAS6P,kBAAmBxM,GAAOiD,SAAU3C,SAMhDnC,GAAQsO,kBAAoBjB,EAAQ,SAAUC,GAC7C,OAAO9G,EAAQnH,KAAMiO,EAAI,OAK1BtN,GAAQuM,MAAQc,EAAQ,WACvB,OAAO7O,EAASmO,iBAAkB,YAYnC3M,GAAQuO,OAASlB,EAAQ,WACxB,IAEC,OADA7O,EAASgQ,cAAe,oBACjB,EACN,MAAQjD,GACT,OAAO,KAKJvL,GAAQoO,SACZjG,EAAKsG,OAAO7E,GAAK,SAAUuC,GAC1B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,OAAOA,EAAKjB,aAAc,QAAWsN,IAGvCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAIlG,EAAON,EAAQmK,eAAgBC,GACnC,OAAO9J,EAAO,CAAEA,GAAS,OAI3B8F,EAAKsG,OAAO7E,GAAM,SAAUuC,GAC3B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,IAAIxB,EAAwC,oBAA1BwB,EAAKsM,kBACtBtM,EAAKsM,iBAAkB,MACxB,OAAO9N,GAAQA,EAAK8F,QAAU+H,IAMhCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAI1H,EAAME,EAAGgC,EACZV,EAAON,EAAQmK,eAAgBC,GAEhC,GAAK9J,EAAO,CAIX,IADAxB,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAIVU,EAAQhB,EAAQsM,kBAAmBlC,GACnCpL,EAAI,EACJ,MAAUsB,EAAOU,EAAOhC,KAEvB,IADAF,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAKZ,MAAO,MAMV8F,EAAKsD,KAAK3B,IAAM,SAAU8E,EAAK7M,GAC9B,MAA6C,oBAAjCA,EAAQqK,qBACZrK,EAAQqK,qBAAsBwC,GAI9B7M,EAAQ4K,iBAAkBiC,IAKnCzG,EAAKsD,KAAK5B,MAAQ,SAAUgF,EAAW9M,GACtC,GAA+C,oBAAnCA,EAAQsK,wBAA0C9D,EAC7D,OAAOxG,EAAQsK,uBAAwBwC,IASzCrG,EAAY,GAIZ6E,EAAQ,SAAUC,GAEjB,IAAIwB,EAEJnJ,EAAgBpE,YAAa+L,GAAKyB,UACjC,UAAYjK,EAAU,iDACLA,EAAU,oEAKtBwI,EAAGX,iBAAkB,cAAexK,QACzCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,aAAe+B,EAAW,KAI1DqE,EAAGX,iBAAkB,QAAU7H,EAAU,MAAO3C,QACrDqG,EAAUhJ,KAAM,MAMX8N,EAAGX,iBAAkB,KAAO7H,EAAU,MAAO3C,QAClDqG,EAAUhJ,KAAM,YAOX8N,EAAGX,iBAAkB,YAAax
K,QACvCqG,EAAUhJ,KAAM,aAKjBsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,UAC5BiM,EAAG/L,YAAauN,GAAQzN,aAAc,OAAQ,KAQ9CsE,EAAgBpE,YAAa+L,GAAKnC,UAAW,EACM,IAA9CmC,EAAGX,iBAAkB,aAAcxK,QACvCqG,EAAUhJ,KAAM,WAAY,cAQ7BsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,IAC5BiM,EAAG/L,YAAauN,GACVxB,EAAGX,iBAAkB,aAAcxK,QACxCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,QAAUA,GAAa,KAC3DA,GAAa,kBAIVlH,GAAQuO,QAQb/F,EAAUhJ,KAAM,QAGjBgJ,EAAYA,EAAUrG,QAAU,IAAIiF,OAAQoB,EAAUkE,KAAM,MAM5D1D,EAAY,SAAU1B,EAAGC,GAGxB,GAAKD,IAAMC,EAEV,OADAe,GAAe,EACR,EAIR,IAAI0G,GAAW1H,EAAEG,yBAA2BF,EAAEE,wBAC9C,OAAKuH,IAgBU,GAPfA,GAAY1H,EAAElB,eAAiBkB,KAASC,EAAEnB,eAAiBmB,GAC1DD,EAAEG,wBAAyBF,GAG3B,KAIGvH,GAAQiP,cAAgB1H,EAAEE,wBAAyBH,KAAQ0H,EAOzD1H,IAAM9I,GAAY8I,EAAElB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcX,IACrB,EAOJC,IAAM/I,GAAY+I,EAAEnB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcV,GACtB,EAIDc,EACJ5I,GAAQJ,KAAMgJ,EAAWf,GAAM7H,GAAQJ,KAAMgJ,EAAWd,GAC1D,EAGe,EAAVyH,GAAe,EAAI,KAGpBxQ,EAqpBR,IAAMuC,KAlpBN0K,EAAKjF,QAAU,SAAU0I,EAAMC,GAC9B,OAAO1D,EAAMyD,EAAM,KAAM,KAAMC,IAGhC1D,EAAK2D,gBAAkB,SAAU/M,EAAM6M,GAGtC,GAFAlE,EAAa3I,GAERkG,IACHQ,EAAwBmG,EAAO,QAC7B1G,IAAcA,EAAUnC,KAAM6I,IAEjC,IACC,IAAIlM,EAAMwD,EAAQnH,KAAMgD,EAAM6M,GAG9B,GAAKlM,GAAOhD,GAAQsO,mBAIlBjM,EAAK7D,UAAuC,KAA3B6D,EAAK7D,SAAS2B,SAChC,OAAO6C,EAEP,MAAQuI,GACTxC,EAAwBmG,GAAM,GAIhC,OAAuD,EAAhDzD,EAAMyD,EAAM1Q,EAAU,KAAM,CAAE6D,IAASF,QAG/CsJ,EAAKpE,SAAW,SAAUtF,EAASM,GAUlC,OAHON,EAAQqE,eAAiBrE,IAAavD,GAC5CwM,EAAajJ,GAEPF,GAAOwF,SAAUtF,EAASM,IAIlCoJ,EAAK4D,KAAO,SAAUhN,EAAMC,IAOpBD,EAAK+D,eAAiB/D,IAAU7D,GACtCwM,EAAa3I,GAGd,IAAIL,EAAKmG,EAAKmH,WAAYhN,EAAKC,eAG9BvB,EAAMgB,GAAMpC,GAAOP,KAAM8I,EAAKmH,WAAYhN,EAAKC,eAC9CP,EAAIK,EAAMC,GAAOiG,QACjB1D,EAEF,YAAaA,IAAR7D,EACGA,EAGDqB,EAAKjB,aAAckB,IAG3BmJ,EAAKtG,MAAQ,SAAUC,GACtB,MAAM,IAAI1G,MAAO,0CAA4C0G,IAO9DvD,GAAO0N,WAAa,SAAUzJ,GAC7B,IAAIzD,EACHmN,EAAa,GACbzL,EAAI,EACJhD,EAAI,EAWL,GAJAuH,GAAgBtI,GAAQyP,WACxBpH,GAAarI,GAAQyP,YAAcvQ,GAAMG,KAAMyG,EAAS,GACxD7B,GAAK5E,KAAMyG,EAASkD,GAEfV,EAAe,CACnB,MAAUjG,EAAOyD,EAAS/E,KACpBsB,IAASyD,EAAS/E,KACtBgD,EAAIyL,EAAWhQ,KAAM
uB,IAGvB,MAAQgD,IACPG,GAAO7E,KAAMyG,EAAS0J,EAAYzL,GAAK,GAQzC,OAFAsE,EAAY,KAELvC,GAGRjE,GAAOG,GAAGuN,WAAa,WACtB,OAAO3Q,KAAKkE,UAAWjB,GAAO0N,WAAYrQ,GAAMK,MAAOX,UAGxDuJ,EAAOtG,GAAOqN,KAAO,CAGpBhC,YAAa,GAEbwC,aAActC,EAEdvB,MAAOlC,EAEP2F,WAAY,GAEZ7D,KAAM,GAENkE,SAAU,CACTC,IAAK,CAAExE,IAAK,aAAc7H,OAAO,GACjCsM,IAAK,CAAEzE,IAAK,cACZ0E,IAAK,CAAE1E,IAAK,kBAAmB7H,OAAO,GACtCwM,IAAK,CAAE3E,IAAK,oBAGb4E,UAAW,CACVjG,KAAM,SAAU8B,GAWf,OAVAA,EAAO,GAAMA,EAAO,GAAI5G,QAASuF,EAAWC,GAG5CoB,EAAO,IAAQA,EAAO,IAAOA,EAAO,IAAOA,EAAO,IAAO,IACvD5G,QAASuF,EAAWC,GAEF,OAAfoB,EAAO,KACXA,EAAO,GAAM,IAAMA,EAAO,GAAM,KAG1BA,EAAM3M,MAAO,EAAG,IAGxB+K,MAAO,SAAU4B,GAkChB,OAtBAA,EAAO,GAAMA,EAAO,GAAItJ,cAEU,QAA7BsJ,EAAO,GAAI3M,MAAO,EAAG,IAGnB2M,EAAO,IACZJ,EAAKtG,MAAO0G,EAAO,IAKpBA,EAAO,KAASA,EAAO,GACtBA,EAAO,IAAQA,EAAO,IAAO,GAC7B,GAAqB,SAAfA,EAAO,IAAiC,QAAfA,EAAO,KAEvCA,EAAO,KAAWA,EAAO,GAAMA,EAAO,IAAwB,QAAfA,EAAO,KAG3CA,EAAO,IAClBJ,EAAKtG,MAAO0G,EAAO,IAGbA,GAGR7B,OAAQ,SAAU6B,GACjB,IAAIoE,EACHC,GAAYrE,EAAO,IAAOA,EAAO,GAElC,OAAKlC,EAAUM,MAAM5D,KAAMwF,EAAO,IAC1B,MAIHA,EAAO,GACXA,EAAO,GAAMA,EAAO,IAAOA,EAAO,IAAO,GAG9BqE,GAAYzG,EAAQpD,KAAM6J,KAGnCD,EAASzD,EAAU0D,GAAU,MAG7BD,EAASC,EAASzQ,QAAS,IAAKyQ,EAAS/N,OAAS8N,GAAWC,EAAS/N,UAGxE0J,EAAO,GAAMA,EAAO,GAAI3M,MAAO,EAAG+Q,GAClCpE,EAAO,GAAMqE,EAAShR,MAAO,EAAG+Q,IAI1BpE,EAAM3M,MAAO,EAAG,MAIzBuP,OAAQ,CAEP3E,IAAK,SAAUqG,GACd,IAAIC,EAAmBD,EAAiBlL,QAASuF,EAAWC,GAAYlI,cACxE,MAA4B,MAArB4N,EACN,WACC,OAAO,GAER,SAAU9N,GACT,OAAOD,GAAUC,EAAM+N,KAI1BvG,MAAO,SAAUgF,GAChB,IAAIwB,EAAU1H,EAAYkG,EAAY,KAEtC,OAAOwB,IACJA,EAAU,IAAIjJ,OAAQ,MAAQF,GAAa,IAAM2H,EAClD,IAAM3H,GAAa,SACpByB,EAAYkG,EAAW,SAAUxM,GAChC,OAAOgO,EAAQhK,KACY,iBAAnBhE,EAAKwM,WAA0BxM,EAAKwM,WACb,oBAAtBxM,EAAKjB,cACXiB,EAAKjB,aAAc,UACpB,OAKL2I,KAAM,SAAUzH,EAAMgO,EAAUC,GAC/B,OAAO,SAAUlO,GAChB,IAAImO,EAAS/E,EAAK4D,KAAMhN,EAAMC,GAE9B,OAAe,MAAVkO,EACgB,OAAbF,GAEFA,IAINE,GAAU,GAEQ,MAAbF,EACGE,IAAWD,EAED,OAAbD,EACGE,IAAWD,EAED,OAAbD,EACGC,GAAqC,IAA5BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,IAAoC,EAA3BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,GAASC,EAAO
tR,OAAQqR,EAAMpO,UAAaoO,EAEjC,OAAbD,GAEkB,GADb,IAAME,EAAOvL,QAASoE,EAAa,KAAQ,KAClD5J,QAAS8Q,GAEM,OAAbD,IACGE,IAAWD,GAASC,EAAOtR,MAAO,EAAGqR,EAAMpO,OAAS,KAAQoO,EAAQ,QAO9EtG,MAAO,SAAU1J,EAAMkQ,EAAMC,EAAWnN,EAAOE,GAC9C,IAAIkN,EAAgC,QAAvBpQ,EAAKrB,MAAO,EAAG,GAC3B0R,EAA+B,SAArBrQ,EAAKrB,OAAQ,GACvB2R,EAAkB,YAATJ,EAEV,OAAiB,IAAVlN,GAAwB,IAATE,EAGrB,SAAUpB,GACT,QAASA,EAAKb,YAGf,SAAUa,EAAMyO,EAAUC,GACzB,IAAI/D,EAAOgE,EAAYnQ,EAAMoQ,EAAWC,EACvC9F,EAAMuF,IAAWC,EAAU,cAAgB,kBAC3CO,EAAS9O,EAAKb,WACdc,EAAOuO,GAAUxO,EAAKD,SAASG,cAC/B6O,GAAYL,IAAQF,EACpBQ,GAAO,EAER,GAAKF,EAAS,CAGb,GAAKR,EAAS,CACb,MAAQvF,EAAM,CACbvK,EAAOwB,EACP,MAAUxB,EAAOA,EAAMuK,GACtB,GAAKyF,EACJzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,SAEL,OAAO,EAKT+Q,EAAQ9F,EAAe,SAAT7K,IAAoB2Q,GAAS,cAE5C,OAAO,EAMR,GAHAA,EAAQ,CAAEN,EAAUO,EAAOG,WAAaH,EAAOI,WAG1CX,GAAWQ,EAAW,CAM1BC,GADAJ,GADAjE,GADAgE,EAAaG,EAAQrM,KAAeqM,EAAQrM,GAAY,KACpCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KACzBA,EAAO,GAC3BnM,EAAOoQ,GAAaE,EAAO7F,WAAY2F,GAEvC,MAAUpQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAG3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAGlC,GAAuB,IAAlBpG,EAAKV,YAAoBkR,GAAQxQ,IAASwB,EAAO,CACrD2O,EAAYzQ,GAAS,CAAEkI,EAASwI,EAAWI,GAC3C,YAgBF,GATKD,IAIJC,EADAJ,GADAjE,GADAgE,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,KAChCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KAMhC,IAATqE,EAGJ,MAAUxQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAC3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAElC,IAAO4J,EACNzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,aACHkR,IAGGD,KACJJ,EAAanQ,EAAMiE,KAChBjE,EAAMiE,GAAY,KACTvE,GAAS,CAAEkI,EAAS4I,IAG5BxQ,IAASwB,GACb,MASL,OADAgP,GAAQ5N,KACQF,GAAW8N,EAAO9N,GAAU,GAAqB,GAAhB8N,EAAO9N,KAK5DyG,OAAQ,SAAUwH,EAAQ5D,GAMzB,IAAI6D,EACHzP,EAAKmG,EAAKiB,QAASoI,IAAYrJ,EAAKuJ,WAAYF,EAAOjP,gBACtDkJ,EAAKtG,MAAO,uBAAyBqM,GAKvC,OAAKxP,EAAI8C,GACD9C,EAAI4L,GAIK,EAAZ5L,EAAGG,QACPsP,EAAO,CAAED,EAAQA,EAAQ,GAAI5D,GACtBzF,EAAKuJ,WAAW7R,eAAgB2R,EAAOjP,eAC7C6K,EAAc,SAAU1B,EAAMlF,GAC7B,IAAImL,EACHC,EAAU5P,EAAI0J,EAAMkC,GACpB7M,EAAI6Q,EAAQzP,OACb,MAAQpB,IAEP2K,EADAiG,EAAMlS,GAAQJ,KAAMqM,EAAMkG,EAAS7Q,OAClByF,EAASmL,GAAQC,EAAS7Q,MAG7C,SAAUsB,GACT,OAAOL,EAAIK,EAAM,E
AAGoP,KAIhBzP,IAIToH,QAAS,CAGRyI,IAAKzE,EAAc,SAAUtL,GAK5B,IAAIgN,EAAQ,GACXhJ,EAAU,GACVgM,EAAUC,GAASjQ,EAASmD,QAASkC,GAAU,OAEhD,OAAO2K,EAAShN,GACfsI,EAAc,SAAU1B,EAAMlF,EAASsK,EAAUC,GAChD,IAAI1O,EACH2P,EAAYF,EAASpG,EAAM,KAAMqF,EAAK,IACtChQ,EAAI2K,EAAKvJ,OAGV,MAAQpB,KACAsB,EAAO2P,EAAWjR,MACxB2K,EAAM3K,KAASyF,EAASzF,GAAMsB,MAIjC,SAAUA,EAAMyO,EAAUC,GAOzB,OANAjC,EAAO,GAAMzM,EACbyP,EAAShD,EAAO,KAAMiC,EAAKjL,GAI3BgJ,EAAO,GAAM,MACLhJ,EAAQmB,SAInBgL,IAAK7E,EAAc,SAAUtL,GAC5B,OAAO,SAAUO,GAChB,OAAuC,EAAhCoJ,EAAM3J,EAAUO,GAAOF,UAIhCkF,SAAU+F,EAAc,SAAUjM,GAEjC,OADAA,EAAOA,EAAK8D,QAASuF,EAAWC,GACzB,SAAUpI,GAChB,OAAsE,GAA7DA,EAAKqD,aAAe7D,GAAOV,KAAMkB,IAAS5C,QAAS0B,MAW9D+Q,KAAM9E,EAAc,SAAU8E,GAO7B,OAJMxI,EAAYrD,KAAM6L,GAAQ,KAC/BzG,EAAKtG,MAAO,qBAAuB+M,GAEpCA,EAAOA,EAAKjN,QAASuF,EAAWC,GAAYlI,cACrC,SAAUF,GAChB,IAAI8P,EACJ,GACC,GAAOA,EAAW5J,EACjBlG,EAAK6P,KACL7P,EAAKjB,aAAc,aAAgBiB,EAAKjB,aAAc,QAGtD,OADA+Q,EAAWA,EAAS5P,iBACA2P,GAA2C,IAAnCC,EAAS1S,QAASyS,EAAO,YAE3C7P,EAAOA,EAAKb,aAAkC,IAAlBa,EAAKlC,UAC7C,OAAO,KAKTqE,OAAQ,SAAUnC,GACjB,IAAI+P,EAAOzT,GAAO0T,UAAY1T,GAAO0T,SAASD,KAC9C,OAAOA,GAAQA,EAAKlT,MAAO,KAAQmD,EAAK8J,IAGzCmG,KAAM,SAAUjQ,GACf,OAAOA,IAASsD,GAGjB4M,MAAO,SAAUlQ,GAChB,OAAOA,IA5oCV,WACC,IACC,OAAO7D,EAASgU,cACf,MAAQC,KAyoCQC,IACflU,EAASmU,eACLtQ,EAAK9B,MAAQ8B,EAAKuQ,OAASvQ,EAAKwQ,WAItCC,QAASrF,GAAsB,GAC/BtC,SAAUsC,GAAsB,GAEhCsF,QAAS,SAAU1Q,GAIlB,OAASD,GAAUC,EAAM,YAAeA,EAAK0Q,SAC1C3Q,GAAUC,EAAM,aAAgBA,EAAK2Q,UAGzCA,SAAU,SAAU3Q,GAWnB,OALKA,EAAKb,YAETa,EAAKb,WAAWyR,eAGQ,IAAlB5Q,EAAK2Q,UAIbE,MAAO,SAAU7Q,GAMhB,IAAMA,EAAOA,EAAKiP,WAAYjP,EAAMA,EAAOA,EAAK8Q,YAC/C,GAAK9Q,EAAKlC,SAAW,EACpB,OAAO,EAGT,OAAO,GAGRgR,OAAQ,SAAU9O,GACjB,OAAQ8F,EAAKiB,QAAQ8J,MAAO7Q,IAI7B+Q,OAAQ,SAAU/Q,GACjB,OAAOgI,EAAQhE,KAAMhE,EAAKD,WAG3B0M,MAAO,SAAUzM,GAChB,OAAO+H,EAAQ/D,KAAMhE,EAAKD,WAG3BiR,OAAQ,SAAUhR,GACjB,OAAOD,GAAUC,EAAM,UAA2B,WAAdA,EAAK9B,MACxC6B,GAAUC,EAAM,WAGlBlB,KAAM,SAAUkB,GACf,IAAIgN,EACJ,OAAOjN,GAAUC,EAAM,UAA2B,SAAdA,EAAK9B,OAKI,OAAxC8O,EAAOhN,EAAKjB,aAAc,UACN,SAAvBiO,EAAK9M,gBAIRgB,MAAOoK,EAAwB,WA
C9B,MAAO,CAAE,KAGVlK,KAAMkK,EAAwB,SAAU2F,EAAenR,GACtD,MAAO,CAAEA,EAAS,KAGnBqB,GAAImK,EAAwB,SAAU2F,EAAenR,EAAQyL,GAC5D,MAAO,CAAEA,EAAW,EAAIA,EAAWzL,EAASyL,KAG7ClK,KAAMiK,EAAwB,SAAUE,EAAc1L,GAErD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGRhK,IAAK8J,EAAwB,SAAUE,EAAc1L,GAEpD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR0F,GAAI5F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAC3D,IAAI7M,EAUJ,IAPCA,EADI6M,EAAW,EACXA,EAAWzL,EACOA,EAAXyL,EACPzL,EAEAyL,EAGU,KAAL7M,GACT8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR2F,GAAI7F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAE3D,IADA,IAAI7M,EAAI6M,EAAW,EAAIA,EAAWzL,EAASyL,IACjC7M,EAAIoB,GACb0L,EAAarO,KAAMuB,GAEpB,OAAO8M,OAKLzE,QAAQqK,IAAMtL,EAAKiB,QAAQ5F,GAGrB,CAAEkQ,OAAO,EAAMC,UAAU,EAAMC,MAAM,EAAMC,UAAU,EAAMC,OAAO,GAC5E3L,EAAKiB,QAASrI,GAAMwM,EAAmBxM,GAExC,IAAMA,IAAK,CAAEgT,QAAQ,EAAMC,OAAO,GACjC7L,EAAKiB,QAASrI,GAAMyM,EAAoBzM,GAIzC,SAAS2Q,KAIT,SAASlF,EAAU1K,EAAUmS,GAC5B,IAAIrC,EAAS/F,EAAOqI,EAAQ3T,EAC3B4T,EAAOrI,EAAQsI,EACfC,EAASxL,EAAY/G,EAAW,KAEjC,GAAKuS,EACJ,OAAOJ,EAAY,EAAII,EAAOnV,MAAO,GAGtCiV,EAAQrS,EACRgK,EAAS,GACTsI,EAAajM,EAAK6H,UAElB,MAAQmE,EAAQ,CA2Bf,IAAM5T,KAxBAqR,KAAa/F,EAAQvC,EAAO2C,KAAMkI,MAClCtI,IAGJsI,EAAQA,EAAMjV,MAAO2M,EAAO,GAAI1J,SAAYgS,GAE7CrI,EAAOtM,KAAQ0U,EAAS,KAGzBtC,GAAU,GAGH/F,EAAQtC,EAAmB0C,KAAMkI,MACvCvC,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EAGPrR,KAAMsL,EAAO,GAAI5G,QAASkC,GAAU,OAErCgN,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAIhBgG,EAAKsG,SACX5C,EAAQlC,EAAWpJ,GAAO0L,KAAMkI,KAAgBC,EAAY7T,MAChEsL,EAAQuI,EAAY7T,GAAQsL,MAC9B+F,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EACPrR,KAAMA,EACNiG,QAASqF,IAEVsI,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAI/B,IAAMyP,EACL,MAOF,OAAKqC,EACGE,EAAMhS,OAGPgS,EACN1I,EAAKtG,MAAOrD,GAGZ+G,EAAY/G,EAAUgK,GAAS5M,MAAO,GAGxC,SAASuN,EAAYyH,GAIpB,IAHA,IAAInT,EAAI,EACP+C,EAAMoQ,EAAO/R,OACbL,EAAW,GACJf,EAAI+C,EAAK/C,IAChBe,GAAYoS,EAAQnT,GAAI4F,MAEzB,OAAO7E,EAGR,SAASoJ,EAAe4G,EAASwC,EAAYC,GAC5C,IAAInJ,EAAMkJ,EAAWlJ,IACpBoJ,EAAOF,EAAWjJ,KAClB4B,EAAMuH,GAAQpJ,EACdqJ,EAAmBF,G
AAgB,eAARtH,EAC3ByH,EAAWhM,IAEZ,OAAO4L,EAAW/Q,MAGjB,SAAUlB,EAAMN,EAASgP,GACxB,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAC3B,OAAO3C,EAASzP,EAAMN,EAASgP,GAGjC,OAAO,GAIR,SAAU1O,EAAMN,EAASgP,GACxB,IAAI4D,EAAU3D,EACb4D,EAAW,CAAEnM,EAASiM,GAGvB,GAAK3D,GACJ,MAAU1O,EAAOA,EAAM+I,GACtB,IAAuB,IAAlB/I,EAAKlC,UAAkBsU,IACtB3C,EAASzP,EAAMN,EAASgP,GAC5B,OAAO,OAKV,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAG3B,GAFAzD,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,IAE/C0P,GAAQpS,GAAUC,EAAMmS,GAC5BnS,EAAOA,EAAM+I,IAAS/I,MAChB,CAAA,IAAOsS,EAAW3D,EAAY/D,KACpC0H,EAAU,KAAQlM,GAAWkM,EAAU,KAAQD,EAG/C,OAASE,EAAU,GAAMD,EAAU,GAOnC,IAHA3D,EAAY/D,GAAQ2H,GAGH,GAAM9C,EAASzP,EAAMN,EAASgP,GAC9C,OAAO,EAMZ,OAAO,GAIV,SAAS8D,EAAgBC,GACxB,OAAyB,EAAlBA,EAAS3S,OACf,SAAUE,EAAMN,EAASgP,GACxB,IAAIhQ,EAAI+T,EAAS3S,OACjB,MAAQpB,IACP,IAAM+T,EAAU/T,GAAKsB,EAAMN,EAASgP,GACnC,OAAO,EAGT,OAAO,GAER+D,EAAU,GAYZ,SAASC,EAAU/C,EAAW3O,EAAKoL,EAAQ1M,EAASgP,GAOnD,IANA,IAAI1O,EACH2S,EAAe,GACfjU,EAAI,EACJ+C,EAAMkO,EAAU7P,OAChB8S,EAAgB,MAAP5R,EAEFtC,EAAI+C,EAAK/C,KACTsB,EAAO2P,EAAWjR,MAClB0N,IAAUA,EAAQpM,EAAMN,EAASgP,KACtCiE,EAAaxV,KAAM6C,GACd4S,GACJ5R,EAAI7D,KAAMuB,KAMd,OAAOiU,EAGR,SAASE,GAAYlF,EAAWlO,EAAUgQ,EAASqD,EAAYC,EAAYC,GAO1E,OANKF,IAAeA,EAAYrQ,KAC/BqQ,EAAaD,GAAYC,IAErBC,IAAeA,EAAYtQ,KAC/BsQ,EAAaF,GAAYE,EAAYC,IAE/BjI,EAAc,SAAU1B,EAAM5F,EAAS/D,EAASgP,GACtD,IAAIuE,EAAMvU,EAAGsB,EAAMkT,EAClBC,EAAS,GACTC,EAAU,GACVC,EAAc5P,EAAQ3D,OAGtBY,EAAQ2I,GA5CX,SAA2B5J,EAAU6T,EAAU7P,GAG9C,IAFA,IAAI/E,EAAI,EACP+C,EAAM6R,EAASxT,OACRpB,EAAI+C,EAAK/C,IAChB0K,EAAM3J,EAAU6T,EAAU5U,GAAK+E,GAEhC,OAAOA,EAuCJ8P,CAAkB9T,GAAY,IAC7BC,EAAQ5B,SAAW,CAAE4B,GAAYA,EAAS,IAG5C8T,GAAY7F,IAAetE,GAAS5J,EAEnCiB,EADAgS,EAAUhS,EAAOyS,EAAQxF,EAAWjO,EAASgP,GAsB/C,GAnBKe,EAaJA,EAAS+D,EATTN,EAAaH,IAAgB1J,EAAOsE,EAAY0F,GAAeP,GAG9D,GAGArP,EAG+B/D,EAASgP,GAEzCwE,EAAaM,EAITV,EAAa,CACjBG,EAAOP,EAAUQ,EAAYE,GAC7BN,EAAYG,EAAM,GAAIvT,EAASgP,GAG/BhQ,EAAIuU,EAAKnT,OACT,MAAQpB,KACAsB,EAAOiT,EAAMvU,MACnBwU,EAAYE,EAAS1U,MAAW8U,EAAWJ,EAAS1U,IAAQsB,IAK/D,GAAKqJ,GACJ,G
AAK0J,GAAcpF,EAAY,CAC9B,GAAKoF,EAAa,CAGjBE,EAAO,GACPvU,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,KAGzBuU,EAAK9V,KAAQqW,EAAW9U,GAAMsB,GAGhC+S,EAAY,KAAQG,EAAa,GAAMD,EAAMvE,GAI9ChQ,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,MAC2C,GAAlEuU,EAAOF,EAAa3V,GAAQJ,KAAMqM,EAAMrJ,GAASmT,EAAQzU,MAE3D2K,EAAM4J,KAAYxP,EAASwP,GAASjT,UAOvCkT,EAAaR,EACZQ,IAAezP,EACdyP,EAAWrR,OAAQwR,EAAaH,EAAWpT,QAC3CoT,GAEGH,EACJA,EAAY,KAAMtP,EAASyP,EAAYxE,GAEvCvR,EAAKD,MAAOuG,EAASyP,KAMzB,SAASO,GAAmB5B,GA+B3B,IA9BA,IAAI6B,EAAcjE,EAAS/N,EAC1BD,EAAMoQ,EAAO/R,OACb6T,EAAkB7N,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAC7C0V,EAAmBD,GAAmB7N,EAAKwH,SAAU,KACrD5O,EAAIiV,EAAkB,EAAI,EAG1BE,EAAehL,EAAe,SAAU7I,GACvC,OAAOA,IAAS0T,GACdE,GAAkB,GACrBE,EAAkBjL,EAAe,SAAU7I,GAC1C,OAA6C,EAAtC5C,GAAQJ,KAAM0W,EAAc1T,IACjC4T,GAAkB,GACrBnB,EAAW,CAAE,SAAUzS,EAAMN,EAASgP,GAMrC,IAAI/N,GAASgT,IAAqBjF,GAAOhP,GAAWqG,MACjD2N,EAAehU,GAAU5B,SAC1B+V,EAAc7T,EAAMN,EAASgP,GAC7BoF,EAAiB9T,EAAMN,EAASgP,IAKlC,OADAgF,EAAe,KACR/S,IAGDjC,EAAI+C,EAAK/C,IAChB,GAAO+Q,EAAU3J,EAAKwH,SAAUuE,EAAQnT,GAAIR,MAC3CuU,EAAW,CAAE5J,EAAe2J,EAAgBC,GAAYhD,QAClD,CAIN,IAHAA,EAAU3J,EAAKsG,OAAQyF,EAAQnT,GAAIR,MAAOhB,MAAO,KAAM2U,EAAQnT,GAAIyF,UAGrD1B,GAAY,CAIzB,IADAf,IAAMhD,EACEgD,EAAID,EAAKC,IAChB,GAAKoE,EAAKwH,SAAUuE,EAAQnQ,GAAIxD,MAC/B,MAGF,OAAO2U,GACF,EAAJnU,GAAS8T,EAAgBC,GACrB,EAAJ/T,GAAS0L,EAGRyH,EAAOhV,MAAO,EAAG6B,EAAI,GACnBzB,OAAQ,CAAEqH,MAAgC,MAAzBuN,EAAQnT,EAAI,GAAIR,KAAe,IAAM,MACvD0E,QAASkC,GAAU,MACrB2K,EACA/Q,EAAIgD,GAAK+R,GAAmB5B,EAAOhV,MAAO6B,EAAGgD,IAC7CA,EAAID,GAAOgS,GAAqB5B,EAASA,EAAOhV,MAAO6E,IACvDA,EAAID,GAAO2I,EAAYyH,IAGzBY,EAAStV,KAAMsS,GAIjB,OAAO+C,EAAgBC,GAiIxB,SAAS/C,GAASjQ,EAAU+J,GAC3B,IAAI9K,EA/H8BqV,EAAiBC,EAC/CC,EACHC,EACAC,EA6HAH,EAAc,GACdD,EAAkB,GAClB/B,EAASvL,EAAehH,EAAW,KAEpC,IAAMuS,EAAS,CAGRxI,IACLA,EAAQW,EAAU1K,IAEnBf,EAAI8K,EAAM1J,OACV,MAAQpB,KACPsT,EAASyB,GAAmBjK,EAAO9K,KACtB+D,GACZuR,EAAY7W,KAAM6U,GAElB+B,EAAgB5W,KAAM6U,IAKxBA,EAASvL,EAAehH,GArJSsU,EAsJNA,EArJxBE,EAA6B,GADkBD,EAsJNA,GArJrBlU,OACvBoU,EAAqC,EAAzBH,EAAgBjU,OAC5BqU,EAAe,SAA
U9K,EAAM3J,EAASgP,EAAKjL,EAAS2Q,GACrD,IAAIpU,EAAM0B,EAAG+N,EACZ4E,EAAe,EACf3V,EAAI,IACJiR,EAAYtG,GAAQ,GACpBiL,EAAa,GACbC,EAAgBxO,EAGhBrF,EAAQ2I,GAAQ6K,GAAapO,EAAKsD,KAAK3B,IAAK,IAAK2M,GAGjDI,EAAkBpO,GAA4B,MAAjBmO,EAAwB,EAAI7R,KAAKC,UAAY,GAC1ElB,EAAMf,EAAMZ,OAeb,IAbKsU,IAMJrO,EAAmBrG,GAAWvD,GAAYuD,GAAW0U,GAO9C1V,IAAM+C,GAAgC,OAAvBzB,EAAOU,EAAOhC,IAAeA,IAAM,CACzD,GAAKwV,GAAalU,EAAO,CACxB0B,EAAI,EAMEhC,GAAWM,EAAK+D,eAAiB5H,IACtCwM,EAAa3I,GACb0O,GAAOxI,GAER,MAAUuJ,EAAUsE,EAAiBrS,KACpC,GAAK+N,EAASzP,EAAMN,GAAWvD,EAAUuS,GAAQ,CAChDvR,EAAKH,KAAMyG,EAASzD,GACpB,MAGGoU,IACJhO,EAAUoO,GAKPP,KAGGjU,GAAQyP,GAAWzP,IACzBqU,IAIIhL,GACJsG,EAAUxS,KAAM6C,IAgBnB,GATAqU,GAAgB3V,EASXuV,GAASvV,IAAM2V,EAAe,CAClC3S,EAAI,EACJ,MAAU+N,EAAUuE,EAAatS,KAChC+N,EAASE,EAAW2E,EAAY5U,EAASgP,GAG1C,GAAKrF,EAAO,CAGX,GAAoB,EAAfgL,EACJ,MAAQ3V,IACCiR,EAAWjR,IAAO4V,EAAY5V,KACrC4V,EAAY5V,GAAMkG,GAAI5H,KAAMyG,IAM/B6Q,EAAa5B,EAAU4B,GAIxBnX,EAAKD,MAAOuG,EAAS6Q,GAGhBF,IAAc/K,GAA4B,EAApBiL,EAAWxU,QACG,EAAtCuU,EAAeL,EAAYlU,QAE7BN,GAAO0N,WAAYzJ,GAUrB,OALK2Q,IACJhO,EAAUoO,EACVzO,EAAmBwO,GAGb5E,GAGFsE,EACNlJ,EAAcoJ,GACdA,KA8BO1U,SAAWA,EAEnB,OAAOuS,EAYR,SAASvH,GAAQhL,EAAUC,EAAS+D,EAAS4F,GAC5C,IAAI3K,EAAGmT,EAAQ4C,EAAOvW,EAAMkL,EAC3BsL,EAA+B,mBAAbjV,GAA2BA,EAC7C+J,GAASH,GAAQc,EAAY1K,EAAWiV,EAASjV,UAAYA,GAM9D,GAJAgE,EAAUA,GAAW,GAIC,IAAjB+F,EAAM1J,OAAe,CAIzB,GAAqB,GADrB+R,EAASrI,EAAO,GAAMA,EAAO,GAAI3M,MAAO,IAC5BiD,QAA+C,QAA/B2U,EAAQ5C,EAAQ,IAAM3T,MAC3B,IAArBwB,EAAQ5B,UAAkBoI,GAAkBJ,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAAS,CAMjF,KAJAwB,GAAYoG,EAAKsD,KAAK7B,GACrBkN,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvC1I,IACI,IAAM,IAEV,OAAO+D,EAGIiR,IACXhV,EAAUA,EAAQP,YAGnBM,EAAWA,EAAS5C,MAAOgV,EAAO/G,QAAQxG,MAAMxE,QAIjDpB,EAAI4I,EAAUQ,aAAa9D,KAAMvE,GAAa,EAAIoS,EAAO/R,OACzD,MAAQpB,IAAM,CAIb,GAHA+V,EAAQ5C,EAAQnT,GAGXoH,EAAKwH,SAAYpP,EAAOuW,EAAMvW,MAClC,MAED,IAAOkL,EAAOtD,EAAKsD,KAAMlL,MAGjBmL,EAAOD,EACbqL,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvCF,EAASlE,KAAM6N,EAAQ,GAAI3T,OAC1B+L,EAAavK,EAAQP,aAAgBO,IACjC,CAKL,GAFAmS,EAAOhQ,OAAQnD,EAAG,KAClBe,EAAW4J,EAAKv
J,QAAUsK,EAAYyH,IAGrC,OADA1U,EAAKD,MAAOuG,EAAS4F,GACd5F,EAGR,QAeJ,OAPEiR,GAAYhF,GAASjQ,EAAU+J,IAChCH,EACA3J,GACCwG,EACDzC,GACC/D,GAAWwI,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAAgBO,GAExE+D,EArlBR4L,EAAWlP,UAAY2F,EAAK6O,QAAU7O,EAAKiB,QAC3CjB,EAAKuJ,WAAa,IAAIA,EA2lBtB1R,GAAQyP,WAAa3K,EAAQiC,MAAO,IAAK9C,KAAM+E,GAAY0D,KAAM,MAAS5H,EAG1EkG,IAIAhL,GAAQiP,aAAe5B,EAAQ,SAAUC,GAGxC,OAA4E,EAArEA,EAAG7F,wBAAyBjJ,EAAS0C,cAAe,eAG5DW,GAAO4J,KAAOA,EAGd5J,GAAOqN,KAAM,KAAQrN,GAAOqN,KAAK9F,QACjCvH,GAAOoV,OAASpV,GAAO0N,WAIvB9D,EAAKsG,QAAUA,GACftG,EAAKqB,OAASA,GACdrB,EAAKT,YAAcA,EACnBS,EAAKe,SAAWA,EAEhBf,EAAKf,OAAS7I,GAAOkG,eACrB0D,EAAKyL,QAAUrV,GAAOV,KACtBsK,EAAK0L,MAAQtV,GAAOmE,SACpByF,EAAK2L,UAAYvV,GAAOqN,KACxBzD,EAAKzL,QAAU6B,GAAO7B,QACtByL,EAAK8D,WAAa1N,GAAO0N,WAniEzB,GA0iEA,IAAInE,EAAM,SAAU/I,EAAM+I,EAAKiM,GAC9B,IAAIzF,EAAU,GACb0F,OAAqBzS,IAAVwS,EAEZ,OAAUhV,EAAOA,EAAM+I,KAA6B,IAAlB/I,EAAKlC,SACtC,GAAuB,IAAlBkC,EAAKlC,SAAiB,CAC1B,GAAKmX,GAAYzV,GAAQQ,GAAOkV,GAAIF,GACnC,MAEDzF,EAAQpS,KAAM6C,GAGhB,OAAOuP,GAIJ4F,EAAW,SAAUC,EAAGpV,GAG3B,IAFA,IAAIuP,EAAU,GAEN6F,EAAGA,EAAIA,EAAEtE,YACI,IAAfsE,EAAEtX,UAAkBsX,IAAMpV,GAC9BuP,EAAQpS,KAAMiY,GAIhB,OAAO7F,GAIJ8F,EAAgB7V,GAAOqN,KAAKrD,MAAM1B,aAElCwN,EAAa,kEAKjB,SAASC,EAAQzI,EAAU0I,EAAWhG,GACrC,OAAK5R,EAAY4X,GACThW,GAAO8B,KAAMwL,EAAU,SAAU9M,EAAMtB,GAC7C,QAAS8W,EAAUxY,KAAMgD,EAAMtB,EAAGsB,KAAWwP,IAK1CgG,EAAU1X,SACP0B,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAASA,IAASwV,IAAgBhG,IAKV,iBAAdgG,EACJhW,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAA4C,EAAnC5C,GAAQJ,KAAMwY,EAAWxV,KAAkBwP,IAK/ChQ,GAAO4M,OAAQoJ,EAAW1I,EAAU0C,GAG5ChQ,GAAO4M,OAAS,SAAUS,EAAMnM,EAAO8O,GACtC,IAAIxP,EAAOU,EAAO,GAMlB,OAJK8O,IACJ3C,EAAO,QAAUA,EAAO,KAGH,IAAjBnM,EAAMZ,QAAkC,IAAlBE,EAAKlC,SACxB0B,GAAO4J,KAAK2D,gBAAiB/M,EAAM6M,GAAS,CAAE7M,GAAS,GAGxDR,GAAO4J,KAAKjF,QAAS0I,EAAMrN,GAAO8B,KAAMZ,EAAO,SAAUV,GAC/D,OAAyB,IAAlBA,EAAKlC,aAId0B,GAAOG,GAAGmC,OAAQ,CACjBsH,KAAM,SAAU3J,GACf,IAAIf,EAAGiC,EACNc,EAAMlF,KAAKuD,OACX2V,EAAOlZ,KAER,GAAyB,iBAAbkD,EACX,OAAOlD,KAAKkE,UAAWjB,GAAQC,GAAW2M,OAAQ,WACjD,IAAM1N,EAAI,EAAGA,EA
AI+C,EAAK/C,IACrB,GAAKc,GAAOwF,SAAUyQ,EAAM/W,GAAKnC,MAChC,OAAO,KAQX,IAFAoE,EAAMpE,KAAKkE,UAAW,IAEhB/B,EAAI,EAAGA,EAAI+C,EAAK/C,IACrBc,GAAO4J,KAAM3J,EAAUgW,EAAM/W,GAAKiC,GAGnC,OAAa,EAANc,EAAUjC,GAAO0N,WAAYvM,GAAQA,GAE7CyL,OAAQ,SAAU3M,GACjB,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtD+P,IAAK,SAAU/P,GACd,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtDyV,GAAI,SAAUzV,GACb,QAAS8V,EACRhZ,KAIoB,iBAAbkD,GAAyB4V,EAAcrR,KAAMvE,GACnDD,GAAQC,GACRA,GAAY,IACb,GACCK,UASJ,IAAI4V,EAMHzN,EAAa,uCAENzI,GAAOG,GAAGC,KAAO,SAAUH,EAAUC,EAASuQ,GACpD,IAAIzG,EAAOxJ,EAGX,IAAMP,EACL,OAAOlD,KAQR,GAHA0T,EAAOA,GAAQyF,EAGU,iBAAbjW,EAAwB,CAanC,KAPC+J,EALsB,MAAlB/J,EAAU,IACsB,MAApCA,EAAUA,EAASK,OAAS,IACT,GAAnBL,EAASK,OAGD,CAAE,KAAML,EAAU,MAGlBwI,EAAW2B,KAAMnK,MAIV+J,EAAO,IAAQ9J,EA6CxB,OAAMA,GAAWA,EAAQU,QACtBV,GAAWuQ,GAAO7G,KAAM3J,GAK1BlD,KAAK8D,YAAaX,GAAU0J,KAAM3J,GAhDzC,GAAK+J,EAAO,GAAM,CAYjB,GAXA9J,EAAUA,aAAmBF,GAASE,EAAS,GAAMA,EAIrDF,GAAOoB,MAAOrE,KAAMiD,GAAOmW,UAC1BnM,EAAO,GACP9J,GAAWA,EAAQ5B,SAAW4B,EAAQqE,eAAiBrE,EAAUvD,GACjE,IAIImZ,EAAWtR,KAAMwF,EAAO,KAAShK,GAAO6C,cAAe3C,GAC3D,IAAM8J,KAAS9J,EAGT9B,EAAYrB,KAAMiN,IACtBjN,KAAMiN,GAAS9J,EAAS8J,IAIxBjN,KAAKyQ,KAAMxD,EAAO9J,EAAS8J,IAK9B,OAAOjN,KAYP,OARAyD,EAAO7D,EAAS0N,eAAgBL,EAAO,OAKtCjN,KAAM,GAAMyD,EACZzD,KAAKuD,OAAS,GAERvD,KAcH,OAAKkD,EAAS3B,UACpBvB,KAAM,GAAMkD,EACZlD,KAAKuD,OAAS,EACPvD,MAIIqB,EAAY6B,QACD+C,IAAfyN,EAAK2F,MACX3F,EAAK2F,MAAOnW,GAGZA,EAAUD,IAGLA,GAAOgE,UAAW/D,EAAUlD,QAIhC4D,UAAYX,GAAOG,GAGxB+V,EAAalW,GAAQrD,GAGrB,IAAI0Z,EAAe,iCAGlBC,EAAmB,CAClBC,UAAU,EACVC,UAAU,EACVhN,MAAM,EACNiN,MAAM,GAoFR,SAASC,EAASC,EAAKpN,GACtB,OAAUoN,EAAMA,EAAKpN,KAA4B,IAAjBoN,EAAIrY,UACpC,OAAOqY,EAnFR3W,GAAOG,GAAGmC,OAAQ,CACjB8N,IAAK,SAAUzN,GACd,IAAIiU,EAAU5W,GAAQ2C,EAAQ5F,MAC7B8Z,EAAID,EAAQtW,OAEb,OAAOvD,KAAK6P,OAAQ,WAEnB,IADA,IAAI1N,EAAI,EACAA,EAAI2X,EAAG3X,IACd,GAAKc,GAAOwF,SAAUzI,KAAM6Z,EAAS1X,IACpC,OAAO,KAMX4X,QAAS,SAAUvB,EAAWrV,GAC7B,IAAIyW,EACHzX,EAAI,EACJ2X,EAAI9Z,KAAKuD,OACTyP,EAAU,GACV6G,EAA+B,iBAAdrB,GAA0BvV,GAAQuV,GAGpD,IAAMM,EAAcrR,KAAM+Q,GA
CzB,KAAQrW,EAAI2X,EAAG3X,IACd,IAAMyX,EAAM5Z,KAAMmC,GAAKyX,GAAOA,IAAQzW,EAASyW,EAAMA,EAAIhX,WAGxD,GAAKgX,EAAIrY,SAAW,KAAQsY,GACH,EAAxBA,EAAQG,MAAOJ,GAGE,IAAjBA,EAAIrY,UACH0B,GAAO4J,KAAK2D,gBAAiBoJ,EAAKpB,IAAgB,CAEnDxF,EAAQpS,KAAMgZ,GACd,MAMJ,OAAO5Z,KAAKkE,UAA4B,EAAjB8O,EAAQzP,OAAaN,GAAO0N,WAAYqC,GAAYA,IAI5EgH,MAAO,SAAUvW,GAGhB,OAAMA,EAKe,iBAATA,EACJ5C,GAAQJ,KAAMwC,GAAQQ,GAAQzD,KAAM,IAIrCa,GAAQJ,KAAMT,KAGpByD,EAAKI,OAASJ,EAAM,GAAMA,GAZjBzD,KAAM,IAAOA,KAAM,GAAI4C,WAAe5C,KAAK2E,QAAQsV,UAAU1W,QAAU,GAgBlF2W,IAAK,SAAUhX,EAAUC,GACxB,OAAOnD,KAAKkE,UACXjB,GAAO0N,WACN1N,GAAOoB,MAAOrE,KAAKgE,MAAOf,GAAQC,EAAUC,OAK/CgX,QAAS,SAAUjX,GAClB,OAAOlD,KAAKka,IAAiB,MAAZhX,EAChBlD,KAAKsE,WAAatE,KAAKsE,WAAWuL,OAAQ3M,OAU7CD,GAAOsB,KAAM,CACZgO,OAAQ,SAAU9O,GACjB,IAAI8O,EAAS9O,EAAKb,WAClB,OAAO2P,GAA8B,KAApBA,EAAOhR,SAAkBgR,EAAS,MAEpD6H,QAAS,SAAU3W,GAClB,OAAO+I,EAAK/I,EAAM,eAEnB4W,aAAc,SAAU5W,EAAM2E,EAAIqQ,GACjC,OAAOjM,EAAK/I,EAAM,aAAcgV,IAEjChM,KAAM,SAAUhJ,GACf,OAAOkW,EAASlW,EAAM,gBAEvBiW,KAAM,SAAUjW,GACf,OAAOkW,EAASlW,EAAM,oBAEvB6W,QAAS,SAAU7W,GAClB,OAAO+I,EAAK/I,EAAM,gBAEnBwW,QAAS,SAAUxW,GAClB,OAAO+I,EAAK/I,EAAM,oBAEnB8W,UAAW,SAAU9W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,cAAegV,IAElC+B,UAAW,SAAU/W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,kBAAmBgV,IAEtCG,SAAU,SAAUnV,GACnB,OAAOmV,GAAYnV,EAAKb,YAAc,IAAK8P,WAAYjP,IAExD+V,SAAU,SAAU/V,GACnB,OAAOmV,EAAUnV,EAAKiP,aAEvB+G,SAAU,SAAUhW,GACnB,OAA6B,MAAxBA,EAAKgX,iBAKTta,EAAUsD,EAAKgX,iBAERhX,EAAKgX,iBAMRjX,GAAUC,EAAM,cACpBA,EAAOA,EAAKiX,SAAWjX,GAGjBR,GAAOoB,MAAO,GAAIZ,EAAKiJ,eAE7B,SAAUhJ,EAAMN,GAClBH,GAAOG,GAAIM,GAAS,SAAU+U,EAAOvV,GACpC,IAAI8P,EAAU/P,GAAOwB,IAAKzE,KAAMoD,EAAIqV,GAuBpC,MArB0B,UAArB/U,EAAKpD,OAAQ,KACjB4C,EAAWuV,GAGPvV,GAAgC,iBAAbA,IACvB8P,EAAU/P,GAAO4M,OAAQ3M,EAAU8P,IAGjB,EAAdhT,KAAKuD,SAGHgW,EAAkB7V,IACvBT,GAAO0N,WAAYqC,GAIfsG,EAAa7R,KAAM/D,IACvBsP,EAAQ2H,WAIH3a,KAAKkE,UAAW8O,MAGzB,IAAI4H,EAAgB,oBAsOpB,SAASC,EAAUC,GAClB,OAAOA,EAER,SAASC,EAASC,GACjB,MAAMA,EAGP,SAASC,EAAYlT,EAAOmT,EAASC,EAAQC,GAC5C,IAAIC,EAEJ,IAGMtT,GAAS1G,EAAcga,EAAStT,EAAMu
T,SAC1CD,EAAO5a,KAAMsH,GAAQ+B,KAAMoR,GAAUK,KAAMJ,GAGhCpT,GAAS1G,EAAcga,EAAStT,EAAMyT,MACjDH,EAAO5a,KAAMsH,EAAOmT,EAASC,GAQ7BD,EAAQva,WAAOsF,EAAW,CAAE8B,GAAQzH,MAAO8a,IAM3C,MAAQrT,GAIToT,EAAOxa,WAAOsF,EAAW,CAAE8B,KAvO7B9E,GAAOwY,UAAY,SAAUjW,GA9B7B,IAAwBA,EACnBkW,EAiCJlW,EAA6B,iBAAZA,GAlCMA,EAmCPA,EAlCZkW,EAAS,GACbzY,GAAOsB,KAAMiB,EAAQyH,MAAO2N,IAAmB,GAAI,SAAUe,EAAGC,GAC/DF,EAAQE,IAAS,IAEXF,GA+BNzY,GAAOsC,OAAQ,GAAIC,GAEpB,IACCqW,EAGAC,EAGAC,EAGAC,EAGAC,EAAO,GAGPC,EAAQ,GAGRC,GAAe,EAGfC,EAAO,WAQN,IALAJ,EAASA,GAAUxW,EAAQ6W,KAI3BN,EAAQF,GAAS,EACTK,EAAM3Y,OAAQ4Y,GAAe,EAAI,CACxCL,EAASI,EAAM3N,QACf,QAAU4N,EAAcF,EAAK1Y,QAGmC,IAA1D0Y,EAAME,GAAcxb,MAAOmb,EAAQ,GAAKA,EAAQ,KACpDtW,EAAQ8W,cAGRH,EAAcF,EAAK1Y,OACnBuY,GAAS,GAMNtW,EAAQsW,SACbA,GAAS,GAGVD,GAAS,EAGJG,IAIHC,EADIH,EACG,GAIA,KAMV5C,EAAO,CAGNgB,IAAK,WA2BJ,OA1BK+B,IAGCH,IAAWD,IACfM,EAAcF,EAAK1Y,OAAS,EAC5B2Y,EAAMtb,KAAMkb,IAGb,SAAW5B,EAAKrH,GACf5P,GAAOsB,KAAMsO,EAAM,SAAU8I,EAAG7T,GAC1BzG,EAAYyG,GACVtC,EAAQ6S,QAAWa,EAAK7F,IAAKvL,IAClCmU,EAAKrb,KAAMkH,GAEDA,GAAOA,EAAIvE,QAA4B,WAAlBT,EAAQgF,IAGxCoS,EAAKpS,KATR,CAYKpD,WAEAoX,IAAWD,GACfO,KAGKpc,MAIRuc,OAAQ,WAYP,OAXAtZ,GAAOsB,KAAMG,UAAW,SAAUiX,EAAG7T,GACpC,IAAIkS,EACJ,OAA0D,GAAhDA,EAAQ/W,GAAOkE,QAASW,EAAKmU,EAAMjC,IAC5CiC,EAAK3W,OAAQ0U,EAAO,GAGfA,GAASmC,GACbA,MAIInc,MAKRqT,IAAK,SAAUjQ,GACd,OAAOA,GACwB,EAA9BH,GAAOkE,QAAS/D,EAAI6Y,GACN,EAAdA,EAAK1Y,QAIP+Q,MAAO,WAIN,OAHK2H,IACJA,EAAO,IAEDjc,MAMRwc,QAAS,WAGR,OAFAR,EAASE,EAAQ,GACjBD,EAAOH,EAAS,GACT9b,MAERuM,SAAU,WACT,OAAQ0P,GAMTQ,KAAM,WAKL,OAJAT,EAASE,EAAQ,GACXJ,GAAWD,IAChBI,EAAOH,EAAS,IAEV9b,MAERgc,OAAQ,WACP,QAASA,GAIVU,SAAU,SAAUvZ,EAAS0P,GAS5B,OARMmJ,IAELnJ,EAAO,CAAE1P,GADT0P,EAAOA,GAAQ,IACQvS,MAAQuS,EAAKvS,QAAUuS,GAC9CqJ,EAAMtb,KAAMiS,GACNgJ,GACLO,KAGKpc,MAIRoc,KAAM,WAEL,OADAlD,EAAKwD,SAAU1c,KAAM0E,WACd1E,MAIR+b,MAAO,WACN,QAASA,IAIZ,OAAO7C,GA4CRjW,GAAOsC,OAAQ,CAEdoX,SAAU,SAAUC,GACnB,IAAIC,EAAS,CAIX,CAAE,SAAU,WAAY5Z,GAAOwY,UAAW,UACzCxY,GAAOwY,UAAW,UAAY,GAC/B,CAAE,UAAW,OAAQxY,GAAOwY,UAAW,eACtCxY,GAAOwY,UAAW,eAAiB,EAAG,YACvC,CAAE,SAAU,OA
AQxY,GAAOwY,UAAW,eACrCxY,GAAOwY,UAAW,eAAiB,EAAG,aAExCqB,EAAQ,UACRxB,EAAU,CACTwB,MAAO,WACN,OAAOA,GAERC,OAAQ,WAEP,OADAC,EAASlT,KAAMpF,WAAY6W,KAAM7W,WAC1B1E,MAERid,QAAS,SAAU7Z,GAClB,OAAOkY,EAAQE,KAAM,KAAMpY,IAI5B8Z,KAAM,WACL,IAAIC,EAAMzY,UAEV,OAAOzB,GAAO0Z,SAAU,SAAUS,GACjCna,GAAOsB,KAAMsY,EAAQ,SAAUzU,EAAIiV,GAGlC,IAAIja,EAAK/B,EAAY8b,EAAKE,EAAO,MAAWF,EAAKE,EAAO,IAKxDL,EAAUK,EAAO,IAAO,WACvB,IAAIC,EAAWla,GAAMA,EAAGzC,MAAOX,KAAM0E,WAChC4Y,GAAYjc,EAAYic,EAAShC,SACrCgC,EAAShC,UACPiC,SAAUH,EAASI,QACnB1T,KAAMsT,EAASlC,SACfK,KAAM6B,EAASjC,QAEjBiC,EAAUC,EAAO,GAAM,QACtBrd,KACAoD,EAAK,CAAEka,GAAa5Y,eAKxByY,EAAM,OACH7B,WAELE,KAAM,SAAUiC,EAAaC,EAAYC,GACxC,IAAIC,EAAW,EACf,SAAS1C,EAAS2C,EAAOb,EAAUc,EAASC,GAC3C,OAAO,WACN,IAAIC,EAAOhe,KACV6S,EAAOnO,UACPuZ,EAAa,WACZ,IAAIX,EAAU9B,EAKd,KAAKqC,EAAQD,GAAb,CAQA,IAJAN,EAAWQ,EAAQnd,MAAOqd,EAAMnL,MAIdmK,EAAS1B,UAC1B,MAAM,IAAI4C,UAAW,4BAOtB1C,EAAO8B,IAKgB,iBAAbA,GACY,mBAAbA,IACRA,EAAS9B,KAGLna,EAAYma,GAGXuC,EACJvC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,KAOvCH,IAEApC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,GACtC7C,EAAS0C,EAAUZ,EAAUnC,EAC5BmC,EAASmB,eASPL,IAAYjD,IAChBmD,OAAO/X,EACP4M,EAAO,CAAEyK,KAKRS,GAAWf,EAASoB,aAAeJ,EAAMnL,MAK7CwL,EAAUN,EACTE,EACA,WACC,IACCA,IACC,MAAQtR,GAEJ1J,GAAO0Z,SAAS2B,eACpBrb,GAAO0Z,SAAS2B,cAAe3R,EAC9B0R,EAAQ9X,OAMQqX,GAAbC,EAAQ,IAIPC,IAAY/C,IAChBiD,OAAO/X,EACP4M,EAAO,CAAElG,IAGVqQ,EAASuB,WAAYP,EAAMnL,MAS3BgL,EACJQ,KAKKpb,GAAO0Z,SAAS6B,aACpBH,EAAQ9X,MAAQtD,GAAO0Z,SAAS6B,eAMrBvb,GAAO0Z,SAAS8B,eAC3BJ,EAAQ9X,MAAQtD,GAAO0Z,SAAS8B,gBAEjC1e,GAAO2e,WAAYL,KAKtB,OAAOpb,GAAO0Z,SAAU,SAAUS,GAGjCP,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYsc,GACXA,EACA9C,EACDuC,EAASe,aAKXtB,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYoc,GACXA,EACA5C,IAKHgC,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYqc,GACXA,EACA3C,MAGAO,WAKLA,QAAS,SAAUha,GAClB,OAAc,MAAPA,EAAc2B,GAAOsC,OAAQjE,EAAKga,GAAYA,IAGvD0B,EAAW,GAkEZ,OA/DA/Z,GAAOsB,KAAMsY,EAAQ,SAAU1a,EAAGkb,GAC
jC,IAAIpB,EAAOoB,EAAO,GACjBsB,EAActB,EAAO,GAKtB/B,EAAS+B,EAAO,IAAQpB,EAAK/B,IAGxByE,GACJ1C,EAAK/B,IACJ,WAIC4C,EAAQ6B,GAKT9B,EAAQ,EAAI1a,GAAK,GAAIqa,QAIrBK,EAAQ,EAAI1a,GAAK,GAAIqa,QAGrBK,EAAQ,GAAK,GAAIJ,KAGjBI,EAAQ,GAAK,GAAIJ,MAOnBR,EAAK/B,IAAKmD,EAAO,GAAIjB,MAKrBY,EAAUK,EAAO,IAAQ,WAExB,OADAL,EAAUK,EAAO,GAAM,QAAUrd,OAASgd,OAAW/W,EAAYjG,KAAM0E,WAChE1E,MAMRgd,EAAUK,EAAO,GAAM,QAAWpB,EAAKS,WAIxCpB,EAAQA,QAAS0B,GAGZJ,GACJA,EAAKnc,KAAMuc,EAAUA,GAIfA,GAIR4B,KAAM,SAAUC,GACf,IAGCC,EAAYpa,UAAUnB,OAGtBpB,EAAI2c,EAGJC,EAAkBhZ,MAAO5D,GACzB6c,EAAgB1e,GAAMG,KAAMiE,WAG5Bua,EAAUhc,GAAO0Z,WAGjBuC,EAAa,SAAU/c,GACtB,OAAO,SAAU4F,GAChBgX,EAAiB5c,GAAMnC,KACvBgf,EAAe7c,GAAyB,EAAnBuC,UAAUnB,OAAajD,GAAMG,KAAMiE,WAAcqD,IAC5D+W,GACTG,EAAQb,YAAaW,EAAiBC,KAM1C,GAAKF,GAAa,IACjB7D,EAAY4D,EAAaI,EAAQnV,KAAMoV,EAAY/c,IAAM+Y,QAAS+D,EAAQ9D,QACxE2D,GAGuB,YAApBG,EAAQnC,SACZzb,EAAY2d,EAAe7c,IAAO6c,EAAe7c,GAAIqZ,OAErD,OAAOyD,EAAQzD,OAKjB,MAAQrZ,IACP8Y,EAAY+D,EAAe7c,GAAK+c,EAAY/c,GAAK8c,EAAQ9D,QAG1D,OAAO8D,EAAQ3D,aAOjB,IAAI6D,EAAc,yDAKlBlc,GAAO0Z,SAAS2B,cAAgB,SAAU/X,EAAO6Y,GAI3Crf,GAAOsf,SAAWtf,GAAOsf,QAAQC,MAAQ/Y,GAAS4Y,EAAY1X,KAAMlB,EAAM7C,OAC9E3D,GAAOsf,QAAQC,KAAM,8BAAgC/Y,EAAMgZ,QAC1DhZ,EAAMiZ,MAAOJ,IAOhBnc,GAAOwc,eAAiB,SAAUlZ,GACjCxG,GAAO2e,WAAY,WAClB,MAAMnY,KAQR,IAAImZ,EAAYzc,GAAO0Z,WAkDvB,SAASgD,IACR/f,EAASggB,oBAAqB,mBAAoBD,GAClD5f,GAAO6f,oBAAqB,OAAQD,GACpC1c,GAAOoW,QAnDRpW,GAAOG,GAAGiW,MAAQ,SAAUjW,GAY3B,OAVAsc,EACElE,KAAMpY,GAKN6Z,SAAO,SAAU1W,GACjBtD,GAAOwc,eAAgBlZ,KAGlBvG,MAGRiD,GAAOsC,OAAQ,CAGde,SAAS,EAITuZ,UAAW,EAGXxG,MAAO,SAAUyG,KAGF,IAATA,IAAkB7c,GAAO4c,UAAY5c,GAAOqD,WAKjDrD,GAAOqD,SAAU,KAGZwZ,GAAsC,IAAnB7c,GAAO4c,WAK/BH,EAAUtB,YAAaxe,EAAU,CAAEqD,QAIrCA,GAAOoW,MAAMmC,KAAOkE,EAAUlE,KAaD,aAAxB5b,EAASmgB,YACa,YAAxBngB,EAASmgB,aAA6BngB,EAASmH,gBAAgBiZ,SAGjEjgB,GAAO2e,WAAYzb,GAAOoW,QAK1BzZ,EAAS2P,iBAAkB,mBAAoBoQ,GAG/C5f,GAAOwP,iBAAkB,OAAQoQ,IAQlC,IAAIM,EAAS,SAAU9b,EAAOf,EAAIiL,EAAKtG,EAAOmY,EAAWC,EAAUC,GAClE,IAAIje,EAAI,EACP+C,EAAMf,EAAMZ,OACZ8c,EAAc,MAAPhS,EAGR,GAAuB,WAAlBvL,EAAQuL,GAEZ,IAAMlM,KADN
+d,GAAY,EACD7R,EACV4R,EAAQ9b,EAAOf,EAAIjB,EAAGkM,EAAKlM,IAAK,EAAMge,EAAUC,QAI3C,QAAena,IAAV8B,IACXmY,GAAY,EAEN7e,EAAY0G,KACjBqY,GAAM,GAGFC,IAGCD,GACJhd,EAAG3C,KAAM0D,EAAO4D,GAChB3E,EAAK,OAILid,EAAOjd,EACPA,EAAK,SAAUK,EAAM6c,EAAMvY,GAC1B,OAAOsY,EAAK5f,KAAMwC,GAAQQ,GAAQsE,MAKhC3E,GACJ,KAAQjB,EAAI+C,EAAK/C,IAChBiB,EACCe,EAAOhC,GAAKkM,EAAK+R,EAChBrY,EACAA,EAAMtH,KAAM0D,EAAOhC,GAAKA,EAAGiB,EAAIe,EAAOhC,GAAKkM,KAMhD,OAAK6R,EACG/b,EAIHkc,EACGjd,EAAG3C,KAAM0D,GAGVe,EAAM9B,EAAIe,EAAO,GAAKkK,GAAQ8R,GAKlCI,EAAY,QACfC,EAAa,YAGd,SAASC,EAAYC,EAAMC,GAC1B,OAAOA,EAAOC,cAMf,SAASC,EAAWC,GACnB,OAAOA,EAAOza,QAASka,EAAW,OAAQla,QAASma,EAAYC,GAEhE,IAAIM,EAAa,SAAUC,GAQ1B,OAA0B,IAAnBA,EAAMzf,UAAqC,IAAnByf,EAAMzf,YAAsByf,EAAMzf,UAMlE,SAAS0f,IACRjhB,KAAKkG,QAAUjD,GAAOiD,QAAU+a,EAAKC,MAGtCD,EAAKC,IAAM,EAEXD,EAAKrd,UAAY,CAEhBwK,MAAO,SAAU4S,GAGhB,IAAIjZ,EAAQiZ,EAAOhhB,KAAKkG,SA4BxB,OAzBM6B,IACLA,EAAQ,GAKHgZ,EAAYC,KAIXA,EAAMzf,SACVyf,EAAOhhB,KAAKkG,SAAY6B,EAMxB3H,OAAO+gB,eAAgBH,EAAOhhB,KAAKkG,QAAS,CAC3C6B,MAAOA,EACPqZ,cAAc,MAMXrZ,GAERsZ,IAAK,SAAUL,EAAOM,EAAMvZ,GAC3B,IAAIwZ,EACHnT,EAAQpO,KAAKoO,MAAO4S,GAIrB,GAAqB,iBAATM,EACXlT,EAAOyS,EAAWS,IAAWvZ,OAM7B,IAAMwZ,KAAQD,EACblT,EAAOyS,EAAWU,IAAWD,EAAMC,GAGrC,OAAOnT,GAERpK,IAAK,SAAUgd,EAAO3S,GACrB,YAAepI,IAARoI,EACNrO,KAAKoO,MAAO4S,GAGZA,EAAOhhB,KAAKkG,UAAa8a,EAAOhhB,KAAKkG,SAAW2a,EAAWxS,KAE7D4R,OAAQ,SAAUe,EAAO3S,EAAKtG,GAa7B,YAAa9B,IAARoI,GACCA,GAAsB,iBAARA,QAAgCpI,IAAV8B,EAElC/H,KAAKgE,IAAKgd,EAAO3S,IASzBrO,KAAKqhB,IAAKL,EAAO3S,EAAKtG,QAIL9B,IAAV8B,EAAsBA,EAAQsG,IAEtCkO,OAAQ,SAAUyE,EAAO3S,GACxB,IAAIlM,EACHiM,EAAQ4S,EAAOhhB,KAAKkG,SAErB,QAAeD,IAAVmI,EAAL,CAIA,QAAanI,IAARoI,EAAoB,CAkBxBlM,GAXCkM,EAJItI,MAAMC,QAASqI,GAIbA,EAAI5J,IAAKoc,IAEfxS,EAAMwS,EAAWxS,MAIJD,EACZ,CAAEC,GACAA,EAAIpB,MAAO2N,IAAmB,IAG1BrX,OAER,MAAQpB,WACAiM,EAAOC,EAAKlM,UAKR8D,IAARoI,GAAqBpL,GAAO2D,cAAewH,MAM1C4S,EAAMzf,SACVyf,EAAOhhB,KAAKkG,cAAYD,SAEjB+a,EAAOhhB,KAAKkG,YAItBsb,QAAS,SAAUR,GAClB,IAAI5S,EAAQ4S,EAAOhhB,KAAKkG,SACxB,YAAiBD,IAAVmI,IAAwBnL,GAAO2D,cAAewH,KAGvD,IAAIqT,EAAW,IAAIR,
EAEfS,EAAW,IAAIT,EAcfU,EAAS,gCACZC,EAAa,SA2Bd,SAASC,EAAUpe,EAAM4K,EAAKiT,GAC7B,IAAI5d,EA1Ba4d,EA8BjB,QAAcrb,IAATqb,GAAwC,IAAlB7d,EAAKlC,SAI/B,GAHAmC,EAAO,QAAU2K,EAAIhI,QAASub,EAAY,OAAQje,cAG7B,iBAFrB2d,EAAO7d,EAAKjB,aAAckB,IAEM,CAC/B,IACC4d,EAnCW,UADGA,EAoCEA,IA/BL,UAATA,IAIS,SAATA,EACG,KAIHA,KAAUA,EAAO,IACbA,EAGJK,EAAOla,KAAM6Z,GACVQ,KAAKC,MAAOT,GAGbA,GAeH,MAAQ3U,IAGV+U,EAASL,IAAK5d,EAAM4K,EAAKiT,QAEzBA,OAAOrb,EAGT,OAAOqb,EAGRre,GAAOsC,OAAQ,CACdic,QAAS,SAAU/d,GAClB,OAAOie,EAASF,QAAS/d,IAAUge,EAASD,QAAS/d,IAGtD6d,KAAM,SAAU7d,EAAMC,EAAM4d,GAC3B,OAAOI,EAASzB,OAAQxc,EAAMC,EAAM4d,IAGrCU,WAAY,SAAUve,EAAMC,GAC3Bge,EAASnF,OAAQ9Y,EAAMC,IAKxBue,MAAO,SAAUxe,EAAMC,EAAM4d,GAC5B,OAAOG,EAASxB,OAAQxc,EAAMC,EAAM4d,IAGrCY,YAAa,SAAUze,EAAMC,GAC5B+d,EAASlF,OAAQ9Y,EAAMC,MAIzBT,GAAOG,GAAGmC,OAAQ,CACjB+b,KAAM,SAAUjT,EAAKtG,GACpB,IAAI5F,EAAGuB,EAAM4d,EACZ7d,EAAOzD,KAAM,GACbmiB,EAAQ1e,GAAQA,EAAK8G,WAGtB,QAAatE,IAARoI,EAAoB,CACxB,GAAKrO,KAAKuD,SACT+d,EAAOI,EAAS1d,IAAKP,GAEE,IAAlBA,EAAKlC,WAAmBkgB,EAASzd,IAAKP,EAAM,iBAAmB,CACnEtB,EAAIggB,EAAM5e,OACV,MAAQpB,IAIFggB,EAAOhgB,IAEsB,KADjCuB,EAAOye,EAAOhgB,GAAIuB,MACR7C,QAAS,WAClB6C,EAAOmd,EAAWnd,EAAKpD,MAAO,IAC9BuhB,EAAUpe,EAAMC,EAAM4d,EAAM5d,KAI/B+d,EAASJ,IAAK5d,EAAM,gBAAgB,GAItC,OAAO6d,EAIR,MAAoB,iBAARjT,EACJrO,KAAKuE,KAAM,WACjBmd,EAASL,IAAKrhB,KAAMqO,KAIf4R,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAIuZ,EAOJ,GAAK7d,QAAkBwC,IAAV8B,EAKZ,YAAc9B,KADdqb,EAAOI,EAAS1d,IAAKP,EAAM4K,IAEnBiT,OAMMrb,KADdqb,EAAOO,EAAUpe,EAAM4K,IAEfiT,OAIR,EAIDthB,KAAKuE,KAAM,WAGVmd,EAASL,IAAKrhB,KAAMqO,EAAKtG,MAExB,KAAMA,EAA0B,EAAnBrD,UAAUnB,OAAY,MAAM,IAG7Cye,WAAY,SAAU3T,GACrB,OAAOrO,KAAKuE,KAAM,WACjBmd,EAASnF,OAAQvc,KAAMqO,QAM1BpL,GAAOsC,OAAQ,CACd2W,MAAO,SAAUzY,EAAM9B,EAAM2f,GAC5B,IAAIpF,EAEJ,GAAKzY,EAYJ,OAXA9B,GAASA,GAAQ,MAAS,QAC1Bua,EAAQuF,EAASzd,IAAKP,EAAM9B,GAGvB2f,KACEpF,GAASnW,MAAMC,QAASsb,GAC7BpF,EAAQuF,EAASxB,OAAQxc,EAAM9B,EAAMsB,GAAOgE,UAAWqa,IAEvDpF,EAAMtb,KAAM0gB,IAGPpF,GAAS,IAIlBkG,QAAS,SAAU3e,EAAM9B,GACxBA,EAAOA,GAAQ,KAEf,IAAIua,EAAQjZ,GAAOiZ,MAAOzY,EAAM9B,GAC/B0gB,EAAcnG
,EAAM3Y,OACpBH,EAAK8Y,EAAM3N,QACX+T,EAAQrf,GAAOsf,YAAa9e,EAAM9B,GAMvB,eAAPyB,IACJA,EAAK8Y,EAAM3N,QACX8T,KAGIjf,IAIU,OAATzB,GACJua,EAAMsG,QAAS,qBAITF,EAAMG,KACbrf,EAAG3C,KAAMgD,EApBF,WACNR,GAAOmf,QAAS3e,EAAM9B,IAmBF2gB,KAGhBD,GAAeC,GACpBA,EAAMhO,MAAM8H,QAKdmG,YAAa,SAAU9e,EAAM9B,GAC5B,IAAI0M,EAAM1M,EAAO,aACjB,OAAO8f,EAASzd,IAAKP,EAAM4K,IAASoT,EAASxB,OAAQxc,EAAM4K,EAAK,CAC/DiG,MAAOrR,GAAOwY,UAAW,eAAgBvB,IAAK,WAC7CuH,EAASlF,OAAQ9Y,EAAM,CAAE9B,EAAO,QAAS0M,WAM7CpL,GAAOG,GAAGmC,OAAQ,CACjB2W,MAAO,SAAUva,EAAM2f,GACtB,IAAIoB,EAAS,EAQb,MANqB,iBAAT/gB,IACX2f,EAAO3f,EACPA,EAAO,KACP+gB,KAGIhe,UAAUnB,OAASmf,EAChBzf,GAAOiZ,MAAOlc,KAAM,GAAK2B,QAGjBsE,IAATqb,EACNthB,KACAA,KAAKuE,KAAM,WACV,IAAI2X,EAAQjZ,GAAOiZ,MAAOlc,KAAM2B,EAAM2f,GAGtCre,GAAOsf,YAAaviB,KAAM2B,GAEZ,OAATA,GAAgC,eAAfua,EAAO,IAC5BjZ,GAAOmf,QAASpiB,KAAM2B,MAI1BygB,QAAS,SAAUzgB,GAClB,OAAO3B,KAAKuE,KAAM,WACjBtB,GAAOmf,QAASpiB,KAAM2B,MAGxBghB,WAAY,SAAUhhB,GACrB,OAAO3B,KAAKkc,MAAOva,GAAQ,KAAM,KAKlC2Z,QAAS,SAAU3Z,EAAML,GACxB,IAAIshB,EACHC,EAAQ,EACRC,EAAQ7f,GAAO0Z,WACfpM,EAAWvQ,KACXmC,EAAInC,KAAKuD,OACT2X,EAAU,aACC2H,GACTC,EAAM1E,YAAa7N,EAAU,CAAEA,KAIb,iBAAT5O,IACXL,EAAMK,EACNA,OAAOsE,GAERtE,EAAOA,GAAQ,KAEf,MAAQQ,KACPygB,EAAMnB,EAASzd,IAAKuM,EAAUpO,GAAKR,EAAO,gBAC9BihB,EAAItO,QACfuO,IACAD,EAAItO,MAAM4F,IAAKgB,IAIjB,OADAA,IACO4H,EAAMxH,QAASha,MAGxB,IAAIyhB,EAAO,sCAA0CC,OAEjDC,EAAU,IAAIza,OAAQ,iBAAmBua,EAAO,cAAe,KAG/DG,EAAY,CAAE,MAAO,QAAS,SAAU,QAExCnc,EAAkBnH,EAASmH,gBAI1Boc,EAAa,SAAU1f,GACzB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAE7C2f,EAAW,CAAEA,UAAU,GAOnBrc,EAAgBsc,cACpBF,EAAa,SAAU1f,GACtB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAC3CA,EAAK4f,YAAaD,KAAe3f,EAAK+D,gBAG1C,IAAI8b,GAAqB,SAAU7f,EAAMiL,GAOvC,MAA8B,UAH9BjL,EAAOiL,GAAMjL,GAGD8f,MAAMC,SACM,KAAvB/f,EAAK8f,MAAMC,SAMXL,EAAY1f,IAEsB,SAAlCR,GAAOwgB,IAAKhgB,EAAM,YAKrB,SAASigB,GAAWjgB,EAAM8d,EAAMoC,EAAYC,GAC3C,IAAIC,EAAUC,EACbC,EAAgB,GAChBC,EAAeJ,EACd,WACC,OAAOA,EAAMhK,OAEd,WACC,OAAO3W,GAAOwgB,IAAKhgB,EAAM8d,EAAM,KAEjC0C,EAAUD,IACVE,EAAOP,GAAcA,EAAY,KAAS1gB,GAAOkhB,UAAW5C,GAAS,GAAK,MAG
1E6C,EAAgB3gB,EAAKlC,WAClB0B,GAAOkhB,UAAW5C,IAAmB,OAAT2C,IAAkBD,IAChDhB,EAAQ5V,KAAMpK,GAAOwgB,IAAKhgB,EAAM8d,IAElC,GAAK6C,GAAiBA,EAAe,KAAQF,EAAO,CAInDD,GAAoB,EAGpBC,EAAOA,GAAQE,EAAe,GAG9BA,GAAiBH,GAAW,EAE5B,MAAQF,IAIP9gB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,IACnC,EAAIJ,IAAY,GAAMA,EAAQE,IAAiBC,GAAW,MAAW,IAC3EF,EAAgB,GAEjBK,GAAgCN,EAIjCM,GAAgC,EAChCnhB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,GAG1CP,EAAaA,GAAc,GAgB5B,OAbKA,IACJS,GAAiBA,IAAkBH,GAAW,EAG9CJ,EAAWF,EAAY,GACtBS,GAAkBT,EAAY,GAAM,GAAMA,EAAY,IACrDA,EAAY,GACTC,IACJA,EAAMM,KAAOA,EACbN,EAAMtR,MAAQ8R,EACdR,EAAMxe,IAAMye,IAGPA,EAIR,IAAIQ,GAAoB,GAyBxB,SAASC,GAAU/T,EAAUgU,GAO5B,IANA,IAAIf,EAAS/f,EAxBcA,EACvBiT,EACHxU,EACAsB,EACAggB,EAqBAgB,EAAS,GACTxK,EAAQ,EACRzW,EAASgN,EAAShN,OAGXyW,EAAQzW,EAAQyW,KACvBvW,EAAO8M,EAAUyJ,IACNuJ,QAIXC,EAAU/f,EAAK8f,MAAMC,QAChBe,GAKa,SAAZf,IACJgB,EAAQxK,GAAUyH,EAASzd,IAAKP,EAAM,YAAe,KAC/C+gB,EAAQxK,KACbvW,EAAK8f,MAAMC,QAAU,KAGK,KAAvB/f,EAAK8f,MAAMC,SAAkBF,GAAoB7f,KACrD+gB,EAAQxK,IA7CVwJ,EAFAthB,EADGwU,OAAAA,EACHxU,GAF0BuB,EAiDaA,GA/C5B+D,cACXhE,EAAWC,EAAKD,UAChBggB,EAAUa,GAAmB7gB,MAM9BkT,EAAOxU,EAAIuiB,KAAK9hB,YAAaT,EAAII,cAAekB,IAChDggB,EAAUvgB,GAAOwgB,IAAK/M,EAAM,WAE5BA,EAAK9T,WAAWC,YAAa6T,GAEZ,SAAZ8M,IACJA,EAAU,SAEXa,GAAmB7gB,GAAaggB,MAkCb,SAAZA,IACJgB,EAAQxK,GAAU,OAGlByH,EAASJ,IAAK5d,EAAM,UAAW+f,KAMlC,IAAMxJ,EAAQ,EAAGA,EAAQzW,EAAQyW,IACR,MAAnBwK,EAAQxK,KACZzJ,EAAUyJ,GAAQuJ,MAAMC,QAAUgB,EAAQxK,IAI5C,OAAOzJ,EAGRtN,GAAOG,GAAGmC,OAAQ,CACjBgf,KAAM,WACL,OAAOD,GAAUtkB,MAAM,IAExB0kB,KAAM,WACL,OAAOJ,GAAUtkB,OAElB2kB,OAAQ,SAAU7H,GACjB,MAAsB,kBAAVA,EACJA,EAAQ9c,KAAKukB,OAASvkB,KAAK0kB,OAG5B1kB,KAAKuE,KAAM,WACZ+e,GAAoBtjB,MACxBiD,GAAQjD,MAAOukB,OAEfthB,GAAQjD,MAAO0kB,YAKnB,IAUEE,GACA1U,GAXE2U,GAAiB,wBAEjBC,GAAW,iCAEXC,GAAc,qCAMhBH,GADchlB,EAASolB,yBACRriB,YAAa/C,EAAS0C,cAAe,SACpD4N,GAAQtQ,EAAS0C,cAAe,UAM3BG,aAAc,OAAQ,SAC5ByN,GAAMzN,aAAc,UAAW,WAC/ByN,GAAMzN,aAAc,OAAQ,KAE5BmiB,GAAIjiB,YAAauN,IAIjB9O,GAAQ6jB,WAAaL,GAAIM,WAAW,GAAOA,WAAW,GAAOvS,UAAUwB,QAIvEyQ,GAAIzU,UAAY,yBAChB/O,GAAQ+jB,iBAAmBP,GAAIM,WAAW
,GAAOvS,UAAUyS,aAK3DR,GAAIzU,UAAY,oBAChB/O,GAAQikB,SAAWT,GAAIjS,UAKxB,IAAI2S,GAAU,CAKbC,MAAO,CAAE,EAAG,UAAW,YACvBC,IAAK,CAAE,EAAG,oBAAqB,uBAC/BC,GAAI,CAAE,EAAG,iBAAkB,oBAC3BC,GAAI,CAAE,EAAG,qBAAsB,yBAE/BC,SAAU,CAAE,EAAG,GAAI,KAYpB,SAASC,GAAQziB,EAAS6M,GAIzB,IAAI5L,EAYJ,OATCA,EAD4C,oBAAjCjB,EAAQqK,qBACbrK,EAAQqK,qBAAsBwC,GAAO,KAEI,oBAA7B7M,EAAQ4K,iBACpB5K,EAAQ4K,iBAAkBiC,GAAO,KAGjC,QAGM/J,IAAR+J,GAAqBA,GAAOxM,GAAUL,EAAS6M,GAC5C/M,GAAOoB,MAAO,CAAElB,GAAWiB,GAG5BA,EAKR,SAASyhB,GAAe1hB,EAAO2hB,GAI9B,IAHA,IAAI3jB,EAAI,EACP2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IACdsf,EAASJ,IACRld,EAAOhC,GACP,cACC2jB,GAAerE,EAASzd,IAAK8hB,EAAa3jB,GAAK,eA1CnDmjB,GAAQS,MAAQT,GAAQU,MAAQV,GAAQW,SAAWX,GAAQY,QAAUZ,GAAQC,MAC7ED,GAAQa,GAAKb,GAAQI,GAGftkB,GAAQikB,SACbC,GAAQc,SAAWd,GAAQD,OAAS,CAAE,EAAG,+BAAgC,cA2C1E,IAAIgB,GAAQ,YAEZ,SAASC,GAAeniB,EAAOhB,EAASojB,EAASC,EAAWC,GAO3D,IANA,IAAIhjB,EAAMmf,EAAK5S,EAAK0W,EAAMC,EAAUxhB,EACnCyhB,EAAWzjB,EAAQ6hB,yBACnB6B,EAAQ,GACR1kB,EAAI,EACJ2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IAGd,IAFAsB,EAAOU,EAAOhC,KAEQ,IAATsB,EAGZ,GAAwB,WAAnBX,EAAQW,GAIZR,GAAOoB,MAAOwiB,EAAOpjB,EAAKlC,SAAW,CAAEkC,GAASA,QAG1C,GAAM4iB,GAAM5e,KAAMhE,GAIlB,CACNmf,EAAMA,GAAOgE,EAASjkB,YAAaQ,EAAQb,cAAe,QAG1D0N,GAAQ8U,GAASzX,KAAM5J,IAAU,CAAE,GAAI,KAAQ,GAAIE,cACnD+iB,EAAOpB,GAAStV,IAASsV,GAAQK,SACjC/C,EAAIzS,UAAYuW,EAAM,GAAMzjB,GAAO6jB,cAAerjB,GAASijB,EAAM,GAGjEvhB,EAAIuhB,EAAM,GACV,MAAQvhB,IACPyd,EAAMA,EAAIjQ,UAKX1P,GAAOoB,MAAOwiB,EAAOjE,EAAIlW,aAGzBkW,EAAMgE,EAASlU,YAGX5L,YAAc,QAzBlB+f,EAAMjmB,KAAMuC,EAAQ4jB,eAAgBtjB,IA+BvCmjB,EAAS9f,YAAc,GAEvB3E,EAAI,EACJ,MAAUsB,EAAOojB,EAAO1kB,KAGvB,GAAKqkB,IAAkD,EAArCvjB,GAAOkE,QAAS1D,EAAM+iB,GAClCC,GACJA,EAAQ7lB,KAAM6C,QAgBhB,GAXAkjB,EAAWxD,EAAY1f,GAGvBmf,EAAMgD,GAAQgB,EAASjkB,YAAac,GAAQ,UAGvCkjB,GACJd,GAAejD,GAIX2D,EAAU,CACdphB,EAAI,EACJ,MAAU1B,EAAOmf,EAAKzd,KAChB4f,GAAYtd,KAAMhE,EAAK9B,MAAQ,KACnC4kB,EAAQ3lB,KAAM6C,GAMlB,OAAOmjB,EAIR,IAAII,GAAiB,sBAErB,SAASC,KACR,OAAO,EAGR,SAASC,KACR,OAAO,EAGR,SAASC,GAAI1jB,EAAM2jB,EAAOlkB,EAAUoe,EAAMle,EAAIikB,GAC7C,IAAIC,
EAAQ3lB,EAGZ,GAAsB,iBAAVylB,EAAqB,CAShC,IAAMzlB,IANmB,iBAAbuB,IAGXoe,EAAOA,GAAQpe,EACfA,OAAW+C,GAEEmhB,EACbD,GAAI1jB,EAAM9B,EAAMuB,EAAUoe,EAAM8F,EAAOzlB,GAAQ0lB,GAEhD,OAAO5jB,EAsBR,GAnBa,MAAR6d,GAAsB,MAANle,GAGpBA,EAAKF,EACLoe,EAAOpe,OAAW+C,GACD,MAAN7C,IACc,iBAAbF,GAGXE,EAAKke,EACLA,OAAOrb,IAIP7C,EAAKke,EACLA,EAAOpe,EACPA,OAAW+C,KAGD,IAAP7C,EACJA,EAAK8jB,QACC,IAAM9jB,EACZ,OAAOK,EAeR,OAZa,IAAR4jB,IACJC,EAASlkB,GACTA,EAAK,SAAUmkB,GAId,OADAtkB,KAASukB,IAAKD,GACPD,EAAO3mB,MAAOX,KAAM0E,aAIzBsD,KAAOsf,EAAOtf,OAAUsf,EAAOtf,KAAO/E,GAAO+E,SAE1CvE,EAAKc,KAAM,WACjBtB,GAAOskB,MAAMrN,IAAKla,KAAMonB,EAAOhkB,EAAIke,EAAMpe,KA+a3C,SAASukB,GAAgB/Y,EAAI/M,EAAM+lB,GAG5BA,GAQNjG,EAASJ,IAAK3S,EAAI/M,GAAM,GACxBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAM,CAC3B0F,WAAW,EACXyW,QAAS,SAAUyJ,GAClB,IAAI3V,EACH+V,EAAQlG,EAASzd,IAAKhE,KAAM2B,GAE7B,GAAyB,EAAlB4lB,EAAMK,WAAmB5nB,KAAM2B,IAGrC,GAAMgmB,GA4BQ1kB,GAAOskB,MAAMxJ,QAASpc,IAAU,IAAKkmB,cAClDN,EAAMO,uBAhBN,GARAH,EAAQrnB,GAAMG,KAAMiE,WACpB+c,EAASJ,IAAKrhB,KAAM2B,EAAMgmB,GAG1B3nB,KAAM2B,KACNiQ,EAAS6P,EAASzd,IAAKhE,KAAM2B,GAC7B8f,EAASJ,IAAKrhB,KAAM2B,GAAM,GAErBgmB,IAAU/V,EAMd,OAHA2V,EAAMQ,2BACNR,EAAMS,iBAECpW,OAeE+V,IAGXlG,EAASJ,IAAKrhB,KAAM2B,EAAMsB,GAAOskB,MAAMU,QACtCN,EAAO,GACPA,EAAMrnB,MAAO,GACbN,OAWDunB,EAAMO,kBACNP,EAAMW,8BAAgCjB,aArENhhB,IAA7Bwb,EAASzd,IAAK0K,EAAI/M,IACtBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAMslB,IA5a/BhkB,GAAOskB,MAAQ,CAEd/nB,OAAQ,GAER0a,IAAK,SAAUzW,EAAM2jB,EAAOtJ,EAASwD,EAAMpe,GAE1C,IAAIilB,EAAaC,EAAaxF,EAC7ByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASzd,IAAKP,GAG1B,GAAMsd,EAAYtd,GAAlB,CAKKqa,EAAQA,UAEZA,GADAqK,EAAcrK,GACQA,QACtB5a,EAAWilB,EAAYjlB,UAKnBA,GACJD,GAAO4J,KAAK2D,gBAAiBzJ,EAAiB7D,GAIzC4a,EAAQ9V,OACb8V,EAAQ9V,KAAO/E,GAAO+E,SAIfqgB,EAASM,EAASN,UACzBA,EAASM,EAASN,OAASjoB,OAAOwoB,OAAQ,QAEnCR,EAAcO,EAASE,UAC9BT,EAAcO,EAASE,OAAS,SAAUlc,GAIzC,MAAyB,oBAAX1J,IAA0BA,GAAOskB,MAAMuB,YAAcnc,EAAEhL,KACpEsB,GAAOskB,MAAMwB,SAASpoB,MAAO8C,EAAMiB,gBAAcuB,IAMpDqiB,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,
OACV,MAAQ+kB,IAEP3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,KAAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,IAKNoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1CA,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,EAGjEoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1C4mB,EAAYtlB,GAAOsC,OAAQ,CAC1B5D,KAAMA,EACN+mB,SAAUA,EACVpH,KAAMA,EACNxD,QAASA,EACT9V,KAAM8V,EAAQ9V,KACd9E,SAAUA,EACVqI,aAAcrI,GAAYD,GAAOqN,KAAKrD,MAAM1B,aAAa9D,KAAMvE,GAC/DmE,UAAWohB,EAAW3a,KAAM,MAC1Bqa,IAGKK,EAAWH,EAAQ1mB,OAC1B6mB,EAAWH,EAAQ1mB,GAAS,IACnBsnB,cAAgB,EAGnBlL,EAAQmL,QACiD,IAA9DnL,EAAQmL,MAAMzoB,KAAMgD,EAAM6d,EAAMmH,EAAYL,IAEvC3kB,EAAK8L,kBACT9L,EAAK8L,iBAAkB5N,EAAMymB,IAK3BrK,EAAQ7D,MACZ6D,EAAQ7D,IAAIzZ,KAAMgD,EAAM8kB,GAElBA,EAAUzK,QAAQ9V,OACvBugB,EAAUzK,QAAQ9V,KAAO8V,EAAQ9V,OAK9B9E,EACJslB,EAASljB,OAAQkjB,EAASS,gBAAiB,EAAGV,GAE9CC,EAAS5nB,KAAM2nB,GAIhBtlB,GAAOskB,MAAM/nB,OAAQmC,IAAS,KAMhC4a,OAAQ,SAAU9Y,EAAM2jB,EAAOtJ,EAAS5a,EAAUimB,GAEjD,IAAIhkB,EAAGikB,EAAWxG,EACjByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASD,QAAS/d,IAAUge,EAASzd,IAAKP,GAEtD,GAAMklB,IAAeN,EAASM,EAASN,QAAvC,CAMAC,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,OACV,MAAQ+kB,IAMP,GAJA3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,KAAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,EAAN,CAOAoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAE1C6mB,EAAWH,EADX1mB,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,IACpC,GAC7BihB,EAAMA,EAAK,IACV,IAAIpa,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAG9Dsb,EAAYjkB,EAAIqjB,EAASjlB,OACzB,MAAQ4B,IACPojB,EAAYC,EAAUrjB,IAEfgkB,GAAeT,IAAaH,EAAUG,UACzC5K,GAAWA,EAAQ9V,OAASugB,EAAUvgB,MACtC4a,IAAOA,EAAInb,KAAM8gB,EAAUlhB,YAC3BnE,GAAYA,IAAaqlB,EAAUrlB,WACxB,OAAbA,IAAqBqlB,EAAUrlB,YAChCslB,EAASljB,OAAQH,EAAG,GAEfojB,EAAUrlB,UACdslB,EAASS,gBAELlL,EAAQxB,QACZwB,EAAQxB,OAAO9b,KAAMgD,EAAM8kB,IAOzBa,IAAcZ,EAASjlB,SACrBwa,EAAQsL,WACkD,IAA/DtL,EAAQsL,SAAS5oB,KAAMgD,EAAMglB,EAAYE,EAASE,SAElD5lB,GAAOqmB,YAAa7lB,EAAM9B,EAAMgnB,EAASE,eAGnCR,EAAQ1mB,SA1Cf,IAAMA,KAAQ0mB,EACbplB,GAAOskB,MAAMhL,OAAQ9Y,EAA
M9B,EAAOylB,EAAOkB,GAAKxK,EAAS5a,GAAU,GA8C/DD,GAAO2D,cAAeyhB,IAC1B5G,EAASlF,OAAQ9Y,EAAM,mBAIzBslB,SAAU,SAAUQ,GAEnB,IAAIpnB,EAAGgD,EAAGf,EAAK4O,EAASuV,EAAWiB,EAClC3W,EAAO,IAAI9M,MAAOrB,UAAUnB,QAG5BgkB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAE1Bf,GACC/G,EAASzd,IAAKhE,KAAM,WAAcI,OAAOwoB,OAAQ,OAC/CrB,EAAM5lB,OAAU,GACnBoc,EAAU9a,GAAOskB,MAAMxJ,QAASwJ,EAAM5lB,OAAU,GAKjD,IAFAkR,EAAM,GAAM0U,EAENplB,EAAI,EAAGA,EAAIuC,UAAUnB,OAAQpB,IAClC0Q,EAAM1Q,GAAMuC,UAAWvC,GAMxB,GAHAolB,EAAMmC,eAAiB1pB,MAGlB+d,EAAQ4L,cAA2D,IAA5C5L,EAAQ4L,YAAYlpB,KAAMT,KAAMunB,GAA5D,CAKAiC,EAAevmB,GAAOskB,MAAMiB,SAAS/nB,KAAMT,KAAMunB,EAAOiB,GAGxDrmB,EAAI,EACJ,OAAU6Q,EAAUwW,EAAcrnB,QAAYolB,EAAMqC,uBAAyB,CAC5ErC,EAAMsC,cAAgB7W,EAAQvP,KAE9B0B,EAAI,EACJ,OAAUojB,EAAYvV,EAAQwV,SAAUrjB,QACtCoiB,EAAMW,gCAIDX,EAAMuC,aAAsC,IAAxBvB,EAAUlhB,YACnCkgB,EAAMuC,WAAWriB,KAAM8gB,EAAUlhB,aAEjCkgB,EAAMgB,UAAYA,EAClBhB,EAAMjG,KAAOiH,EAAUjH,UAKVrb,KAHb7B,IAAUnB,GAAOskB,MAAMxJ,QAASwK,EAAUG,WAAc,IAAKG,QAC5DN,EAAUzK,SAAUnd,MAAOqS,EAAQvP,KAAMoP,MAGT,KAAzB0U,EAAM3V,OAASxN,KACrBmjB,EAAMS,iBACNT,EAAMO,oBAYX,OAJK/J,EAAQgM,cACZhM,EAAQgM,aAAatpB,KAAMT,KAAMunB,GAG3BA,EAAM3V,SAGd4W,SAAU,SAAUjB,EAAOiB,GAC1B,IAAIrmB,EAAGomB,EAAWnf,EAAK4gB,EAAiBC,EACvCT,EAAe,GACfP,EAAgBT,EAASS,cACzBrP,EAAM2N,EAAM3hB,OAGb,GAAKqjB,GAIJrP,EAAIrY,YAOc,UAAfgmB,EAAM5lB,MAAoC,GAAhB4lB,EAAM9S,QAEnC,KAAQmF,IAAQ5Z,KAAM4Z,EAAMA,EAAIhX,YAAc5C,KAI7C,GAAsB,IAAjB4Z,EAAIrY,WAAoC,UAAfgmB,EAAM5lB,OAAqC,IAAjBiY,EAAIrN,UAAsB,CAGjF,IAFAyd,EAAkB,GAClBC,EAAmB,GACb9nB,EAAI,EAAGA,EAAI8mB,EAAe9mB,SAME8D,IAA5BgkB,EAFL7gB,GAHAmf,EAAYC,EAAUrmB,IAGNe,SAAW,OAG1B+mB,EAAkB7gB,GAAQmf,EAAUhd,cACC,EAApCtI,GAAQmG,EAAKpJ,MAAOga,MAAOJ,GAC3B3W,GAAO4J,KAAMzD,EAAKpJ,KAAM,KAAM,CAAE4Z,IAAQrW,QAErC0mB,EAAkB7gB,IACtB4gB,EAAgBppB,KAAM2nB,GAGnByB,EAAgBzmB,QACpBimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUwB,IAY9C,OALApQ,EAAM5Z,KACDipB,EAAgBT,EAASjlB,QAC7BimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUA,EAASloB,MAAO2oB,KAGpDO,GAGRU,QAAS,SAAUxmB,EAAMymB,GACxB/pB,OAAO+gB,eAAgBle,GAAOmnB,MAAMxmB,UAAWF,EAAM,CACpD2mB,YAAY,EACZjJ,
cAAc,EAEdpd,IAAK3C,EAAY8oB,GAChB,WACC,GAAKnqB,KAAKsqB,cACT,OAAOH,EAAMnqB,KAAKsqB,gBAGpB,WACC,GAAKtqB,KAAKsqB,cACT,OAAOtqB,KAAKsqB,cAAe5mB,IAI9B2d,IAAK,SAAUtZ,GACd3H,OAAO+gB,eAAgBnhB,KAAM0D,EAAM,CAClC2mB,YAAY,EACZjJ,cAAc,EACdmJ,UAAU,EACVxiB,MAAOA,QAMX0hB,IAAK,SAAUa,GACd,OAAOA,EAAernB,GAAOiD,SAC5BokB,EACA,IAAIrnB,GAAOmnB,MAAOE,IAGpBvM,QAAS,CACRyM,KAAM,CAGLC,UAAU,GAEXC,MAAO,CAGNxB,MAAO,SAAU5H,GAIhB,IAAI5S,EAAK1O,MAAQshB,EAWjB,OARKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAG1B+Y,GAAgB/Y,EAAI,SAAS,IAIvB,GAERuZ,QAAS,SAAU3G,GAIlB,IAAI5S,EAAK1O,MAAQshB,EAUjB,OAPKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAE1B+Y,GAAgB/Y,EAAI,UAId,GAKRiX,SAAU,SAAU4B,GACnB,IAAI3hB,EAAS2hB,EAAM3hB,OACnB,OAAOif,GAAepd,KAAM7B,EAAOjE,OAClCiE,EAAO8kB,OAASlnB,GAAUoC,EAAQ,UAClC6b,EAASzd,IAAK4B,EAAQ,UACtBpC,GAAUoC,EAAQ,OAIrB+kB,aAAc,CACbZ,aAAc,SAAUxC,QAIDthB,IAAjBshB,EAAM3V,QAAwB2V,EAAM+C,gBACxC/C,EAAM+C,cAAcM,YAAcrD,EAAM3V,YA0F7C3O,GAAOqmB,YAAc,SAAU7lB,EAAM9B,EAAMknB,GAGrCplB,EAAKmc,qBACTnc,EAAKmc,oBAAqBje,EAAMknB,IAIlC5lB,GAAOmnB,MAAQ,SAAUxoB,EAAKipB,GAG7B,KAAQ7qB,gBAAgBiD,GAAOmnB,OAC9B,OAAO,IAAInnB,GAAOmnB,MAAOxoB,EAAKipB,GAI1BjpB,GAAOA,EAAID,MACf3B,KAAKsqB,cAAgB1oB,EACrB5B,KAAK2B,KAAOC,EAAID,KAIhB3B,KAAK8qB,mBAAqBlpB,EAAImpB,uBACH9kB,IAAzBrE,EAAImpB,mBAGgB,IAApBnpB,EAAIgpB,YACL3D,GACAC,GAKDlnB,KAAK4F,OAAWhE,EAAIgE,QAAkC,IAAxBhE,EAAIgE,OAAOrE,SACxCK,EAAIgE,OAAOhD,WACXhB,EAAIgE,OAEL5F,KAAK6pB,cAAgBjoB,EAAIioB,cACzB7pB,KAAKgrB,cAAgBppB,EAAIopB,eAIzBhrB,KAAK2B,KAAOC,EAIRipB,GACJ5nB,GAAOsC,OAAQvF,KAAM6qB,GAItB7qB,KAAKirB,UAAYrpB,GAAOA,EAAIqpB,WAAaC,KAAKC,MAG9CnrB,KAAMiD,GAAOiD,UAAY,GAK1BjD,GAAOmnB,MAAMxmB,UAAY,CACxBE,YAAab,GAAOmnB,MACpBU,mBAAoB5D,GACpB0C,qBAAsB1C,GACtBgB,8BAA+BhB,GAC/BkE,aAAa,EAEbpD,eAAgB,WACf,IAAIrb,EAAI3M,KAAKsqB,cAEbtqB,KAAK8qB,mBAAqB7D,GAErBta,IAAM3M,KAAKorB,aACfze,EAAEqb,kBAGJF,gBAAiB,WAChB,IAAInb,EAAI3M,KAAKsqB,cAEbtqB,KAAK4pB,qBAAuB3C,GAEvBta,IAAM3M,KAAKorB,aACfze,EAAEmb,mBAGJC,yBAA0B,WACzB,IAAIpb,EAAI3M,KAAKsqB,cAEbtqB,KAAKkoB,8BAAgCjB,GAEhCta
,IAAM3M,KAAKorB,aACfze,EAAEob,2BAGH/nB,KAAK8nB,oBAKP7kB,GAAOsB,KAAM,CACZ8mB,QAAQ,EACRC,SAAS,EACTC,YAAY,EACZC,gBAAgB,EAChBC,SAAS,EACTC,QAAQ,EACRC,YAAY,EACZC,SAAS,EACTC,OAAO,EACPC,OAAO,EACPC,UAAU,EACVC,MAAM,EACNC,QAAQ,EACRjqB,MAAM,EACNkqB,UAAU,EACV7d,KAAK,EACL8d,SAAS,EACT1X,QAAQ,EACR2X,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,WAAW,EACXC,aAAa,EACbC,SAAS,EACTC,SAAS,EACTC,eAAe,EACfC,WAAW,EACXC,SAAS,EACTC,OAAO,GACL/pB,GAAOskB,MAAM2C,SAEhBjnB,GAAOsB,KAAM,CAAEoP,MAAO,UAAWsZ,KAAM,YAAc,SAAUtrB,EAAMkmB,GAEpE,SAASqF,EAAoB3D,GAC5B,GAAK3pB,EAASutB,aAAe,CAS5B,IAAItE,EAASpH,EAASzd,IAAKhE,KAAM,UAChCunB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAC3BhC,EAAM5lB,KAA4B,YAArB4nB,EAAY5nB,KAAqB,QAAU,OACxD4lB,EAAM6D,aAAc,EAGpBvC,EAAQU,GAMHhC,EAAM3hB,SAAW2hB,EAAMsC,eAK3BhB,EAAQtB,QAMTtkB,GAAOskB,MAAM6F,SAAUvF,EAAc0B,EAAY3jB,OAChD3C,GAAOskB,MAAMkC,IAAKF,IAIrBtmB,GAAOskB,MAAMxJ,QAASpc,GAAS,CAG9BunB,MAAO,WAEN,IAAImE,EAOJ,GAFA5F,GAAgBznB,KAAM2B,GAAM,IAEvB/B,EAASutB,aAcb,OAAO,GARPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,KAE9B7nB,KAAKuP,iBAAkBsY,EAAcqF,GAEtCzL,EAASJ,IAAKrhB,KAAM6nB,GAAgBwF,GAAY,GAAM,IAOxDpF,QAAS,WAMR,OAHAR,GAAgBznB,KAAM2B,IAGf,GAGR0nB,SAAU,WACT,IAAIgE,EAEJ,IAAKztB,EAASutB,aAWb,OAAO,GAVPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,GAAiB,GAK/CpG,EAASJ,IAAKrhB,KAAM6nB,EAAcwF,IAHlCrtB,KAAK4f,oBAAqBiI,EAAcqF,GACxCzL,EAASlF,OAAQvc,KAAM6nB,KAa1BlC,SAAU,SAAU4B,GACnB,OAAO9F,EAASzd,IAAKujB,EAAM3hB,OAAQjE,IAGpCkmB,aAAcA,GAef5kB,GAAOskB,MAAMxJ,QAAS8J,GAAiB,CACtCqB,MAAO,WAIN,IAAIhnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAMhCwF,IACAztB,EAASutB,aACbntB,KAAKuP,iBAAkBsY,EAAcqF,GAErChrB,EAAIqN,iBAAkB5N,EAAMurB,GAAoB,IAGlDzL,EAASJ,IAAKiM,EAAYzF,GAAgBwF,GAAY,GAAM,IAE7DhE,SAAU,WACT,IAAInnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAAiB,EAEjDwF,EAQL5L,EAASJ,IAAKiM,EAAYzF,EAAcwF,IAPnCztB,EAASutB,aACbntB,KAAK4f,oBAAqBiI,EAAcqF,GAExChrB,EAAI0d,oBAAqBje,EAAMurB,GAAoB,GAEpDzL,EAASlF,OAA
Q+Q,EAAYzF,QAgBjC5kB,GAAOsB,KAAM,CACZgpB,WAAY,YACZC,WAAY,WACZC,aAAc,cACdC,aAAc,cACZ,SAAUC,EAAMlE,GAClBxmB,GAAOskB,MAAMxJ,QAAS4P,GAAS,CAC9B9F,aAAc4B,EACdT,SAAUS,EAEVZ,OAAQ,SAAUtB,GACjB,IAAInjB,EAEHwpB,EAAUrG,EAAMyD,cAChBzC,EAAYhB,EAAMgB,UASnB,OALMqF,IAAaA,IANT5tB,MAMgCiD,GAAOwF,SANvCzI,KAMyD4tB,MAClErG,EAAM5lB,KAAO4mB,EAAUG,SACvBtkB,EAAMmkB,EAAUzK,QAAQnd,MAAOX,KAAM0E,WACrC6iB,EAAM5lB,KAAO8nB,GAEPrlB,MAKVnB,GAAOG,GAAGmC,OAAQ,CAEjB4hB,GAAI,SAAUC,EAAOlkB,EAAUoe,EAAMle,GACpC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,IAEzCikB,IAAK,SAAUD,EAAOlkB,EAAUoe,EAAMle,GACrC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,EAAI,IAE7CokB,IAAK,SAAUJ,EAAOlkB,EAAUE,GAC/B,IAAImlB,EAAW5mB,EACf,GAAKylB,GAASA,EAAMY,gBAAkBZ,EAAMmB,UAW3C,OARAA,EAAYnB,EAAMmB,UAClBtlB,GAAQmkB,EAAMsC,gBAAiBlC,IAC9Be,EAAUlhB,UACTkhB,EAAUG,SAAW,IAAMH,EAAUlhB,UACrCkhB,EAAUG,SACXH,EAAUrlB,SACVqlB,EAAUzK,SAEJ9d,KAER,GAAsB,iBAAVonB,EAAqB,CAGhC,IAAMzlB,KAAQylB,EACbpnB,KAAKwnB,IAAK7lB,EAAMuB,EAAUkkB,EAAOzlB,IAElC,OAAO3B,KAWR,OATkB,IAAbkD,GAA0C,mBAAbA,IAGjCE,EAAKF,EACLA,OAAW+C,IAEA,IAAP7C,IACJA,EAAK8jB,IAEClnB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMhL,OAAQvc,KAAMonB,EAAOhkB,EAAIF,QAMzC,IAKC2qB,GAAe,wBAGfC,GAAW,oCAEXC,GAAe,6BAGhB,SAASC,GAAoBvqB,EAAMiX,GAClC,OAAKlX,GAAUC,EAAM,UACpBD,GAA+B,KAArBkX,EAAQnZ,SAAkBmZ,EAAUA,EAAQhI,WAAY,OAE3DzP,GAAQQ,GAAO+V,SAAU,SAAW,IAGrC/V,EAIR,SAASwqB,GAAexqB,GAEvB,OADAA,EAAK9B,MAAyC,OAAhC8B,EAAKjB,aAAc,SAAsB,IAAMiB,EAAK9B,KAC3D8B,EAER,SAASyqB,GAAezqB,GAOvB,MAN2C,WAApCA,EAAK9B,MAAQ,IAAKrB,MAAO,EAAG,GAClCmD,EAAK9B,KAAO8B,EAAK9B,KAAKrB,MAAO,GAE7BmD,EAAKwK,gBAAiB,QAGhBxK,EAGR,SAAS0qB,GAAgBvsB,EAAKwsB,GAC7B,IAAIjsB,EAAG2X,EAAGnY,EAAgB0sB,EAAUC,EAAUjG,EAE9C,GAAuB,IAAlB+F,EAAK7sB,SAAV,CAKA,GAAKkgB,EAASD,QAAS5f,KAEtBymB,EADW5G,EAASzd,IAAKpC,GACPymB,QAKjB,IAAM1mB,KAFN8f,EAASlF,OAAQ6R,EAAM,iBAET/F,EACb,IAAMlmB,EAAI,EAAG2X,EAAIuO,EAAQ1mB,GAAO4B,OAAQpB,EAAI2X,EAAG3X,IAC9Cc,GAAOskB,MAAMrN,IAAKkU,EAAMzsB,EAAM0mB,EAAQ1mB,GAAQQ,IAO7Cuf,EAASF,QAAS5f,KACtBysB,EAAW3M,EAASzB,OAAQre,GAC5B0sB,EAAWrrB,GAAOsC,OAAQ,GAAI8oB,
GAE9B3M,EAASL,IAAK+M,EAAME,KAkBtB,SAASC,GAAUC,EAAY3b,EAAMrO,EAAUiiB,GAG9C5T,EAAOtS,EAAMsS,GAEb,IAAI+T,EAAUjiB,EAAO4hB,EAASkI,EAAYxsB,EAAMC,EAC/CC,EAAI,EACJ2X,EAAI0U,EAAWjrB,OACfmrB,EAAW5U,EAAI,EACf/R,EAAQ8K,EAAM,GACd8b,EAAkBttB,EAAY0G,GAG/B,GAAK4mB,GACG,EAAJ7U,GAA0B,iBAAV/R,IAChB3G,GAAQ6jB,YAAc6I,GAASrmB,KAAMM,GACxC,OAAOymB,EAAWjqB,KAAM,SAAUyV,GACjC,IAAId,EAAOsV,EAAW5pB,GAAIoV,GACrB2U,IACJ9b,EAAM,GAAM9K,EAAMtH,KAAMT,KAAMga,EAAOd,EAAK0V,SAE3CL,GAAUrV,EAAMrG,EAAMrO,EAAUiiB,KAIlC,GAAK3M,IAEJnV,GADAiiB,EAAWN,GAAezT,EAAM2b,EAAY,GAAIhnB,eAAe,EAAOgnB,EAAY/H,IACjE/T,WAEmB,IAA/BkU,EAASla,WAAWnJ,SACxBqjB,EAAWjiB,GAIPA,GAAS8hB,GAAU,CAOvB,IALAgI,GADAlI,EAAUtjB,GAAOwB,IAAKmhB,GAAQgB,EAAU,UAAYqH,KAC/B1qB,OAKbpB,EAAI2X,EAAG3X,IACdF,EAAO2kB,EAEFzkB,IAAMusB,IACVzsB,EAAOgB,GAAO0C,MAAO1D,GAAM,GAAM,GAG5BwsB,GAIJxrB,GAAOoB,MAAOkiB,EAASX,GAAQ3jB,EAAM,YAIvCuC,EAAS/D,KAAM+tB,EAAYrsB,GAAKF,EAAME,GAGvC,GAAKssB,EAOJ,IANAvsB,EAAMqkB,EAASA,EAAQhjB,OAAS,GAAIiE,cAGpCvE,GAAOwB,IAAK8hB,EAAS2H,IAGf/rB,EAAI,EAAGA,EAAIssB,EAAYtsB,IAC5BF,EAAOskB,EAASpkB,GACX4iB,GAAYtd,KAAMxF,EAAKN,MAAQ,MAClC8f,EAASxB,OAAQhe,EAAM,eACxBgB,GAAOwF,SAAUvG,EAAKD,KAEjBA,EAAKL,KAA8C,YAArCK,EAAKN,MAAQ,IAAKgC,cAG/BV,GAAO4rB,WAAa5sB,EAAKH,UAC7BmB,GAAO4rB,SAAU5sB,EAAKL,IAAK,CAC1BC,MAAOI,EAAKJ,OAASI,EAAKO,aAAc,UACtCN,GASJH,EAASE,EAAK6E,YAAYT,QAAS0nB,GAAc,IAAM9rB,EAAMC,IAQnE,OAAOssB,EAGR,SAASjS,GAAQ9Y,EAAMP,EAAU4rB,GAKhC,IAJA,IAAI7sB,EACH4kB,EAAQ3jB,EAAWD,GAAO4M,OAAQ3M,EAAUO,GAASA,EACrDtB,EAAI,EAE4B,OAAvBF,EAAO4kB,EAAO1kB,IAAeA,IAChC2sB,GAA8B,IAAlB7sB,EAAKV,UACtB0B,GAAO8rB,UAAWnJ,GAAQ3jB,IAGtBA,EAAKW,aACJksB,GAAY3L,EAAYlhB,IAC5B4jB,GAAeD,GAAQ3jB,EAAM,WAE9BA,EAAKW,WAAWC,YAAaZ,IAI/B,OAAOwB,EAGRR,GAAOsC,OAAQ,CACduhB,cAAe,SAAU8H,GACxB,OAAOA,GAGRjpB,MAAO,SAAUlC,EAAMurB,EAAeC,GACrC,IAAI9sB,EAAG2X,EAAGoV,EAAaC,EA1INvtB,EAAKwsB,EACnB5qB,EA0IFmC,EAAQlC,EAAKyhB,WAAW,GACxBkK,EAASjM,EAAY1f,GAGtB,KAAMrC,GAAQ+jB,gBAAsC,IAAlB1hB,EAAKlC,UAAoC,KAAlBkC,EAAKlC,UAC3D0B,GAAOmE,SAAU3D,IAOnB,IAHA0rB,EAAevJ,GAAQjgB,GAGjBxD,EAAI,EAAG2X,GAFboV,EAActJ,GAAQniB
,IAEOF,OAAQpB,EAAI2X,EAAG3X,IAvJ5BP,EAwJLstB,EAAa/sB,GAxJHisB,EAwJQe,EAAchtB,QAvJzCqB,EAGc,WAHdA,EAAW4qB,EAAK5qB,SAASG,gBAGAkhB,GAAepd,KAAM7F,EAAID,MACrDysB,EAAKja,QAAUvS,EAAIuS,QAGK,UAAb3Q,GAAqC,aAAbA,IACnC4qB,EAAKhJ,aAAexjB,EAAIwjB,cAoJxB,GAAK4J,EACJ,GAAKC,EAIJ,IAHAC,EAAcA,GAAetJ,GAAQniB,GACrC0rB,EAAeA,GAAgBvJ,GAAQjgB,GAEjCxD,EAAI,EAAG2X,EAAIoV,EAAY3rB,OAAQpB,EAAI2X,EAAG3X,IAC3CgsB,GAAgBe,EAAa/sB,GAAKgtB,EAAchtB,SAGjDgsB,GAAgB1qB,EAAMkC,GAWxB,OAL2B,GAD3BwpB,EAAevJ,GAAQjgB,EAAO,WACZpC,QACjBsiB,GAAesJ,GAAeC,GAAUxJ,GAAQniB,EAAM,WAIhDkC,GAGRopB,UAAW,SAAU5qB,GAKpB,IAJA,IAAImd,EAAM7d,EAAM9B,EACfoc,EAAU9a,GAAOskB,MAAMxJ,QACvB5b,EAAI,OAE6B8D,KAAxBxC,EAAOU,EAAOhC,IAAqBA,IAC5C,GAAK4e,EAAYtd,GAAS,CACzB,GAAO6d,EAAO7d,EAAMge,EAASvb,SAAc,CAC1C,GAAKob,EAAK+G,OACT,IAAM1mB,KAAQ2f,EAAK+G,OACbtK,EAASpc,GACbsB,GAAOskB,MAAMhL,OAAQ9Y,EAAM9B,GAI3BsB,GAAOqmB,YAAa7lB,EAAM9B,EAAM2f,EAAKuH,QAOxCplB,EAAMge,EAASvb,cAAYD,EAEvBxC,EAAMie,EAASxb,WAInBzC,EAAMie,EAASxb,cAAYD,OAOhChD,GAAOG,GAAGmC,OAAQ,CACjB8pB,OAAQ,SAAUnsB,GACjB,OAAOqZ,GAAQvc,KAAMkD,GAAU,IAGhCqZ,OAAQ,SAAUrZ,GACjB,OAAOqZ,GAAQvc,KAAMkD,IAGtBX,KAAM,SAAUwF,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,YAAiB9B,IAAV8B,EACN9E,GAAOV,KAAMvC,MACbA,KAAKsU,QAAQ/P,KAAM,WACK,IAAlBvE,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,WACxDvB,KAAK8G,YAAciB,MAGpB,KAAMA,EAAOrD,UAAUnB,SAG3B+rB,OAAQ,WACP,OAAOf,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACpB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,UAC3CysB,GAAoBhuB,KAAMyD,GAChCd,YAAac,MAKvB8rB,QAAS,WACR,OAAOhB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,GAAuB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,SAAiB,CACzE,IAAIqE,EAASooB,GAAoBhuB,KAAMyD,GACvCmC,EAAO4pB,aAAc/rB,EAAMmC,EAAO8M,gBAKrC+c,OAAQ,WACP,OAAOlB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,SAKvC0vB,MAAO,WACN,OAAOnB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,KAAKuU,gBAK5CD,MAAO,WAIN,IAHA,IAAI7Q,EACHtB,EAAI,EAE2B,OAAtBsB,EAAOzD,KAAMmC,IAAeA,IACd,IAAlBsB,EAAKlC,WAGT0B,GAAO8rB,UAAWn
J,GAAQniB,GAAM,IAGhCA,EAAKqD,YAAc,IAIrB,OAAO9G,MAGR2F,MAAO,SAAUqpB,EAAeC,GAI/B,OAHAD,EAAiC,MAAjBA,GAAgCA,EAChDC,EAAyC,MAArBA,EAA4BD,EAAgBC,EAEzDjvB,KAAKyE,IAAK,WAChB,OAAOxB,GAAO0C,MAAO3F,KAAMgvB,EAAeC,MAI5CL,KAAM,SAAU7mB,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAItE,EAAOzD,KAAM,IAAO,GACvBmC,EAAI,EACJ2X,EAAI9Z,KAAKuD,OAEV,QAAe0C,IAAV8B,GAAyC,IAAlBtE,EAAKlC,SAChC,OAAOkC,EAAK0M,UAIb,GAAsB,iBAAVpI,IAAuB8lB,GAAapmB,KAAMM,KACpDud,IAAWR,GAASzX,KAAMtF,IAAW,CAAE,GAAI,KAAQ,GAAIpE,eAAkB,CAE1EoE,EAAQ9E,GAAO6jB,cAAe/e,GAE9B,IACC,KAAQ5F,EAAI2X,EAAG3X,IAIS,KAHvBsB,EAAOzD,KAAMmC,IAAO,IAGVZ,WACT0B,GAAO8rB,UAAWnJ,GAAQniB,GAAM,IAChCA,EAAK0M,UAAYpI,GAInBtE,EAAO,EAGN,MAAQkJ,KAGNlJ,GACJzD,KAAKsU,QAAQgb,OAAQvnB,IAEpB,KAAMA,EAAOrD,UAAUnB,SAG3BosB,YAAa,WACZ,IAAIlJ,EAAU,GAGd,OAAO8H,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,IAAI8O,EAASvS,KAAK4C,WAEbK,GAAOkE,QAASnH,KAAMymB,GAAY,IACtCxjB,GAAO8rB,UAAWnJ,GAAQ5lB,OACrBuS,GACJA,EAAOqd,aAAcnsB,EAAMzD,QAK3BymB,MAILxjB,GAAOsB,KAAM,CACZsrB,SAAU,SACVC,UAAW,UACXN,aAAc,SACdO,YAAa,QACbC,WAAY,eACV,SAAUtsB,EAAMusB,GAClBhtB,GAAOG,GAAIM,GAAS,SAAUR,GAO7B,IANA,IAAIiB,EACHC,EAAM,GACN8rB,EAASjtB,GAAQC,GACjB2B,EAAOqrB,EAAO3sB,OAAS,EACvBpB,EAAI,EAEGA,GAAK0C,EAAM1C,IAClBgC,EAAQhC,IAAM0C,EAAO7E,KAAOA,KAAK2F,OAAO,GACxC1C,GAAQitB,EAAQ/tB,IAAO8tB,GAAY9rB,GAInCvD,EAAKD,MAAOyD,EAAKD,EAAMH,OAGxB,OAAOhE,KAAKkE,UAAWE,MAGzB,IAAI+rB,GAAY,IAAI3nB,OAAQ,KAAOua,EAAO,kBAAmB,KAEzDqN,GAAc,MAGdC,GAAY,SAAU5sB,GAKxB,IAAIuoB,EAAOvoB,EAAK+D,cAAc6H,YAM9B,OAJM2c,GAASA,EAAKsE,SACnBtE,EAAOjsB,IAGDisB,EAAKuE,iBAAkB9sB,IAG5B+sB,GAAO,SAAU/sB,EAAM+B,EAAShB,GACnC,IAAIJ,EAAKV,EACR+sB,EAAM,GAGP,IAAM/sB,KAAQ8B,EACbirB,EAAK/sB,GAASD,EAAK8f,MAAO7f,GAC1BD,EAAK8f,MAAO7f,GAAS8B,EAAS9B,GAM/B,IAAMA,KAHNU,EAAMI,EAAS/D,KAAMgD,GAGP+B,EACb/B,EAAK8f,MAAO7f,GAAS+sB,EAAK/sB,GAG3B,OAAOU,GAIJssB,GAAY,IAAIloB,OAAQ0a,EAAUpV,KAAM,KAAO,KAiJnD,SAAS6iB,GAAQltB,EAAMC,EAAMktB,GAC5B,IAAIC,EAAOC,EAAUC,EAAU3sB,EAC9B4sB,EAAeZ,GAAY3oB,KAAM/D,GAMjC6f,EAAQ9f,EAAK8f,MAoEd,OAlEAqN,EAAWA,GAAYP,GAAW5sB,MAgBjCW,EAAMwsB,EAASK,iBAAkBvtB,IAAUktB,EAA
UltB,GAEhDstB,GAAgB5sB,IAkBpBA,EAAMA,EAAIiC,QAASkC,GAAU,YAAUtC,GAG3B,KAAR7B,GAAe+e,EAAY1f,KAC/BW,EAAMnB,GAAOsgB,MAAO9f,EAAMC,KAQrBtC,GAAQ8vB,kBAAoBf,GAAU1oB,KAAMrD,IAASssB,GAAUjpB,KAAM/D,KAG1EmtB,EAAQtN,EAAMsN,MACdC,EAAWvN,EAAMuN,SACjBC,EAAWxN,EAAMwN,SAGjBxN,EAAMuN,SAAWvN,EAAMwN,SAAWxN,EAAMsN,MAAQzsB,EAChDA,EAAMwsB,EAASC,MAGftN,EAAMsN,MAAQA,EACdtN,EAAMuN,SAAWA,EACjBvN,EAAMwN,SAAWA,SAIJ9qB,IAAR7B,EAINA,EAAM,GACNA,EAIF,SAAS+sB,GAAcC,EAAaC,GAGnC,MAAO,CACNrtB,IAAK,WACJ,IAAKotB,IASL,OAASpxB,KAAKgE,IAAMqtB,GAAS1wB,MAAOX,KAAM0E,kBALlC1E,KAAKgE,OA3OhB,WAIC,SAASstB,IAGR,GAAM1M,EAAN,CAIA2M,EAAUhO,MAAMiO,QAAU,+EAE1B5M,EAAIrB,MAAMiO,QACT,4HAGDzqB,EAAgBpE,YAAa4uB,GAAY5uB,YAAaiiB,GAEtD,IAAI6M,EAAW1xB,GAAOwwB,iBAAkB3L,GACxC8M,EAAoC,OAAjBD,EAASniB,IAG5BqiB,EAAsE,KAA9CC,EAAoBH,EAASI,YAIrDjN,EAAIrB,MAAMuO,MAAQ,MAClBC,EAA6D,KAAzCH,EAAoBH,EAASK,OAIjDE,EAAgE,KAAzCJ,EAAoBH,EAASZ,OAMpDjM,EAAIrB,MAAM0O,SAAW,WACrBC,EAAiE,KAA9CN,EAAoBhN,EAAIuN,YAAc,GAEzDprB,EAAgBlE,YAAa0uB,GAI7B3M,EAAM,MAGP,SAASgN,EAAoBQ,GAC5B,OAAOjsB,KAAKksB,MAAOC,WAAYF,IAGhC,IAAIV,EAAkBM,EAAsBE,EAAkBH,EAC7DQ,EAAyBZ,EACzBJ,EAAY3xB,EAAS0C,cAAe,OACpCsiB,EAAMhlB,EAAS0C,cAAe,OAGzBsiB,EAAIrB,QAMVqB,EAAIrB,MAAMiP,eAAiB,cAC3B5N,EAAIM,WAAW,GAAO3B,MAAMiP,eAAiB,GAC7CpxB,GAAQqxB,gBAA+C,gBAA7B7N,EAAIrB,MAAMiP,eAEpCvvB,GAAOsC,OAAQnE,GAAS,CACvBsxB,kBAAmB,WAElB,OADApB,IACOU,GAERd,eAAgB,WAEf,OADAI,IACOS,GAERY,cAAe,WAEd,OADArB,IACOI,GAERkB,mBAAoB,WAEnB,OADAtB,IACOK,GAERkB,cAAe,WAEd,OADAvB,IACOY,GAYRY,qBAAsB,WACrB,IAAIC,EAAOtN,EAAIuN,EAASC,EAmCxB,OAlCgC,MAA3BV,IACJQ,EAAQnzB,EAAS0C,cAAe,SAChCmjB,EAAK7lB,EAAS0C,cAAe,MAC7B0wB,EAAUpzB,EAAS0C,cAAe,OAElCywB,EAAMxP,MAAMiO,QAAU,2DACtB/L,EAAGlC,MAAMiO,QAAU,0CAKnB/L,EAAGlC,MAAM2P,OAAS,MAClBF,EAAQzP,MAAM2P,OAAS,MAQvBF,EAAQzP,MAAMC,QAAU,QAExBzc,EACEpE,YAAaowB,GACbpwB,YAAa8iB,GACb9iB,YAAaqwB,GAEfC,EAAUlzB,GAAOwwB,iBAAkB9K,GACnC8M,EAA4BY,SAAUF,EAAQC,OAAQ,IACrDC,SAAUF,EAAQG,eAAgB,IAClCD,SAAUF,EAAQI,kBAAmB,MAAW5N,EAAG6N,aAEpDvsB,EAAgBlE,YAAakwB,IAEvBR,MAvIV,GAsPA,IAAIgB,GAAc,CAAE,SAAU,MAAO,MACpCC,GAAa5zB,EA
AS0C,cAAe,OAAQihB,MAC7CkQ,GAAc,GAkBf,SAASC,GAAehwB,GACvB,IAAIiwB,EAAQ1wB,GAAO2wB,SAAUlwB,IAAU+vB,GAAa/vB,GAEpD,OAAKiwB,IAGAjwB,KAAQ8vB,GACL9vB,EAED+vB,GAAa/vB,GAxBrB,SAAyBA,GAGxB,IAAImwB,EAAUnwB,EAAM,GAAIkd,cAAgBld,EAAKpD,MAAO,GACnD6B,EAAIoxB,GAAYhwB,OAEjB,MAAQpB,IAEP,IADAuB,EAAO6vB,GAAapxB,GAAM0xB,KACbL,GACZ,OAAO9vB,EAeoBowB,CAAgBpwB,IAAUA,GAIxD,IAKCqwB,GAAe,4BACfC,GAAU,CAAE/B,SAAU,WAAYgC,WAAY,SAAUzQ,QAAS,SACjE0Q,GAAqB,CACpBC,cAAe,IACfC,WAAY,OAGd,SAASC,GAAmBrvB,EAAO+C,EAAOusB,GAIzC,IAAI1sB,EAAUqb,EAAQ5V,KAAMtF,GAC5B,OAAOH,EAGNzB,KAAKouB,IAAK,EAAG3sB,EAAS,IAAQ0sB,GAAY,KAAU1sB,EAAS,IAAO,MACpEG,EAGF,SAASysB,GAAoB/wB,EAAMgxB,EAAWC,EAAKC,EAAaC,EAAQC,GACvE,IAAI1yB,EAAkB,UAAdsyB,EAAwB,EAAI,EACnCK,EAAQ,EACRC,EAAQ,EACRC,EAAc,EAGf,GAAKN,KAAUC,EAAc,SAAW,WACvC,OAAO,EAGR,KAAQxyB,EAAI,EAAGA,GAAK,EAKN,WAARuyB,IACJM,GAAe/xB,GAAOwgB,IAAKhgB,EAAMixB,EAAMxR,EAAW/gB,IAAK,EAAMyyB,IAIxDD,GAmBQ,YAARD,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,IAIjD,WAARF,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,MAtBvEG,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,GAGhD,YAARF,EACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,GAItEE,GAAS7xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,IAoCzE,OAhBMD,GAA8B,GAAfE,IAIpBE,GAAS5uB,KAAKouB,IAAK,EAAGpuB,KAAK8uB,KAC1BxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEu0B,EACAE,EACAD,EACA,MAIM,GAGDC,EAAQC,EAGhB,SAASE,GAAkBzxB,EAAMgxB,EAAWK,GAG3C,IAAIF,EAASvE,GAAW5sB,GAKvBkxB,IADmBvzB,GAAQsxB,qBAAuBoC,IAEE,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCO,EAAmBR,EAEnBvyB,EAAMuuB,GAAQltB,EAAMgxB,EAAWG,GAC/BQ,EAAa,SAAWX,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,GAIzE,GAAK6vB,GAAU1oB,KAAMrF,GAAQ,CAC5B,IAAM0yB,EACL,OAAO1yB,EAERA,EAAM,OAyCP,QAlCQhB,GAAQsxB,qBAAuBiC,IAMrCvzB,GAAQ0xB,wBAA0BtvB,GAAUC,EAAM,OAI3C,SAARrB,IAICkwB,WAAYlwB,IAA0D,WAAjDa,GAAOwgB,IAAKhgB,EAAM,WAAW,EAAOmxB,KAG1DnxB,EAAK4xB,iBAAiB9xB,SAEtBoxB,EAAiE,eAAnD1xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,IAKpDO,EAAmBC,KAAc3xB,KAEhCrB,EA
AMqB,EAAM2xB,MAKdhzB,EAAMkwB,WAAYlwB,IAAS,GAI1BoyB,GACC/wB,EACAgxB,EACAK,IAAWH,EAAc,SAAW,WACpCQ,EACAP,EAGAxyB,GAEE,KAwTL,SAASkzB,GAAO7xB,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GACzC,OAAO,IAAID,GAAM1xB,UAAUP,KAAMI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GAtT5DtyB,GAAOsC,OAAQ,CAIdiwB,SAAU,CACTC,QAAS,CACRzxB,IAAK,SAAUP,EAAMmtB,GACpB,GAAKA,EAAW,CAGf,IAAIxsB,EAAMusB,GAAQltB,EAAM,WACxB,MAAe,KAARW,EAAa,IAAMA,MAO9B+f,UAAW,CACVuR,yBAAyB,EACzBC,aAAa,EACbC,kBAAkB,EAClBC,aAAa,EACbC,UAAU,EACVC,YAAY,EACZ3B,YAAY,EACZ4B,UAAU,EACVC,YAAY,EACZC,eAAe,EACfC,iBAAiB,EACjBC,SAAS,EACTC,YAAY,EACZC,cAAc,EACdC,YAAY,EACZd,SAAS,EACTe,OAAO,EACPC,SAAS,EACT3S,OAAO,EACP4S,QAAQ,EACRC,QAAQ,EACRC,MAAM,EAGNC,aAAa,EACbC,cAAc,EACdC,aAAa,EACbC,kBAAkB,EAClBC,eAAe,GAKhBrD,SAAU,GAGVrQ,MAAO,SAAU9f,EAAMC,EAAMqE,EAAO+sB,GAGnC,GAAMrxB,GAA0B,IAAlBA,EAAKlC,UAAoC,IAAlBkC,EAAKlC,UAAmBkC,EAAK8f,MAAlE,CAKA,IAAInf,EAAKzC,EAAM2gB,EACd4U,EAAWrW,EAAWnd,GACtBstB,EAAeZ,GAAY3oB,KAAM/D,GACjC6f,EAAQ9f,EAAK8f,MAad,GARMyN,IACLttB,EAAOgwB,GAAewD,IAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,QAGrCjxB,IAAV8B,EA0CJ,OAAKua,GAAS,QAASA,QACwBrc,KAA5C7B,EAAMke,EAAMte,IAAKP,GAAM,EAAOqxB,IAEzB1wB,EAIDmf,EAAO7f,GA7CA,YAHd/B,SAAcoG,KAGc3D,EAAM6e,EAAQ5V,KAAMtF,KAAa3D,EAAK,KACjE2D,EAAQ2b,GAAWjgB,EAAMC,EAAMU,GAG/BzC,EAAO,UAIM,MAAToG,GAAiBA,GAAUA,IAOlB,WAATpG,GAAsBqvB,IAC1BjpB,GAAS3D,GAAOA,EAAK,KAASnB,GAAOkhB,UAAW+S,GAAa,GAAK,OAI7D91B,GAAQqxB,iBAA6B,KAAV1qB,GAAiD,IAAjCrE,EAAK7C,QAAS,gBAC9D0iB,EAAO7f,GAAS,WAIX4e,GAAY,QAASA,QACsBrc,KAA9C8B,EAAQua,EAAMjB,IAAK5d,EAAMsE,EAAO+sB,MAE7B9D,EACJzN,EAAM4T,YAAazzB,EAAMqE,GAEzBwb,EAAO7f,GAASqE,MAkBpB0b,IAAK,SAAUhgB,EAAMC,EAAMoxB,EAAOF,GACjC,IAAIxyB,EAAK6B,EAAKqe,EACb4U,EAAWrW,EAAWnd,GA6BvB,OA5BgB0sB,GAAY3oB,KAAM/D,KAMjCA,EAAOgwB,GAAewD,KAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,KAGtC,QAAS5U,IACtBlgB,EAAMkgB,EAAMte,IAAKP,GAAM,EAAMqxB,SAIjB7uB,IAAR7D,IACJA,EAAMuuB,GAAQltB,EAAMC,EAAMkxB,IAId,WAARxyB,GAAoBsB,KAAQwwB,KAChC9xB,EAAM8xB,GAAoBxwB,IAIZ,KAAVoxB,GAAgBA,GACpB7wB,EAAMquB,WAAYlwB,IACD,IAAV0yB,GAAkBsC,SAAUnzB,GAAQ
A,GAAO,EAAI7B,GAGhDA,KAITa,GAAOsB,KAAM,CAAE,SAAU,SAAW,SAAU6D,EAAIqsB,GACjDxxB,GAAOuyB,SAAUf,GAAc,CAC9BzwB,IAAK,SAAUP,EAAMmtB,EAAUkE,GAC9B,GAAKlE,EAIJ,OAAOmD,GAAatsB,KAAMxE,GAAOwgB,IAAKhgB,EAAM,aAQxCA,EAAK4xB,iBAAiB9xB,QAAWE,EAAK4zB,wBAAwBxG,MAIjEqE,GAAkBzxB,EAAMgxB,EAAWK,GAHnCtE,GAAM/sB,EAAMuwB,GAAS,WACpB,OAAOkB,GAAkBzxB,EAAMgxB,EAAWK,MAM9CzT,IAAK,SAAU5d,EAAMsE,EAAO+sB,GAC3B,IAAIltB,EACHgtB,EAASvE,GAAW5sB,GAIpB6zB,GAAsBl2B,GAAQyxB,iBACT,aAApB+B,EAAO3C,SAIR0C,GADkB2C,GAAsBxC,IAEY,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCN,EAAWQ,EACVN,GACC/wB,EACAgxB,EACAK,EACAH,EACAC,GAED,EAqBF,OAjBKD,GAAe2C,IACnBhD,GAAYnuB,KAAK8uB,KAChBxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEgyB,WAAYsC,EAAQH,IACpBD,GAAoB/wB,EAAMgxB,EAAW,UAAU,EAAOG,GACtD,KAKGN,IAAc1sB,EAAUqb,EAAQ5V,KAAMtF,KACb,QAA3BH,EAAS,IAAO,QAElBnE,EAAK8f,MAAOkR,GAAc1sB,EAC1BA,EAAQ9E,GAAOwgB,IAAKhgB,EAAMgxB,IAGpBJ,GAAmB5wB,EAAMsE,EAAOusB,OAK1CrxB,GAAOuyB,SAAS3D,WAAaV,GAAc/vB,GAAQwxB,mBAClD,SAAUnvB,EAAMmtB,GACf,GAAKA,EACJ,OAAS0B,WAAY3B,GAAQltB,EAAM,gBAClCA,EAAK4zB,wBAAwBE,KAC5B/G,GAAM/sB,EAAM,CAAEouB,WAAY,GAAK,WAC9B,OAAOpuB,EAAK4zB,wBAAwBE,QAEnC,OAMPt0B,GAAOsB,KAAM,CACZizB,OAAQ,GACRC,QAAS,GACTC,OAAQ,SACN,SAAUC,EAAQC,GACpB30B,GAAOuyB,SAAUmC,EAASC,GAAW,CACpCC,OAAQ,SAAU9vB,GAOjB,IANA,IAAI5F,EAAI,EACP21B,EAAW,GAGXC,EAAyB,iBAAVhwB,EAAqBA,EAAMI,MAAO,KAAQ,CAAEJ,GAEpD5F,EAAI,EAAGA,IACd21B,EAAUH,EAASzU,EAAW/gB,GAAMy1B,GACnCG,EAAO51B,IAAO41B,EAAO51B,EAAI,IAAO41B,EAAO,GAGzC,OAAOD,IAIO,WAAXH,IACJ10B,GAAOuyB,SAAUmC,EAASC,GAASvW,IAAMgT,MAI3CpxB,GAAOG,GAAGmC,OAAQ,CACjBke,IAAK,SAAU/f,EAAMqE,GACpB,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAMC,EAAMqE,GAC1C,IAAI6sB,EAAQ1vB,EACXT,EAAM,GACNtC,EAAI,EAEL,GAAK4D,MAAMC,QAAStC,GAAS,CAI5B,IAHAkxB,EAASvE,GAAW5sB,GACpByB,EAAMxB,EAAKH,OAEHpB,EAAI+C,EAAK/C,IAChBsC,EAAKf,EAAMvB,IAAQc,GAAOwgB,IAAKhgB,EAAMC,EAAMvB,IAAK,EAAOyyB,GAGxD,OAAOnwB,EAGR,YAAiBwB,IAAV8B,EACN9E,GAAOsgB,MAAO9f,EAAMC,EAAMqE,GAC1B9E,GAAOwgB,IAAKhgB,EAAMC,IACjBA,EAAMqE,EAA0B,EAAnBrD,UAAUnB,aAQ5BN,GAAOqyB,MAAQA,IAET1xB,UAAY,CACjBE,YAAawxB
,GACbjyB,KAAM,SAAUI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,EAAQrR,GACjDlkB,KAAKyD,KAAOA,EACZzD,KAAKuhB,KAAOA,EACZvhB,KAAKu1B,OAASA,GAAUtyB,GAAOsyB,OAAO5P,SACtC3lB,KAAKwF,QAAUA,EACfxF,KAAKsS,MAAQtS,KAAKmrB,IAAMnrB,KAAK4Z,MAC7B5Z,KAAKoF,IAAMA,EACXpF,KAAKkkB,KAAOA,IAAUjhB,GAAOkhB,UAAW5C,GAAS,GAAK,OAEvD3H,IAAK,WACJ,IAAI0I,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAElC,OAAOe,GAASA,EAAMte,IACrBse,EAAMte,IAAKhE,MACXs1B,GAAM0C,UAAUrS,SAAS3hB,IAAKhE,OAEhCi4B,IAAK,SAAUC,GACd,IAAIC,EACH7V,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAoB/B,OAlBKvhB,KAAKwF,QAAQ4yB,SACjBp4B,KAAKq4B,IAAMF,EAAQl1B,GAAOsyB,OAAQv1B,KAAKu1B,QACtC2C,EAASl4B,KAAKwF,QAAQ4yB,SAAWF,EAAS,EAAG,EAAGl4B,KAAKwF,QAAQ4yB,UAG9Dp4B,KAAKq4B,IAAMF,EAAQD,EAEpBl4B,KAAKmrB,KAAQnrB,KAAKoF,IAAMpF,KAAKsS,OAAU6lB,EAAQn4B,KAAKsS,MAE/CtS,KAAKwF,QAAQ8yB,MACjBt4B,KAAKwF,QAAQ8yB,KAAK73B,KAAMT,KAAKyD,KAAMzD,KAAKmrB,IAAKnrB,MAGzCsiB,GAASA,EAAMjB,IACnBiB,EAAMjB,IAAKrhB,MAEXs1B,GAAM0C,UAAUrS,SAAStE,IAAKrhB,MAExBA,QAIOqD,KAAKO,UAAY0xB,GAAM1xB,WAEvC0xB,GAAM0C,UAAY,CACjBrS,SAAU,CACT3hB,IAAK,SAAU4f,GACd,IAAIhS,EAIJ,OAA6B,IAAxBgS,EAAMngB,KAAKlC,UACa,MAA5BqiB,EAAMngB,KAAMmgB,EAAMrC,OAAoD,MAAlCqC,EAAMngB,KAAK8f,MAAOK,EAAMrC,MACrDqC,EAAMngB,KAAMmgB,EAAMrC,OAO1B3P,EAAS3O,GAAOwgB,IAAKG,EAAMngB,KAAMmgB,EAAMrC,KAAM,MAGhB,SAAX3P,EAAwBA,EAAJ,GAEvCyP,IAAK,SAAUuC,GAKT3gB,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAC1Bte,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAAQqC,GACK,IAAxBA,EAAMngB,KAAKlC,WACtB0B,GAAOuyB,SAAU5R,EAAMrC,OAC6B,MAAnDqC,EAAMngB,KAAK8f,MAAOmQ,GAAe9P,EAAMrC,OAGxCqC,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,IAFjCloB,GAAOsgB,MAAOK,EAAMngB,KAAMmgB,EAAMrC,KAAMqC,EAAMuH,IAAMvH,EAAMM,UAU5CsU,UAAYlD,GAAM0C,UAAUS,WAAa,CACxDpX,IAAK,SAAUuC,GACTA,EAAMngB,KAAKlC,UAAYqiB,EAAMngB,KAAKb,aACtCghB,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,OAKpCloB,GAAOsyB,OAAS,CACfmD,OAAQ,SAAUC,GACjB,OAAOA,GAERC,MAAO,SAAUD,GAChB,MAAO,GAAMxyB,KAAK0yB,IAAKF,EAAIxyB,KAAK2yB,IAAO,GAExCnT,SAAU,SAGX1iB,GAAOs1B,GAAKjD,GAAM1xB,UAAUP,KAG5BJ,GAAOs1B,GAAGD,KAAO,GAKjB,IACCS,GAAOC,GAkrBH9oB,GAEH+oB,GAnrBDC,GAAW,yBACXC,GAAO,cAER,SAASC,KACHJ,MACqB,IAAp
Bp5B,EAASy5B,QAAoBt5B,GAAOu5B,sBACxCv5B,GAAOu5B,sBAAuBF,IAE9Br5B,GAAO2e,WAAY0a,GAAUn2B,GAAOs1B,GAAGgB,UAGxCt2B,GAAOs1B,GAAGiB,QAKZ,SAASC,KAIR,OAHA15B,GAAO2e,WAAY,WAClBqa,QAAQ9yB,IAEA8yB,GAAQ7N,KAAKC,MAIvB,SAASuO,GAAO/3B,EAAMg4B,GACrB,IAAI3M,EACH7qB,EAAI,EACJggB,EAAQ,CAAE+Q,OAAQvxB,GAKnB,IADAg4B,EAAeA,EAAe,EAAI,EAC1Bx3B,EAAI,EAAGA,GAAK,EAAIw3B,EAEvBxX,EAAO,UADP6K,EAAQ9J,EAAW/gB,KACSggB,EAAO,UAAY6K,GAAUrrB,EAO1D,OAJKg4B,IACJxX,EAAMsT,QAAUtT,EAAM0O,MAAQlvB,GAGxBwgB,EAGR,SAASyX,GAAa7xB,EAAOwZ,EAAMsY,GAKlC,IAJA,IAAIjW,EACH4K,GAAesL,GAAUC,SAAUxY,IAAU,IAAK7gB,OAAQo5B,GAAUC,SAAU,MAC9E/f,EAAQ,EACRzW,EAASirB,EAAWjrB,OACbyW,EAAQzW,EAAQyW,IACvB,GAAO4J,EAAQ4K,EAAYxU,GAAQvZ,KAAMo5B,EAAWtY,EAAMxZ,GAGzD,OAAO6b,EAsNV,SAASkW,GAAWr2B,EAAMu2B,EAAYx0B,GACrC,IAAIoM,EACHqoB,EACAjgB,EAAQ,EACRzW,EAASu2B,GAAUI,WAAW32B,OAC9ByZ,EAAW/Z,GAAO0Z,WAAWI,OAAQ,kBAG7Byc,EAAK/1B,OAEb+1B,EAAO,WACN,GAAKS,EACJ,OAAO,EAYR,IAVA,IAAIE,EAAcpB,IAASU,KAC1B3a,EAAY3Y,KAAKouB,IAAK,EAAGsF,EAAUO,UAAYP,EAAUzB,SAAW+B,GAKpEjC,EAAU,GADHpZ,EAAY+a,EAAUzB,UAAY,GAEzCpe,EAAQ,EACRzW,EAASs2B,EAAUQ,OAAO92B,OAEnByW,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAKC,GAMhC,OAHAlb,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW3B,EAASpZ,IAG5CoZ,EAAU,GAAK30B,EACZub,GAIFvb,GACLyZ,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAI5C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,KACvB,IAERA,EAAY7c,EAAS1B,QAAS,CAC7B7X,KAAMA,EACNonB,MAAO5nB,GAAOsC,OAAQ,GAAIy0B,GAC1BM,KAAMr3B,GAAOsC,QAAQ,EAAM,CAC1Bg1B,cAAe,GACfhF,OAAQtyB,GAAOsyB,OAAO5P,UACpBngB,GACHg1B,mBAAoBR,EACpBS,gBAAiBj1B,EACjB40B,UAAWrB,IAASU,KACpBrB,SAAU5yB,EAAQ4yB,SAClBiC,OAAQ,GACRT,YAAa,SAAUrY,EAAMnc,GAC5B,IAAIwe,EAAQ3gB,GAAOqyB,MAAO7xB,EAAMo2B,EAAUS,KAAM/Y,EAAMnc,EACrDy0B,EAAUS,KAAKC,cAAehZ,IAAUsY,EAAUS,KAAK/E,QAExD,OADAsE,EAAUQ,OAAOz5B,KAAMgjB,GAChBA,GAERnB,KAAM,SAAUiY,GACf,IAAI1gB,EAAQ,EAIXzW,EAASm3B,EAAUb,EAAUQ,OAAO92B,OAAS,EAC9C,GAAK02B,EACJ,OAAOj6B,KAGR,IADAi6B,GAAU,EACFjgB,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAK,GAUhC,OANKyC,GACJ1d,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAC3C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,EAAWa,KA
EzC1d,EAASuB,WAAY9a,EAAM,CAAEo2B,EAAWa,IAElC16B,QAGT6qB,EAAQgP,EAAUhP,MAInB,KA/HD,SAAqBA,EAAO0P,GAC3B,IAAIvgB,EAAOtW,EAAM6xB,EAAQxtB,EAAOua,EAGhC,IAAMtI,KAAS6Q,EAed,GAbA0K,EAASgF,EADT72B,EAAOmd,EAAW7G,IAElBjS,EAAQ8iB,EAAO7Q,GACVjU,MAAMC,QAAS+B,KACnBwtB,EAASxtB,EAAO,GAChBA,EAAQ8iB,EAAO7Q,GAAUjS,EAAO,IAG5BiS,IAAUtW,IACdmnB,EAAOnnB,GAASqE,SACT8iB,EAAO7Q,KAGfsI,EAAQrf,GAAOuyB,SAAU9xB,KACX,WAAY4e,EAMzB,IAAMtI,KALNjS,EAAQua,EAAMuV,OAAQ9vB,UACf8iB,EAAOnnB,GAICqE,EACNiS,KAAS6Q,IAChBA,EAAO7Q,GAAUjS,EAAOiS,GACxBugB,EAAevgB,GAAUub,QAI3BgF,EAAe72B,GAAS6xB,EA6F1BoF,CAAY9P,EAAOgP,EAAUS,KAAKC,eAE1BvgB,EAAQzW,EAAQyW,IAEvB,GADApI,EAASkoB,GAAUI,WAAYlgB,GAAQvZ,KAAMo5B,EAAWp2B,EAAMonB,EAAOgP,EAAUS,MAM9E,OAJKj5B,EAAYuQ,EAAO6Q,QACvBxf,GAAOsf,YAAasX,EAAUp2B,KAAMo2B,EAAUS,KAAKpe,OAAQuG,KAC1D7Q,EAAO6Q,KAAKmY,KAAMhpB,IAEbA,EAyBT,OArBA3O,GAAOwB,IAAKomB,EAAO+O,GAAaC,GAE3Bx4B,EAAYw4B,EAAUS,KAAKhoB,QAC/BunB,EAAUS,KAAKhoB,MAAM7R,KAAMgD,EAAMo2B,GAIlCA,EACEtc,SAAUsc,EAAUS,KAAK/c,UACzBzT,KAAM+vB,EAAUS,KAAKxwB,KAAM+vB,EAAUS,KAAKO,UAC1Ctf,KAAMse,EAAUS,KAAK/e,MACrBwB,OAAQ8c,EAAUS,KAAKvd,QAEzB9Z,GAAOs1B,GAAGuC,MACT73B,GAAOsC,OAAQi0B,EAAM,CACpB/1B,KAAMA,EACNs3B,KAAMlB,EACN3d,MAAO2d,EAAUS,KAAKpe,SAIjB2d,EAGR52B,GAAO62B,UAAY72B,GAAOsC,OAAQu0B,GAAW,CAE5CC,SAAU,CACTiB,IAAK,CAAE,SAAUzZ,EAAMxZ,GACtB,IAAI6b,EAAQ5jB,KAAK45B,YAAarY,EAAMxZ,GAEpC,OADA2b,GAAWE,EAAMngB,KAAM8d,EAAM0B,EAAQ5V,KAAMtF,GAAS6b,GAC7CA,KAITqX,QAAS,SAAUpQ,EAAOrmB,GACpBnD,EAAYwpB,IAChBrmB,EAAWqmB,EACXA,EAAQ,CAAE,MAEVA,EAAQA,EAAM5d,MAAO2N,GAOtB,IAJA,IAAI2G,EACHvH,EAAQ,EACRzW,EAASsnB,EAAMtnB,OAERyW,EAAQzW,EAAQyW,IACvBuH,EAAOsJ,EAAO7Q,GACd8f,GAAUC,SAAUxY,GAASuY,GAAUC,SAAUxY,IAAU,GAC3DuY,GAAUC,SAAUxY,GAAOiB,QAAShe,IAItC01B,WAAY,CA3Wb,SAA2Bz2B,EAAMonB,EAAOyP,GACvC,IAAI/Y,EAAMxZ,EAAO4c,EAAQrC,EAAO4Y,EAASC,EAAWC,EAAgB5X,EACnE6X,EAAQ,UAAWxQ,GAAS,WAAYA,EACxCkQ,EAAO/6B,KACP2tB,EAAO,GACPpK,EAAQ9f,EAAK8f,MACb8V,EAAS51B,EAAKlC,UAAY+hB,GAAoB7f,GAC9C63B,EAAW7Z,EAASzd,IAAKP,EAAM,UA6BhC,IAAM8d,KA1BA+Y,EAAKpe,QAEa,OADvBoG,EAAQrf,GAAOsf,YAAa9e,EAAM,OACvB83B,WACVj
Z,EAAMiZ,SAAW,EACjBL,EAAU5Y,EAAMhO,MAAM8H,KACtBkG,EAAMhO,MAAM8H,KAAO,WACZkG,EAAMiZ,UACXL,MAIH5Y,EAAMiZ,WAENR,EAAKhe,OAAQ,WAGZge,EAAKhe,OAAQ,WACZuF,EAAMiZ,WACAt4B,GAAOiZ,MAAOzY,EAAM,MAAOF,QAChC+e,EAAMhO,MAAM8H,YAOFyO,EAEb,GADA9iB,EAAQ8iB,EAAOtJ,GACV2X,GAASzxB,KAAMM,GAAU,CAG7B,UAFO8iB,EAAOtJ,GACdoD,EAASA,GAAoB,WAAV5c,EACdA,KAAYsxB,EAAS,OAAS,QAAW,CAI7C,GAAe,SAAVtxB,IAAoBuzB,QAAiCr1B,IAArBq1B,EAAU/Z,GAK9C,SAJA8X,GAAS,EAOX1L,EAAMpM,GAAS+Z,GAAYA,EAAU/Z,IAAUte,GAAOsgB,MAAO9f,EAAM8d,GAMrE,IADA4Z,GAAal4B,GAAO2D,cAAeikB,MAChB5nB,GAAO2D,cAAe+mB,GA8DzC,IAAMpM,KAzDD8Z,GAA2B,IAAlB53B,EAAKlC,WAMlB+4B,EAAKkB,SAAW,CAAEjY,EAAMiY,SAAUjY,EAAMkY,UAAWlY,EAAMmY,WAIlC,OADvBN,EAAiBE,GAAYA,EAAS9X,WAErC4X,EAAiB3Z,EAASzd,IAAKP,EAAM,YAGrB,UADjB+f,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,cAEtB23B,EACJ5X,EAAU4X,GAIV9W,GAAU,CAAE7gB,IAAQ,GACpB23B,EAAiB33B,EAAK8f,MAAMC,SAAW4X,EACvC5X,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,WAC5B6gB,GAAU,CAAE7gB,OAKG,WAAZ+f,GAAoC,iBAAZA,GAAgD,MAAlB4X,IACrB,SAAhCn4B,GAAOwgB,IAAKhgB,EAAM,WAGhB03B,IACLJ,EAAKjxB,KAAM,WACVyZ,EAAMC,QAAU4X,IAEM,MAAlBA,IACJ5X,EAAUD,EAAMC,QAChB4X,EAA6B,SAAZ5X,EAAqB,GAAKA,IAG7CD,EAAMC,QAAU,iBAKd8W,EAAKkB,WACTjY,EAAMiY,SAAW,SACjBT,EAAKhe,OAAQ,WACZwG,EAAMiY,SAAWlB,EAAKkB,SAAU,GAChCjY,EAAMkY,UAAYnB,EAAKkB,SAAU,GACjCjY,EAAMmY,UAAYpB,EAAKkB,SAAU,MAKnCL,GAAY,EACExN,EAGPwN,IACAG,EACC,WAAYA,IAChBjC,EAASiC,EAASjC,QAGnBiC,EAAW7Z,EAASxB,OAAQxc,EAAM,SAAU,CAAE+f,QAAS4X,IAInDzW,IACJ2W,EAASjC,QAAUA,GAIfA,GACJ/U,GAAU,CAAE7gB,IAAQ,GAKrBs3B,EAAKjxB,KAAM,WASV,IAAMyX,KAJA8X,GACL/U,GAAU,CAAE7gB,IAEbge,EAASlF,OAAQ9Y,EAAM,UACTkqB,EACb1qB,GAAOsgB,MAAO9f,EAAM8d,EAAMoM,EAAMpM,OAMnC4Z,EAAYvB,GAAaP,EAASiC,EAAU/Z,GAAS,EAAGA,EAAMwZ,GACtDxZ,KAAQ+Z,IACfA,EAAU/Z,GAAS4Z,EAAU7oB,MACxB+mB,IACJ8B,EAAU/1B,IAAM+1B,EAAU7oB,MAC1B6oB,EAAU7oB,MAAQ,MAuMrBqpB,UAAW,SAAUn3B,EAAU+qB,GACzBA,EACJuK,GAAUI,WAAW1X,QAAShe,GAE9Bs1B,GAAUI,WAAWt5B,KAAM4D,MAK9BvB,GAAO24B,MAAQ,SAAUA,EAAOrG,EAAQnyB,GACvC,IAAI61B,EAAM2C,GAA0B,iBAAVA,EAAqB34B,GAAOsC,OAAQ,GAAIq2B,GAAU,CAC3Ef,SAAUz3B,IAAOA,GAAMmyB,GACtBl0B,EAAYu6B,IAAWA,EACxBxD,
SAAUwD,EACVrG,OAAQnyB,GAAMmyB,GAAUA,IAAWl0B,EAAYk0B,IAAYA,GAoC5D,OAhCKtyB,GAAOs1B,GAAG/Q,IACdyR,EAAIb,SAAW,EAGc,iBAAjBa,EAAIb,WACVa,EAAIb,YAAYn1B,GAAOs1B,GAAGsD,OAC9B5C,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAQ5C,EAAIb,UAGrCa,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAOlW,UAMjB,MAAbsT,EAAI/c,QAA+B,IAAd+c,EAAI/c,QAC7B+c,EAAI/c,MAAQ,MAIb+c,EAAIxI,IAAMwI,EAAI4B,SAEd5B,EAAI4B,SAAW,WACTx5B,EAAY43B,EAAIxI,MACpBwI,EAAIxI,IAAIhwB,KAAMT,MAGVi5B,EAAI/c,OACRjZ,GAAOmf,QAASpiB,KAAMi5B,EAAI/c,QAIrB+c,GAGRh2B,GAAOG,GAAGmC,OAAQ,CACjBu2B,OAAQ,SAAUF,EAAOG,EAAIxG,EAAQ/wB,GAGpC,OAAOxE,KAAK6P,OAAQyT,IAAqBG,IAAK,UAAW,GAAIc,OAG3Dnf,MAAM42B,QAAS,CAAEvG,QAASsG,GAAMH,EAAOrG,EAAQ/wB,IAElDw3B,QAAS,SAAUza,EAAMqa,EAAOrG,EAAQ/wB,GACvC,IAAI8P,EAAQrR,GAAO2D,cAAe2a,GACjC0a,EAASh5B,GAAO24B,MAAOA,EAAOrG,EAAQ/wB,GACtC03B,EAAc,WAGb,IAAInB,EAAOjB,GAAW95B,KAAMiD,GAAOsC,OAAQ,GAAIgc,GAAQ0a,IAGlD3nB,GAASmN,EAASzd,IAAKhE,KAAM,YACjC+6B,EAAKtY,MAAM,IAMd,OAFAyZ,EAAYC,OAASD,EAEd5nB,IAA0B,IAAjB2nB,EAAO/f,MACtBlc,KAAKuE,KAAM23B,GACXl8B,KAAKkc,MAAO+f,EAAO/f,MAAOggB,IAE5BzZ,KAAM,SAAU9gB,EAAMghB,EAAY+X,GACjC,IAAI0B,EAAY,SAAU9Z,GACzB,IAAIG,EAAOH,EAAMG,YACVH,EAAMG,KACbA,EAAMiY,IAYP,MATqB,iBAAT/4B,IACX+4B,EAAU/X,EACVA,EAAahhB,EACbA,OAAOsE,GAEH0c,GACJ3iB,KAAKkc,MAAOva,GAAQ,KAAM,IAGpB3B,KAAKuE,KAAM,WACjB,IAAI6d,GAAU,EACbpI,EAAgB,MAARrY,GAAgBA,EAAO,aAC/B06B,EAASp5B,GAAOo5B,OAChB/a,EAAOG,EAASzd,IAAKhE,MAEtB,GAAKga,EACCsH,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MACnC2Z,EAAW9a,EAAMtH,SAGlB,IAAMA,KAASsH,EACTA,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MAAQ0W,GAAK1xB,KAAMuS,IACtDoiB,EAAW9a,EAAMtH,IAKpB,IAAMA,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MACnB,MAAR2B,GAAgB06B,EAAQriB,GAAQkC,QAAUva,IAE5C06B,EAAQriB,GAAQ+gB,KAAKtY,KAAMiY,GAC3BtY,GAAU,EACVia,EAAO/2B,OAAQ0U,EAAO,KAOnBoI,GAAYsY,GAChBz3B,GAAOmf,QAASpiB,KAAM2B,MAIzBw6B,OAAQ,SAAUx6B,GAIjB,OAHc,IAATA,IACJA,EAAOA,GAAQ,MAET3B,KAAKuE,KAAM,WACjB,IAAIyV,EACHsH,EAAOG,EAASzd,IAAKhE,MACrBkc,EAAQoF,EAAM3f,EAAO,SACrB2gB,EAAQhB,EAAM3f,EAAO,cACrB06B,EAASp5B,GAAOo5B,OAChB94B,EAAS2Y,EAAQA,EAAM3Y,OAAS,EAajC,IAVA+d,EAAK6a,QA
AS,EAGdl5B,GAAOiZ,MAAOlc,KAAM2B,EAAM,IAErB2gB,GAASA,EAAMG,MACnBH,EAAMG,KAAKhiB,KAAMT,MAAM,GAIlBga,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MAAQq8B,EAAQriB,GAAQkC,QAAUva,IAC/D06B,EAAQriB,GAAQ+gB,KAAKtY,MAAM,GAC3B4Z,EAAO/2B,OAAQ0U,EAAO,IAKxB,IAAMA,EAAQ,EAAGA,EAAQzW,EAAQyW,IAC3BkC,EAAOlC,IAAWkC,EAAOlC,GAAQmiB,QACrCjgB,EAAOlC,GAAQmiB,OAAO17B,KAAMT,aAKvBshB,EAAK6a,YAKfl5B,GAAOsB,KAAM,CAAE,SAAU,OAAQ,QAAU,SAAU6D,EAAI1E,GACxD,IAAI44B,EAAQr5B,GAAOG,GAAIM,GACvBT,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAgB,MAATo3B,GAAkC,kBAAVA,EAC9BU,EAAM37B,MAAOX,KAAM0E,WACnB1E,KAAKg8B,QAAStC,GAAOh2B,GAAM,GAAQk4B,EAAOrG,EAAQ/wB,MAKrDvB,GAAOsB,KAAM,CACZg4B,UAAW7C,GAAO,QAClB8C,QAAS9C,GAAO,QAChB+C,YAAa/C,GAAO,UACpBgD,OAAQ,CAAEjH,QAAS,QACnBkH,QAAS,CAAElH,QAAS,QACpBmH,WAAY,CAAEnH,QAAS,WACrB,SAAU/xB,EAAMmnB,GAClB5nB,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAOxE,KAAKg8B,QAASnR,EAAO+Q,EAAOrG,EAAQ/wB,MAI7CvB,GAAOo5B,OAAS,GAChBp5B,GAAOs1B,GAAGiB,KAAO,WAChB,IAAIsB,EACH34B,EAAI,EACJk6B,EAASp5B,GAAOo5B,OAIjB,IAFAtD,GAAQ7N,KAAKC,MAELhpB,EAAIk6B,EAAO94B,OAAQpB,KAC1B24B,EAAQuB,EAAQl6B,OAGCk6B,EAAQl6B,KAAQ24B,GAChCuB,EAAO/2B,OAAQnD,IAAK,GAIhBk6B,EAAO94B,QACZN,GAAOs1B,GAAG9V,OAEXsW,QAAQ9yB,GAGThD,GAAOs1B,GAAGuC,MAAQ,SAAUA,GAC3B73B,GAAOo5B,OAAOz7B,KAAMk6B,GACpB73B,GAAOs1B,GAAGjmB,SAGXrP,GAAOs1B,GAAGgB,SAAW,GACrBt2B,GAAOs1B,GAAGjmB,MAAQ,WACZ0mB,KAILA,IAAa,EACbI,OAGDn2B,GAAOs1B,GAAG9V,KAAO,WAChBuW,GAAa,MAGd/1B,GAAOs1B,GAAGsD,OAAS,CAClBgB,KAAM,IACNC,KAAM,IAGNnX,SAAU,KAKX1iB,GAAOG,GAAG25B,MAAQ,SAAUC,EAAMr7B,GAIjC,OAHAq7B,EAAO/5B,GAAOs1B,IAAKt1B,GAAOs1B,GAAGsD,OAAQmB,IAAiBA,EACtDr7B,EAAOA,GAAQ,KAER3B,KAAKkc,MAAOva,EAAM,SAAU8K,EAAM6V,GACxC,IAAI2a,EAAUl9B,GAAO2e,WAAYjS,EAAMuwB,GACvC1a,EAAMG,KAAO,WACZ1iB,GAAOm9B,aAAcD,OAOnB/sB,GAAQtQ,EAAS0C,cAAe,SAEnC22B,GADSr5B,EAAS0C,cAAe,UACpBK,YAAa/C,EAAS0C,cAAe,WAEnD4N,GAAMvO,KAAO,WAIbP,GAAQ+7B,QAA0B,KAAhBjtB,GAAMnI,MAIxB3G,GAAQg8B,YAAcnE,GAAI7kB,UAI1BlE,GAAQtQ,EAAS0C,cAAe,UAC1ByF,MAAQ,IACdmI,GAAMvO,KAAO,QACbP,GAAQi8B,WAA6B,MAAhBntB,GAAMnI,MAI5B,IAAIu1B
,GACH5sB,GAAazN,GAAOqN,KAAKI,WAE1BzN,GAAOG,GAAGmC,OAAQ,CACjBkL,KAAM,SAAU/M,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOwN,KAAM/M,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1Dg6B,WAAY,SAAU75B,GACrB,OAAO1D,KAAKuE,KAAM,WACjBtB,GAAOs6B,WAAYv9B,KAAM0D,QAK5BT,GAAOsC,OAAQ,CACdkL,KAAM,SAAUhN,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAKnC,MAAkC,oBAAtB/5B,EAAKjB,aACTS,GAAOse,KAAM9d,EAAMC,EAAMqE,IAKlB,IAAVy1B,GAAgBv6B,GAAOmE,SAAU3D,KACrC6e,EAAQrf,GAAOw6B,UAAW/5B,EAAKC,iBAC5BV,GAAOqN,KAAKrD,MAAM3B,KAAK7D,KAAM/D,GAAS45B,QAAWr3B,SAGtCA,IAAV8B,EACW,OAAVA,OACJ9E,GAAOs6B,WAAY95B,EAAMC,GAIrB4e,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,GAGRX,EAAKhB,aAAciB,EAAMqE,EAAQ,IAC1BA,GAGHua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAMM,OAHdA,EAAMnB,GAAO4J,KAAK4D,KAAMhN,EAAMC,SAGTuC,EAAY7B,IAGlCq5B,UAAW,CACV97B,KAAM,CACL0f,IAAK,SAAU5d,EAAMsE,GACpB,IAAM3G,GAAQi8B,YAAwB,UAAVt1B,GAC3BvE,GAAUC,EAAM,SAAY,CAC5B,IAAIrB,EAAMqB,EAAKsE,MAKf,OAJAtE,EAAKhB,aAAc,OAAQsF,GACtB3F,IACJqB,EAAKsE,MAAQ3F,GAEP2F,MAMXw1B,WAAY,SAAU95B,EAAMsE,GAC3B,IAAIrE,EACHvB,EAAI,EAIJu7B,EAAY31B,GAASA,EAAMkF,MAAO2N,GAEnC,GAAK8iB,GAA+B,IAAlBj6B,EAAKlC,SACtB,MAAUmC,EAAOg6B,EAAWv7B,KAC3BsB,EAAKwK,gBAAiBvK,MAO1B45B,GAAW,CACVjc,IAAK,SAAU5d,EAAMsE,EAAOrE,GAQ3B,OAPe,IAAVqE,EAGJ9E,GAAOs6B,WAAY95B,EAAMC,GAEzBD,EAAKhB,aAAciB,EAAMA,GAEnBA,IAITT,GAAOsB,KAAMtB,GAAOqN,KAAKrD,MAAM3B,KAAK0X,OAAO/V,MAAO,QAAU,SAAU7E,EAAI1E,GACzE,IAAIi6B,EAASjtB,GAAYhN,IAAUT,GAAO4J,KAAK4D,KAE/CC,GAAYhN,GAAS,SAAUD,EAAMC,EAAM6U,GAC1C,IAAInU,EAAKykB,EACR+U,EAAgBl6B,EAAKC,cAYtB,OAVM4U,IAGLsQ,EAASnY,GAAYktB,GACrBltB,GAAYktB,GAAkBx5B,EAC9BA,EAAqC,MAA/Bu5B,EAAQl6B,EAAMC,EAAM6U,GACzBqlB,EACA,KACDltB,GAAYktB,GAAkB/U,GAExBzkB,KAOT,IAAIy5B,GAAa,sCAChBC,GAAa,gBAwIb,SAASC,GAAkBh2B,GAE1B,OADaA,EAAMkF,MAAO2N,IAAmB,IAC/B9M,KAAM,KAItB,SAASkwB,GAAUv6B,GAClB,OAAOA,EAAKjB,cAAgBiB,EAAKjB,aAAc,UAAa,GAG7D,SAASy7B,GAAgBl2B,GACxB,OAAKhC,MAAMC,QAAS+B,GACZA,EAEc,iBAAVA,GACJA,EAAMkF,MAAO2N,IAEd,GAvJR3X,GAAOG,GAAG
mC,OAAQ,CACjBgc,KAAM,SAAU7d,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOse,KAAM7d,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1D26B,WAAY,SAAUx6B,GACrB,OAAO1D,KAAKuE,KAAM,kBACVvE,KAAMiD,GAAOk7B,QAASz6B,IAAUA,QAK1CT,GAAOsC,OAAQ,CACdgc,KAAM,SAAU9d,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAWnC,OAPe,IAAVA,GAAgBv6B,GAAOmE,SAAU3D,KAGrCC,EAAOT,GAAOk7B,QAASz6B,IAAUA,EACjC4e,EAAQrf,GAAO+0B,UAAWt0B,SAGZuC,IAAV8B,EACCua,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,EAGCX,EAAMC,GAASqE,EAGpBua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAGDX,EAAMC,IAGds0B,UAAW,CACV/jB,SAAU,CACTjQ,IAAK,SAAUP,GAMd,IAAI26B,EAAWn7B,GAAO4J,KAAK4D,KAAMhN,EAAM,YAEvC,OAAK26B,EACGjL,SAAUiL,EAAU,IAI3BP,GAAWp2B,KAAMhE,EAAKD,WACtBs6B,GAAWr2B,KAAMhE,EAAKD,WACtBC,EAAKuQ,KAEE,GAGA,KAKXmqB,QAAS,CACRE,MAAO,UACPC,QAAS,eAYLl9B,GAAQg8B,cACbn6B,GAAO+0B,UAAU5jB,SAAW,CAC3BpQ,IAAK,SAAUP,GAId,IAAI8O,EAAS9O,EAAKb,WAIlB,OAHK2P,GAAUA,EAAO3P,YACrB2P,EAAO3P,WAAWyR,cAEZ,MAERgN,IAAK,SAAU5d,GAId,IAAI8O,EAAS9O,EAAKb,WACb2P,IACJA,EAAO8B,cAEF9B,EAAO3P,YACX2P,EAAO3P,WAAWyR,kBAOvBpR,GAAOsB,KAAM,CACZ,WACA,WACA,YACA,cACA,cACA,UACA,UACA,SACA,cACA,mBACE,WACFtB,GAAOk7B,QAASn+B,KAAK2D,eAAkB3D,OA4BxCiD,GAAOG,GAAGmC,OAAQ,CACjBg5B,SAAU,SAAUx2B,GACnB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAOu+B,SAAUx2B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,WAI1Dw+B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAIjB,GAHAk6B,EAAWT,GAAUh+B,MACrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GACnByX,EAAI/Y,QAAS,IAAMoP,EAAY,KAAQ,IAC3C2J,GAAO3J,EAAY,KAKrByuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,MAGR2+B,YAAa,SAAU52B,GACtB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAO2+B,YAAa52B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,UAIvD0E,UAAUnB,QAIhBi7B,EAAaP,GAAgBl2B,I
AEbxE,OACRvD,KAAKuE,KAAM,WAMjB,GALAk6B,EAAWT,GAAUh+B,MAGrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IAAM,CACzC8N,EAAYuuB,EAAYr8B,GAGxB,OAAgD,EAAxCyX,EAAI/Y,QAAS,IAAMoP,EAAY,KACtC2J,EAAMA,EAAIvT,QAAS,IAAM4J,EAAY,IAAK,KAK5CyuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,KA/BCA,KAAKyQ,KAAM,QAAS,KAkC7BmuB,YAAa,SAAU72B,EAAO82B,GAC7B,IAAIL,EAAYvuB,EAAW9N,EAAG+W,EAC7BvX,SAAcoG,EACd+2B,EAAwB,WAATn9B,GAAqBoE,MAAMC,QAAS+B,GAEpD,OAAK1G,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO4+B,YACd72B,EAAMtH,KAAMT,KAAMmC,EAAG67B,GAAUh+B,MAAQ6+B,GACvCA,KAKsB,kBAAbA,GAA0BC,EAC9BD,EAAW7+B,KAAKu+B,SAAUx2B,GAAU/H,KAAK2+B,YAAa52B,IAG9Dy2B,EAAaP,GAAgBl2B,GAEtB/H,KAAKuE,KAAM,WACjB,GAAKu6B,EAKJ,IAFA5lB,EAAOjW,GAAQjD,MAETmC,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GAGnB+W,EAAK6lB,SAAU9uB,GACnBiJ,EAAKylB,YAAa1uB,GAElBiJ,EAAKqlB,SAAUtuB,aAKIhK,IAAV8B,GAAgC,YAATpG,KAClCsO,EAAY+tB,GAAUh+B,QAIrByhB,EAASJ,IAAKrhB,KAAM,gBAAiBiQ,GAOjCjQ,KAAKyC,cACTzC,KAAKyC,aAAc,QAClBwN,IAAuB,IAAVlI,EACZ,GACA0Z,EAASzd,IAAKhE,KAAM,kBAAqB,SAO/C++B,SAAU,SAAU77B,GACnB,IAAI+M,EAAWxM,EACdtB,EAAI,EAEL8N,EAAY,IAAM/M,EAAW,IAC7B,MAAUO,EAAOzD,KAAMmC,KACtB,GAAuB,IAAlBsB,EAAKlC,WACoE,GAA3E,IAAMw8B,GAAkBC,GAAUv6B,IAAW,KAAM5C,QAASoP,GAC9D,OAAO,EAIT,OAAO,KAOT,IAAI+uB,GAAU,MAEd/7B,GAAOG,GAAGmC,OAAQ,CACjBnD,IAAK,SAAU2F,GACd,IAAIua,EAAOle,EAAKuqB,EACflrB,EAAOzD,KAAM,GAEd,OAAM0E,UAAUnB,QA0BhBorB,EAAkBttB,EAAY0G,GAEvB/H,KAAKuE,KAAM,SAAUpC,GAC3B,IAAIC,EAEmB,IAAlBpC,KAAKuB,WAWE,OANXa,EADIusB,EACE5mB,EAAMtH,KAAMT,KAAMmC,EAAGc,GAAQjD,MAAOoC,OAEpC2F,GAKN3F,EAAM,GAEoB,iBAARA,EAClBA,GAAO,GAEI2D,MAAMC,QAAS5D,KAC1BA,EAAMa,GAAOwB,IAAKrC,EAAK,SAAU2F,GAChC,OAAgB,MAATA,EAAgB,GAAKA,EAAQ,OAItCua,EAAQrf,GAAOg8B,SAAUj/B,KAAK2B,OAAUsB,GAAOg8B,SAAUj/B,KAAKwD,SAASG,iBAGrD,QAAS2e,QAA+Crc,IAApCqc,EAAMjB,IAAKrhB,KAAMoC,EAAK,WAC3DpC,KAAK+H,MAAQ3F,OAzDTqB,GACJ6e,EAAQrf,GAAOg8B,SAAUx7B,EAAK9B,OAC7BsB,GAAOg8B,SAAUx7B,EAAKD,SAASG,iBAG/B,QAAS2e,QACgCrc,KAAvC7B,EAA
Mke,EAAMte,IAAKP,EAAM,UAElBW,EAMY,iBAHpBA,EAAMX,EAAKsE,OAIH3D,EAAIiC,QAAS24B,GAAS,IAIhB,MAAP56B,EAAc,GAAKA,OAG3B,KAyCHnB,GAAOsC,OAAQ,CACd05B,SAAU,CACT5Z,OAAQ,CACPrhB,IAAK,SAAUP,GAEd,IAAIrB,EAAMa,GAAO4J,KAAK4D,KAAMhN,EAAM,SAClC,OAAc,MAAPrB,EACNA,EAMA27B,GAAkB96B,GAAOV,KAAMkB,MAGlCyK,OAAQ,CACPlK,IAAK,SAAUP,GACd,IAAIsE,EAAOsd,EAAQljB,EAClBqD,EAAU/B,EAAK+B,QACfwU,EAAQvW,EAAK4Q,cACbgT,EAAoB,eAAd5jB,EAAK9B,KACX6iB,EAAS6C,EAAM,KAAO,GACtBkN,EAAMlN,EAAMrN,EAAQ,EAAIxU,EAAQjC,OAUjC,IAPCpB,EADI6X,EAAQ,EACRua,EAGAlN,EAAMrN,EAAQ,EAIX7X,EAAIoyB,EAAKpyB,IAKhB,KAJAkjB,EAAS7f,EAASrD,IAIJiS,UAAYjS,IAAM6X,KAG7BqL,EAAO9Y,YACL8Y,EAAOziB,WAAW2J,WACnB/I,GAAU6hB,EAAOziB,WAAY,aAAiB,CAMjD,GAHAmF,EAAQ9E,GAAQoiB,GAASjjB,MAGpBilB,EACJ,OAAOtf,EAIRyc,EAAO5jB,KAAMmH,GAIf,OAAOyc,GAGRnD,IAAK,SAAU5d,EAAMsE,GACpB,IAAIm3B,EAAW7Z,EACd7f,EAAU/B,EAAK+B,QACfgf,EAASvhB,GAAOgE,UAAWc,GAC3B5F,EAAIqD,EAAQjC,OAEb,MAAQpB,MACPkjB,EAAS7f,EAASrD,IAINiS,UACuD,EAAlEnR,GAAOkE,QAASlE,GAAOg8B,SAAS5Z,OAAOrhB,IAAKqhB,GAAUb,MAEtD0a,GAAY,GAUd,OAHMA,IACLz7B,EAAK4Q,eAAiB,GAEhBmQ,OAOXvhB,GAAOsB,KAAM,CAAE,QAAS,YAAc,WACrCtB,GAAOg8B,SAAUj/B,MAAS,CACzBqhB,IAAK,SAAU5d,EAAMsE,GACpB,GAAKhC,MAAMC,QAAS+B,GACnB,OAAStE,EAAK0Q,SAA2D,EAAjDlR,GAAOkE,QAASlE,GAAQQ,GAAOrB,MAAO2F,KAI3D3G,GAAQ+7B,UACbl6B,GAAOg8B,SAAUj/B,MAAOgE,IAAM,SAAUP,GACvC,OAAwC,OAAjCA,EAAKjB,aAAc,SAAqB,KAAOiB,EAAKsE,UAS9D,IAAI0L,GAAW1T,GAAO0T,SAElB5R,GAAQ,CAAEmG,KAAMkjB,KAAKC,OAErBgU,GAAS,KAKbl8B,GAAOm8B,SAAW,SAAU9d,GAC3B,IAAInP,EAAKktB,EACT,IAAM/d,GAAwB,iBAATA,EACpB,OAAO,KAKR,IACCnP,GAAM,IAAMpS,GAAOu/B,WAAcC,gBAAiBje,EAAM,YACvD,MAAQ3U,IAYV,OAVA0yB,EAAkBltB,GAAOA,EAAI3E,qBAAsB,eAAiB,GAC9D2E,IAAOktB,GACZp8B,GAAOsD,MAAO,iBACb84B,EACCp8B,GAAOwB,IAAK46B,EAAgB3yB,WAAY,SAAUgC,GACjD,OAAOA,EAAG5H,cACPgH,KAAM,MACVwT,IAGInP,GAIR,IAAIqtB,GAAc,kCACjBC,GAA0B,SAAU9yB,GACnCA,EAAEmb,mBAGJ7kB,GAAOsC,OAAQtC,GAAOskB,MAAO,CAE5BU,QAAS,SAAUV,EAAOjG,EAAM7d,EAAMi8B,GAErC,IAAIv9B,EAAGyX,EAAKgJ,EAAK+c,EAAYC,EAAQ/W,EAAQ9K,EAAS8hB,EACrDC,EAAY,CAAEr8B,GAAQ7D,GACtB+B,EAAOX,GAAOP,KAAM8mB,EAAO,QAAWA,EAAM5lB
,KAAO4lB,EACnDkB,EAAaznB,GAAOP,KAAM8mB,EAAO,aAAgBA,EAAMlgB,UAAUc,MAAO,KAAQ,GAKjF,GAHAyR,EAAMimB,EAAcjd,EAAMnf,EAAOA,GAAQ7D,EAGlB,IAAlB6D,EAAKlC,UAAoC,IAAlBkC,EAAKlC,WAK5Bi+B,GAAY/3B,KAAM9F,EAAOsB,GAAOskB,MAAMuB,cAIf,EAAvBnnB,EAAKd,QAAS,OAIlBc,GADA8mB,EAAa9mB,EAAKwG,MAAO,MACPoG,QAClBka,EAAWpjB,QAEZu6B,EAASj+B,EAAKd,QAAS,KAAQ,GAAK,KAAOc,GAG3C4lB,EAAQA,EAAOtkB,GAAOiD,SACrBqhB,EACA,IAAItkB,GAAOmnB,MAAOzoB,EAAuB,iBAAV4lB,GAAsBA,IAGhDK,UAAY8X,EAAe,EAAI,EACrCnY,EAAMlgB,UAAYohB,EAAW3a,KAAM,KACnCyZ,EAAMuC,WAAavC,EAAMlgB,UACxB,IAAImB,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAC7D,KAGDyZ,EAAM3V,YAAS3L,EACTshB,EAAM3hB,SACX2hB,EAAM3hB,OAASnC,GAIhB6d,EAAe,MAARA,EACN,CAAEiG,GACFtkB,GAAOgE,UAAWqa,EAAM,CAAEiG,IAG3BxJ,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GACpC+9B,IAAgB3hB,EAAQkK,UAAmD,IAAxClK,EAAQkK,QAAQtnB,MAAO8C,EAAM6d,IAAtE,CAMA,IAAMoe,IAAiB3hB,EAAQ0M,WAAahpB,EAAUgC,GAAS,CAM9D,IAJAk8B,EAAa5hB,EAAQ8J,cAAgBlmB,EAC/B69B,GAAY/3B,KAAMk4B,EAAah+B,KACpCiY,EAAMA,EAAIhX,YAEHgX,EAAKA,EAAMA,EAAIhX,WACtBk9B,EAAUl/B,KAAMgZ,GAChBgJ,EAAMhJ,EAIFgJ,KAAUnf,EAAK+D,eAAiB5H,IACpCkgC,EAAUl/B,KAAMgiB,EAAIvT,aAAeuT,EAAImd,cAAgBhgC,IAKzDoC,EAAI,EACJ,OAAUyX,EAAMkmB,EAAW39B,QAAYolB,EAAMqC,uBAC5CiW,EAAcjmB,EACd2N,EAAM5lB,KAAW,EAAJQ,EACZw9B,EACA5hB,EAAQiL,UAAYrnB,GAGrBknB,GAAWpH,EAASzd,IAAK4V,EAAK,WAAcxZ,OAAOwoB,OAAQ,OAAUrB,EAAM5lB,OAC1E8f,EAASzd,IAAK4V,EAAK,YAEnBiP,EAAOloB,MAAOiZ,EAAK0H,IAIpBuH,EAAS+W,GAAUhmB,EAAKgmB,KACT/W,EAAOloB,OAASogB,EAAYnH,KAC1C2N,EAAM3V,OAASiX,EAAOloB,MAAOiZ,EAAK0H,IACZ,IAAjBiG,EAAM3V,QACV2V,EAAMS,kBA8CT,OA1CAT,EAAM5lB,KAAOA,EAGP+9B,GAAiBnY,EAAMuD,sBAEpB/M,EAAQ4H,WACqC,IAApD5H,EAAQ4H,SAAShlB,MAAOm/B,EAAUz3B,MAAOiZ,KACzCP,EAAYtd,IAIPm8B,GAAUv+B,EAAYoC,EAAM9B,MAAaF,EAAUgC,MAGvDmf,EAAMnf,EAAMm8B,MAGXn8B,EAAMm8B,GAAW,MAIlB38B,GAAOskB,MAAMuB,UAAYnnB,EAEpB4lB,EAAMqC,wBACViW,EAAYtwB,iBAAkB5N,EAAM89B,IAGrCh8B,EAAM9B,KAED4lB,EAAMqC,wBACViW,EAAYjgB,oBAAqBje,EAAM89B,IAGxCx8B,GAAOskB,MAAMuB,eAAY7iB,EAEpB2c,IACJnf,EAAMm8B,GAAWhd,IAMd2E,EAAM3V,SAKdwb,SAAU,SAAUzrB,EAAM8B,EAAM8jB,GAC/B,IAAI5a,EAAI1J,GAAOsC,OACd
,IAAItC,GAAOmnB,MACX7C,EACA,CACC5lB,KAAMA,EACNypB,aAAa,IAIfnoB,GAAOskB,MAAMU,QAAStb,EAAG,KAAMlJ,MAKjCR,GAAOG,GAAGmC,OAAQ,CAEjB0iB,QAAS,SAAUtmB,EAAM2f,GACxB,OAAOthB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAMthB,SAGpCggC,eAAgB,SAAUr+B,EAAM2f,GAC/B,IAAI7d,EAAOzD,KAAM,GACjB,GAAKyD,EACJ,OAAOR,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAM7d,GAAM,MAMlD,IACCw8B,GAAW,QACXC,GAAQ,SACRC,GAAkB,wCAClBC,GAAe,qCAEhB,SAASC,GAAa1I,EAAQr2B,EAAKg/B,EAAapmB,GAC/C,IAAIxW,EAEJ,GAAKqC,MAAMC,QAAS1E,GAGnB2B,GAAOsB,KAAMjD,EAAK,SAAUa,EAAG2Y,GACzBwlB,GAAeL,GAASx4B,KAAMkwB,GAGlCzd,EAAKyd,EAAQ7c,GAKbulB,GACC1I,EAAS,KAAqB,iBAAN7c,GAAuB,MAALA,EAAY3Y,EAAI,IAAO,IACjE2Y,EACAwlB,EACApmB,UAKG,GAAMomB,GAAiC,WAAlBx9B,EAAQxB,GAUnC4Y,EAAKyd,EAAQr2B,QAPb,IAAMoC,KAAQpC,EACb++B,GAAa1I,EAAS,IAAMj0B,EAAO,IAAKpC,EAAKoC,GAAQ48B,EAAapmB,GAYrEjX,GAAOs9B,MAAQ,SAAU73B,EAAG43B,GAC3B,IAAI3I,EACH6I,EAAI,GACJtmB,EAAM,SAAU7L,EAAKoyB,GAGpB,IAAI14B,EAAQ1G,EAAYo/B,GACvBA,IACAA,EAEDD,EAAGA,EAAEj9B,QAAWm9B,mBAAoBryB,GAAQ,IAC3CqyB,mBAA6B,MAAT34B,EAAgB,GAAKA,IAG5C,GAAU,MAALW,EACJ,MAAO,GAIR,GAAK3C,MAAMC,QAAS0C,IAASA,EAAE7E,SAAWZ,GAAO6C,cAAe4C,GAG/DzF,GAAOsB,KAAMmE,EAAG,WACfwR,EAAKla,KAAK0D,KAAM1D,KAAK+H,cAOtB,IAAM4vB,KAAUjvB,EACf23B,GAAa1I,EAAQjvB,EAAGivB,GAAU2I,EAAapmB,GAKjD,OAAOsmB,EAAE1yB,KAAM,MAGhB7K,GAAOG,GAAGmC,OAAQ,CACjBo7B,UAAW,WACV,OAAO19B,GAAOs9B,MAAOvgC,KAAK4gC,mBAE3BA,eAAgB,WACf,OAAO5gC,KAAKyE,IAAK,WAGhB,IAAI8L,EAAWtN,GAAOse,KAAMvhB,KAAM,YAClC,OAAOuQ,EAAWtN,GAAOgE,UAAWsJ,GAAavQ,OAC9C6P,OAAQ,WACX,IAAIlO,EAAO3B,KAAK2B,KAGhB,OAAO3B,KAAK0D,OAAST,GAAQjD,MAAO2Y,GAAI,cACvCynB,GAAa34B,KAAMzH,KAAKwD,YAAe28B,GAAgB14B,KAAM9F,KAC3D3B,KAAKmU,UAAY0Q,GAAepd,KAAM9F,MACtC8C,IAAK,SAAU2D,EAAI3E,GACtB,IAAIrB,EAAMa,GAAQjD,MAAOoC,MAEzB,OAAY,MAAPA,EACG,KAGH2D,MAAMC,QAAS5D,GACZa,GAAOwB,IAAKrC,EAAK,SAAUA,GACjC,MAAO,CAAEsB,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAIhD,CAAEx8B,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAClDl8B,SAKN,IACC68B,GAAM,OACNC,GAAQ,OACRC,GAAa,gBACbC,GAAW,6BAIXC,GAAa,iBACbC,GAAY,QAWZhH,GAAa,GAObiH,GAAa,GAGbC,GAAW,
KAAK1gC,OAAQ,KAGxB2gC,GAAezhC,EAAS0C,cAAe,KAKxC,SAASg/B,GAA6BC,GAGrC,OAAO,SAAUC,EAAoB5kB,GAED,iBAAvB4kB,IACX5kB,EAAO4kB,EACPA,EAAqB,KAGtB,IAAIC,EACHt/B,EAAI,EACJu/B,EAAYF,EAAmB79B,cAAcsJ,MAAO2N,IAAmB,GAExE,GAAKvZ,EAAYub,GAGhB,MAAU6kB,EAAWC,EAAWv/B,KAGR,MAAlBs/B,EAAU,IACdA,EAAWA,EAASnhC,MAAO,IAAO,KAChCihC,EAAWE,GAAaF,EAAWE,IAAc,IAAKjf,QAAS5F,KAI/D2kB,EAAWE,GAAaF,EAAWE,IAAc,IAAK7gC,KAAMgc,IAQnE,SAAS+kB,GAA+BJ,EAAW/7B,EAASi1B,EAAiBmH,GAE5E,IAAIC,EAAY,GACfC,EAAqBP,IAAcJ,GAEpC,SAASY,EAASN,GACjB,IAAIrtB,EAcJ,OAbAytB,EAAWJ,IAAa,EACxBx+B,GAAOsB,KAAMg9B,EAAWE,IAAc,GAAI,SAAU9lB,EAAGqmB,GACtD,IAAIC,EAAsBD,EAAoBx8B,EAASi1B,EAAiBmH,GACxE,MAAoC,iBAAxBK,GACVH,GAAqBD,EAAWI,GAKtBH,IACD1tB,EAAW6tB,QADf,GAHNz8B,EAAQk8B,UAAUlf,QAASyf,GAC3BF,EAASE,IACF,KAKF7tB,EAGR,OAAO2tB,EAASv8B,EAAQk8B,UAAW,MAAUG,EAAW,MAASE,EAAS,KAM3E,SAASG,GAAYt8B,EAAQhE,GAC5B,IAAIyM,EAAKxI,EACRs8B,EAAcl/B,GAAOm/B,aAAaD,aAAe,GAElD,IAAM9zB,KAAOzM,OACQqE,IAAfrE,EAAKyM,MACP8zB,EAAa9zB,GAAQzI,EAAWC,IAAUA,EAAO,KAAUwI,GAAQzM,EAAKyM,IAO5E,OAJKxI,GACJ5C,GAAOsC,QAAQ,EAAMK,EAAQC,GAGvBD,EA/ERy7B,GAAartB,KAAOP,GAASO,KAgP7B/Q,GAAOsC,OAAQ,CAGd88B,OAAQ,EAGRC,aAAc,GACdC,KAAM,GAENH,aAAc,CACbI,IAAK/uB,GAASO,KACdrS,KAAM,MACN8gC,QAxRgB,4DAwRQh7B,KAAMgM,GAASivB,UACvCljC,QAAQ,EACRmjC,aAAa,EACbC,OAAO,EACPC,YAAa,mDAcbC,QAAS,CACR9H,IAAKoG,GACL7+B,KAAM,aACNqsB,KAAM,YACNzc,IAAK,4BACL4wB,KAAM,qCAGPtpB,SAAU,CACTtH,IAAK,UACLyc,KAAM,SACNmU,KAAM,YAGPC,eAAgB,CACf7wB,IAAK,cACL5P,KAAM,eACNwgC,KAAM,gBAKPE,WAAY,CAGXC,SAAUj3B,OAGVk3B,aAAa,EAGbC,YAAathB,KAAKC,MAGlBshB,WAAYpgC,GAAOm8B,UAOpB+C,YAAa,CACZK,KAAK,EACLr/B,SAAS,IAOXmgC,UAAW,SAAU19B,EAAQ29B,GAC5B,OAAOA,EAGNrB,GAAYA,GAAYt8B,EAAQ3C,GAAOm/B,cAAgBmB,GAGvDrB,GAAYj/B,GAAOm/B,aAAcx8B,IAGnC49B,cAAelC,GAA6BpH,IAC5CuJ,cAAenC,GAA6BH,IAG5CuC,KAAM,SAAUlB,EAAKh9B,GAGA,iBAARg9B,IACXh9B,EAAUg9B,EACVA,OAAMv8B,GAIPT,EAAUA,GAAW,GAErB,IAAIm+B,EAGHC,EAGAC,EACAC,EAGAC,EAGAC,EAGArkB,EAGAskB,EAGA9hC,EAGA+hC,EAGA1D,EAAIv9B,GAAOqgC,UAAW,GAAI99B,GAG1B2+B,EAAkB3D,EAAEr9B,SAAWq9B,EAG/B4D,EAAqB5D,EAAEr9B,UACpBghC,EAAgB5iC,UAA
Y4iC,EAAgBtgC,QAC9CZ,GAAQkhC,GACRlhC,GAAOskB,MAGRvK,EAAW/Z,GAAO0Z,WAClB0nB,EAAmBphC,GAAOwY,UAAW,eAGrC6oB,EAAa9D,EAAE8D,YAAc,GAG7BC,EAAiB,GACjBC,EAAsB,GAGtBC,EAAW,WAGX7C,EAAQ,CACP7hB,WAAY,EAGZ2kB,kBAAmB,SAAUr2B,GAC5B,IAAIpB,EACJ,GAAK0S,EAAY,CAChB,IAAMmkB,EAAkB,CACvBA,EAAkB,GAClB,MAAU72B,EAAQ+zB,GAAS3zB,KAAMw2B,GAChCC,EAAiB72B,EAAO,GAAItJ,cAAgB,MACzCmgC,EAAiB72B,EAAO,GAAItJ,cAAgB,MAAS,IACrDjD,OAAQuM,EAAO,IAGpBA,EAAQ62B,EAAiBz1B,EAAI1K,cAAgB,KAE9C,OAAgB,MAATsJ,EAAgB,KAAOA,EAAMa,KAAM,OAI3C62B,sBAAuB,WACtB,OAAOhlB,EAAYkkB,EAAwB,MAI5Ce,iBAAkB,SAAUlhC,EAAMqE,GAMjC,OALkB,MAAb4X,IACJjc,EAAO8gC,EAAqB9gC,EAAKC,eAChC6gC,EAAqB9gC,EAAKC,gBAAmBD,EAC9C6gC,EAAgB7gC,GAASqE,GAEnB/H,MAIR6kC,iBAAkB,SAAUljC,GAI3B,OAHkB,MAAbge,IACJ6gB,EAAEsE,SAAWnjC,GAEP3B,MAIRskC,WAAY,SAAU7/B,GACrB,IAAIzC,EACJ,GAAKyC,EACJ,GAAKkb,EAGJiiB,EAAM7kB,OAAQtY,EAAKm9B,EAAMmD,cAIzB,IAAM/iC,KAAQyC,EACb6/B,EAAYtiC,GAAS,CAAEsiC,EAAYtiC,GAAQyC,EAAKzC,IAInD,OAAOhC,MAIRglC,MAAO,SAAUC,GAChB,IAAIC,EAAYD,GAAcR,EAK9B,OAJKd,GACJA,EAAUqB,MAAOE,GAElBp7B,EAAM,EAAGo7B,GACFllC,OAoBV,GAfAgd,EAAS1B,QAASsmB,GAKlBpB,EAAEgC,MAAUA,GAAOhC,EAAEgC,KAAO/uB,GAASO,MAAS,IAC5C3N,QAAS66B,GAAWztB,GAASivB,SAAW,MAG1ClC,EAAE7+B,KAAO6D,EAAQ6V,QAAU7V,EAAQ7D,MAAQ6+B,EAAEnlB,QAAUmlB,EAAE7+B,KAGzD6+B,EAAEkB,WAAclB,EAAEiB,UAAY,KAAM99B,cAAcsJ,MAAO2N,IAAmB,CAAE,IAGxD,MAAjB4lB,EAAE2E,YAAsB,CAC5BnB,EAAYpkC,EAAS0C,cAAe,KAKpC,IACC0hC,EAAUhwB,KAAOwsB,EAAEgC,IAInBwB,EAAUhwB,KAAOgwB,EAAUhwB,KAC3BwsB,EAAE2E,YAAc9D,GAAaqB,SAAW,KAAOrB,GAAa+D,MAC3DpB,EAAUtB,SAAW,KAAOsB,EAAUoB,KACtC,MAAQz4B,GAIT6zB,EAAE2E,aAAc,GAalB,GARK3E,EAAElf,MAAQkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,OACxCkf,EAAElf,KAAOre,GAAOs9B,MAAOC,EAAElf,KAAMkf,EAAEF,cAIlCqB,GAA+BzH,GAAYsG,EAAGh7B,EAASo8B,GAGlDjiB,EACJ,OAAOiiB,EA8ER,IAAMz/B,KAzEN8hC,EAAchhC,GAAOskB,OAASiZ,EAAEhhC,SAGQ,GAApByD,GAAOo/B,UAC1Bp/B,GAAOskB,MAAMU,QAAS,aAIvBuY,EAAE7+B,KAAO6+B,EAAE7+B,KAAKif,cAGhB4f,EAAE6E,YAAcpE,GAAWx5B,KAAM+4B,EAAE7+B,MAKnCiiC,EAAWpD,EAAEgC,IAAIn8B,QAASy6B,GAAO,IAG3BN,EAAE6E,WAwBI7E,EAAElf,MAAQkf,EAAEmC,aACoD,KAAzEnC,EAAEq
C,aAAe,IAAKhiC,QAAS,uCACjC2/B,EAAElf,KAAOkf,EAAElf,KAAKjb,QAASw6B,GAAK,OAvB9BqD,EAAW1D,EAAEgC,IAAIliC,MAAOsjC,EAASrgC,QAG5Bi9B,EAAElf,OAAUkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,QAC1CsiB,IAAczE,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQpD,EAAElf,YAGjDkf,EAAElf,OAIO,IAAZkf,EAAEpyB,QACNw1B,EAAWA,EAASv9B,QAAS06B,GAAY,MACzCmD,GAAa/E,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQ,KAAS/hC,GAAMmG,OACnEk8B,GAIF1D,EAAEgC,IAAMoB,EAAWM,GASf1D,EAAE8E,aACDriC,GAAOq/B,aAAcsB,IACzBhC,EAAMgD,iBAAkB,oBAAqB3hC,GAAOq/B,aAAcsB,IAE9D3gC,GAAOs/B,KAAMqB,IACjBhC,EAAMgD,iBAAkB,gBAAiB3hC,GAAOs/B,KAAMqB,MAKnDpD,EAAElf,MAAQkf,EAAE6E,aAAgC,IAAlB7E,EAAEqC,aAAyBr9B,EAAQq9B,cACjEjB,EAAMgD,iBAAkB,eAAgBpE,EAAEqC,aAI3CjB,EAAMgD,iBACL,SACApE,EAAEkB,UAAW,IAAOlB,EAAEsC,QAAStC,EAAEkB,UAAW,IAC3ClB,EAAEsC,QAAStC,EAAEkB,UAAW,KACA,MAArBlB,EAAEkB,UAAW,GAAc,KAAON,GAAW,WAAa,IAC7DZ,EAAEsC,QAAS,MAIFtC,EAAE+E,QACZ3D,EAAMgD,iBAAkBziC,EAAGq+B,EAAE+E,QAASpjC,IAIvC,GAAKq+B,EAAEgF,cAC+C,IAAnDhF,EAAEgF,WAAW/kC,KAAM0jC,EAAiBvC,EAAOpB,IAAiB7gB,GAG9D,OAAOiiB,EAAMoD,QAed,GAXAP,EAAW,QAGXJ,EAAiBnqB,IAAKsmB,EAAE3F,UACxB+G,EAAM93B,KAAM02B,EAAEiF,SACd7D,EAAMrmB,KAAMilB,EAAEj6B,OAGdo9B,EAAYhC,GAA+BR,GAAYX,EAAGh7B,EAASo8B,GAK5D,CASN,GARAA,EAAM7hB,WAAa,EAGdkkB,GACJG,EAAmBnc,QAAS,WAAY,CAAE2Z,EAAOpB,IAI7C7gB,EACJ,OAAOiiB,EAIHpB,EAAEoC,OAAqB,EAAZpC,EAAEvD,UACjB8G,EAAehkC,GAAO2e,WAAY,WACjCkjB,EAAMoD,MAAO,YACXxE,EAAEvD,UAGN,IACCtd,GAAY,EACZgkB,EAAU+B,KAAMnB,EAAgBz6B,GAC/B,MAAQ6C,GAGT,GAAKgT,EACJ,MAAMhT,EAIP7C,GAAO,EAAG6C,SAhCX7C,GAAO,EAAG,gBAqCX,SAASA,EAAMi7B,EAAQY,EAAkBC,EAAWL,GACnD,IAAIM,EAAWJ,EAASl/B,EAAOu/B,EAAUC,EACxCd,EAAaU,EAGThmB,IAILA,GAAY,EAGPokB,GACJhkC,GAAOm9B,aAAc6G,GAKtBJ,OAAY19B,EAGZ49B,EAAwB0B,GAAW,GAGnC3D,EAAM7hB,WAAsB,EAATglB,EAAa,EAAI,EAGpCc,EAAsB,KAAVd,GAAiBA,EAAS,KAAkB,MAAXA,EAGxCa,IACJE,EA7lBJ,SAA8BtF,EAAGoB,EAAOgE,GAEvC,IAAII,EAAIrkC,EAAMskC,EAAeC,EAC5BzsB,EAAW+mB,EAAE/mB,SACbioB,EAAYlB,EAAEkB,UAGf,MAA2B,MAAnBA,EAAW,GAClBA,EAAUnzB,aACEtI,IAAP+/B,IACJA,EAAKxF,EAAEsE,UAAYlD,EAAM8C,kBAAmB,iBAK9C,GAAKsB,EACJ,IAAMrkC,KAAQ8X,EACb,GAAKA,EAAU9X,IAAU8X,EAAU9X
,GAAO8F,KAAMu+B,GAAO,CACtDtE,EAAUlf,QAAS7gB,GACnB,MAMH,GAAK+/B,EAAW,KAAOkE,EACtBK,EAAgBvE,EAAW,OACrB,CAGN,IAAM//B,KAAQikC,EAAY,CACzB,IAAMlE,EAAW,IAAOlB,EAAEyC,WAAYthC,EAAO,IAAM+/B,EAAW,IAAQ,CACrEuE,EAAgBtkC,EAChB,MAEKukC,IACLA,EAAgBvkC,GAKlBskC,EAAgBA,GAAiBC,EAMlC,GAAKD,EAIJ,OAHKA,IAAkBvE,EAAW,IACjCA,EAAUlf,QAASyjB,GAEbL,EAAWK,GA0iBLE,CAAqB3F,EAAGoB,EAAOgE,KAIrCC,IACsC,EAA3C5iC,GAAOkE,QAAS,SAAUq5B,EAAEkB,YAC5Bz+B,GAAOkE,QAAS,OAAQq5B,EAAEkB,WAAc,IACxClB,EAAEyC,WAAY,eAAkB,cAIjC6C,EA9iBH,SAAsBtF,EAAGsF,EAAUlE,EAAOiE,GACzC,IAAIO,EAAOC,EAASC,EAAM1jB,EAAKlJ,EAC9BupB,EAAa,GAGbvB,EAAYlB,EAAEkB,UAAUphC,QAGzB,GAAKohC,EAAW,GACf,IAAM4E,KAAQ9F,EAAEyC,WACfA,EAAYqD,EAAK3iC,eAAkB68B,EAAEyC,WAAYqD,GAInDD,EAAU3E,EAAUnzB,QAGpB,MAAQ83B,EAcP,GAZK7F,EAAEwC,eAAgBqD,KACtBzE,EAAOpB,EAAEwC,eAAgBqD,IAAcP,IAIlCpsB,GAAQmsB,GAAarF,EAAE+F,aAC5BT,EAAWtF,EAAE+F,WAAYT,EAAUtF,EAAEiB,WAGtC/nB,EAAO2sB,EACPA,EAAU3E,EAAUnzB,QAKnB,GAAiB,MAAZ83B,EAEJA,EAAU3sB,OAGJ,GAAc,MAATA,GAAgBA,IAAS2sB,EAAU,CAM9C,KAHAC,EAAOrD,EAAYvpB,EAAO,IAAM2sB,IAAapD,EAAY,KAAOoD,IAI/D,IAAMD,KAASnD,EAId,IADArgB,EAAMwjB,EAAMj+B,MAAO,MACT,KAAQk+B,IAGjBC,EAAOrD,EAAYvpB,EAAO,IAAMkJ,EAAK,KACpCqgB,EAAY,KAAOrgB,EAAK,KACb,EAGG,IAAT0jB,EACJA,EAAOrD,EAAYmD,IAGgB,IAAxBnD,EAAYmD,KACvBC,EAAUzjB,EAAK,GACf8e,EAAUlf,QAASI,EAAK,KAEzB,MAOJ,IAAc,IAAT0jB,EAGJ,GAAKA,GAAQ9F,EAAEgG,UACdV,EAAWQ,EAAMR,QAEjB,IACCA,EAAWQ,EAAMR,GAChB,MAAQn5B,GACT,MAAO,CACNmQ,MAAO,cACPvW,MAAO+/B,EAAO35B,EAAI,sBAAwB+M,EAAO,OAAS2sB,IASjE,MAAO,CAAEvpB,MAAO,UAAWwE,KAAMwkB,GAidpBW,CAAajG,EAAGsF,EAAUlE,EAAOiE,GAGvCA,GAGCrF,EAAE8E,cACNS,EAAWnE,EAAM8C,kBAAmB,oBAEnCzhC,GAAOq/B,aAAcsB,GAAamC,IAEnCA,EAAWnE,EAAM8C,kBAAmB,WAEnCzhC,GAAOs/B,KAAMqB,GAAamC,IAKZ,MAAXhB,GAA6B,SAAXvE,EAAE7+B,KACxBsjC,EAAa,YAGS,MAAXF,EACXE,EAAa,eAIbA,EAAaa,EAAShpB,MACtB2oB,EAAUK,EAASxkB,KAEnBukB,IADAt/B,EAAQu/B,EAASv/B,UAMlBA,EAAQ0+B,GACHF,GAAWE,IACfA,EAAa,QACRF,EAAS,IACbA,EAAS,KAMZnD,EAAMmD,OAASA,EACfnD,EAAMqD,YAAeU,GAAoBV,GAAe,GAGnDY,EACJ7oB,EAASoB,YAAa+lB,EAAiB,CAAEsB,EAASR,EAAYrD,IAE9D5kB,EAASuB,WAAY4lB,EAAiB,CA
AEvC,EAAOqD,EAAY1+B,IAI5Dq7B,EAAM0C,WAAYA,GAClBA,OAAar+B,EAERg+B,GACJG,EAAmBnc,QAAS4d,EAAY,cAAgB,YACvD,CAAEjE,EAAOpB,EAAGqF,EAAYJ,EAAUl/B,IAIpC89B,EAAiB3nB,SAAUynB,EAAiB,CAAEvC,EAAOqD,IAEhDhB,IACJG,EAAmBnc,QAAS,eAAgB,CAAE2Z,EAAOpB,MAG3Cv9B,GAAOo/B,QAChBp/B,GAAOskB,MAAMU,QAAS,cAKzB,OAAO2Z,GAGR8E,QAAS,SAAUlE,EAAKlhB,EAAM9c,GAC7B,OAAOvB,GAAOe,IAAKw+B,EAAKlhB,EAAM9c,EAAU,SAGzCmiC,UAAW,SAAUnE,EAAKh+B,GACzB,OAAOvB,GAAOe,IAAKw+B,OAAKv8B,EAAWzB,EAAU,aAI/CvB,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAIiT,GAC7CpY,GAAQoY,GAAW,SAAUmnB,EAAKlhB,EAAM9c,EAAU7C,GAUjD,OAPKN,EAAYigB,KAChB3f,EAAOA,GAAQ6C,EACfA,EAAW8c,EACXA,OAAOrb,GAIDhD,GAAOygC,KAAMzgC,GAAOsC,OAAQ,CAClCi9B,IAAKA,EACL7gC,KAAM0Z,EACNomB,SAAU9/B,EACV2f,KAAMA,EACNmkB,QAASjhC,GACPvB,GAAO6C,cAAe08B,IAASA,OAIpCv/B,GAAOugC,cAAe,SAAUhD,GAC/B,IAAIr+B,EACJ,IAAMA,KAAKq+B,EAAE+E,QACa,iBAApBpjC,EAAEwB,gBACN68B,EAAEqC,YAAcrC,EAAE+E,QAASpjC,IAAO,MAMrCc,GAAO4rB,SAAW,SAAU2T,EAAKh9B,EAAStD,GACzC,OAAOe,GAAOygC,KAAM,CACnBlB,IAAKA,EAGL7gC,KAAM,MACN8/B,SAAU,SACVrzB,OAAO,EACPw0B,OAAO,EACPpjC,QAAQ,EAKRyjC,WAAY,CACX2D,cAAe,cAEhBL,WAAY,SAAUT,GACrB7iC,GAAO4D,WAAYi/B,EAAUtgC,EAAStD,OAMzCe,GAAOG,GAAGmC,OAAQ,CACjBshC,QAAS,SAAUjY,GAClB,IAAIlI,EAyBJ,OAvBK1mB,KAAM,KACLqB,EAAYutB,KAChBA,EAAOA,EAAKnuB,KAAMT,KAAM,KAIzB0mB,EAAOzjB,GAAQ2rB,EAAM5uB,KAAM,GAAIwH,eAAgB5C,GAAI,GAAIe,OAAO,GAEzD3F,KAAM,GAAI4C,YACd8jB,EAAK8I,aAAcxvB,KAAM,IAG1B0mB,EAAKjiB,IAAK,WACT,IAAIhB,EAAOzD,KAEX,MAAQyD,EAAKqjC,kBACZrjC,EAAOA,EAAKqjC,kBAGb,OAAOrjC,IACJ6rB,OAAQtvB,OAGNA,MAGR+mC,UAAW,SAAUnY,GACpB,OAAKvtB,EAAYutB,GACT5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO+mC,UAAWnY,EAAKnuB,KAAMT,KAAMmC,MAItCnC,KAAKuE,KAAM,WACjB,IAAI2U,EAAOjW,GAAQjD,MAClByZ,EAAWP,EAAKO,WAEZA,EAASlW,OACbkW,EAASotB,QAASjY,GAGlB1V,EAAKoW,OAAQV,MAKhBlI,KAAM,SAAUkI,GACf,IAAIoY,EAAiB3lC,EAAYutB,GAEjC,OAAO5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO6mC,QAASG,EAAiBpY,EAAKnuB,KAAMT,KAAMmC,GAAMysB,MAIlEqY,OAAQ,SAAU/jC,GAIjB,OAHAlD,KAAKuS,OAAQrP,GAAW+P,IAAK,QAAS1O,KAAM,WAC3CtB,GAAQjD,MAAO2vB,YAAa3vB,KAAK0M,cAE3B1M,QAKTiD,GAAOqN,KAAK
9F,QAAQ6uB,OAAS,SAAU51B,GACtC,OAAQR,GAAOqN,KAAK9F,QAAQ08B,QAASzjC,IAEtCR,GAAOqN,KAAK9F,QAAQ08B,QAAU,SAAUzjC,GACvC,SAAWA,EAAK0uB,aAAe1uB,EAAK6vB,cAAgB7vB,EAAK4xB,iBAAiB9xB,SAM3EN,GAAOm/B,aAAa+E,IAAM,WACzB,IACC,OAAO,IAAIpnC,GAAOqnC,eACjB,MAAQz6B,MAGX,IAAI06B,GAAmB,CAGrBC,EAAG,IAIHC,KAAM,KAEPC,GAAevkC,GAAOm/B,aAAa+E,MAEpC/lC,GAAQqmC,OAASD,IAAkB,oBAAqBA,GACxDpmC,GAAQsiC,KAAO8D,KAAiBA,GAEhCvkC,GAAOwgC,cAAe,SAAUj+B,GAC/B,IAAIhB,EAAUkjC,EAGd,GAAKtmC,GAAQqmC,MAAQD,KAAiBhiC,EAAQ2/B,YAC7C,MAAO,CACNO,KAAM,SAAUH,EAAS1K,GACxB,IAAI14B,EACHglC,EAAM3hC,EAAQ2hC,MAWf,GATAA,EAAIQ,KACHniC,EAAQ7D,KACR6D,EAAQg9B,IACRh9B,EAAQo9B,MACRp9B,EAAQoiC,SACRpiC,EAAQyP,UAIJzP,EAAQqiC,UACZ,IAAM1lC,KAAKqD,EAAQqiC,UAClBV,EAAKhlC,GAAMqD,EAAQqiC,UAAW1lC,GAmBhC,IAAMA,KAdDqD,EAAQs/B,UAAYqC,EAAItC,kBAC5BsC,EAAItC,iBAAkBr/B,EAAQs/B,UAQzBt/B,EAAQ2/B,aAAgBI,EAAS,sBACtCA,EAAS,oBAAuB,kBAItBA,EACV4B,EAAIvC,iBAAkBziC,EAAGojC,EAASpjC,IAInCqC,EAAW,SAAU7C,GACpB,OAAO,WACD6C,IACJA,EAAWkjC,EAAgBP,EAAIW,OAC9BX,EAAIY,QAAUZ,EAAIa,QAAUb,EAAIc,UAC/Bd,EAAIe,mBAAqB,KAEb,UAATvmC,EACJwlC,EAAInC,QACgB,UAATrjC,EAKgB,iBAAfwlC,EAAIpC,OACflK,EAAU,EAAG,SAEbA,EAGCsM,EAAIpC,OACJoC,EAAIlC,YAINpK,EACCwM,GAAkBF,EAAIpC,SAAYoC,EAAIpC,OACtCoC,EAAIlC,WAK+B,UAAjCkC,EAAIgB,cAAgB,SACM,iBAArBhB,EAAIiB,aACV,CAAEC,OAAQlB,EAAIrB,UACd,CAAEvjC,KAAM4kC,EAAIiB,cACbjB,EAAIxC,4BAQTwC,EAAIW,OAAStjC,IACbkjC,EAAgBP,EAAIY,QAAUZ,EAAIc,UAAYzjC,EAAU,cAKnCyB,IAAhBkhC,EAAIa,QACRb,EAAIa,QAAUN,EAEdP,EAAIe,mBAAqB,WAGA,IAAnBf,EAAIpnB,YAMRhgB,GAAO2e,WAAY,WACbla,GACJkjC,OAQLljC,EAAWA,EAAU,SAErB,IAGC2iC,EAAIzB,KAAMlgC,EAAQ6/B,YAAc7/B,EAAQ8b,MAAQ,MAC/C,MAAQ3U,GAGT,GAAKnI,EACJ,MAAMmI,IAKTq4B,MAAO,WACDxgC,GACJA,QAWLvB,GAAOugC,cAAe,SAAUhD,GAC1BA,EAAE2E,cACN3E,EAAE/mB,SAASpX,QAAS,KAKtBY,GAAOqgC,UAAW,CACjBR,QAAS,CACRzgC,OAAQ,6FAGToX,SAAU,CACTpX,OAAQ,2BAET4gC,WAAY,CACX2D,cAAe,SAAUrkC,GAExB,OADAU,GAAO4D,WAAYtE,GACZA,MAMVU,GAAOugC,cAAe,SAAU,SAAUhD,QACxBv6B,IAAZu6B,EAAEpyB,QACNoyB,EAAEpyB,OAAQ,GAENoyB,EAAE2E,cACN3E,EAAE7+B,KAAO,SAKXsB,GAAOwgC,cAAe,SAAU,SAAUjD,GAIxC,IAAIn+B,EAAQmC,
EADb,GAAKg8B,EAAE2E,aAAe3E,EAAE8H,YAEvB,MAAO,CACN5C,KAAM,SAAU/pB,EAAGkf,GAClBx4B,EAASY,GAAQ,YACfwN,KAAM+vB,EAAE8H,aAAe,IACvB/mB,KAAM,CAAEgnB,QAAS/H,EAAEgI,cAAe5mC,IAAK4+B,EAAEgC,MACzCrb,GAAI,aAAc3iB,EAAW,SAAUikC,GACvCpmC,EAAOka,SACP/X,EAAW,KACNikC,GACJ5N,EAAuB,UAAb4N,EAAI9mC,KAAmB,IAAM,IAAK8mC,EAAI9mC,QAKnD/B,EAAS8C,KAAKC,YAAaN,EAAQ,KAEpC2iC,MAAO,WACDxgC,GACJA,QAUL,IAqGKigB,GArGDikB,GAAe,GAClBC,GAAS,oBAGV1lC,GAAOqgC,UAAW,CACjBsF,MAAO,WACPC,cAAe,WACd,IAAIrkC,EAAWkkC,GAAargC,OAAWpF,GAAOiD,QAAU,IAAQrE,GAAMmG,OAEtE,OADAhI,KAAMwE,IAAa,EACZA,KAKTvB,GAAOugC,cAAe,aAAc,SAAUhD,EAAGsI,EAAkBlH,GAElE,IAAImH,EAAcC,EAAaC,EAC9BC,GAAuB,IAAZ1I,EAAEoI,QAAqBD,GAAOlhC,KAAM+4B,EAAEgC,KAChD,MACkB,iBAAXhC,EAAElf,MAE6C,KADnDkf,EAAEqC,aAAe,IACjBhiC,QAAS,sCACX8nC,GAAOlhC,KAAM+4B,EAAElf,OAAU,QAI5B,GAAK4nB,GAAiC,UAArB1I,EAAEkB,UAAW,GA8D7B,OA3DAqH,EAAevI,EAAEqI,cAAgBxnC,EAAYm/B,EAAEqI,eAC9CrI,EAAEqI,gBACFrI,EAAEqI,cAGEK,EACJ1I,EAAG0I,GAAa1I,EAAG0I,GAAW7iC,QAASsiC,GAAQ,KAAOI,IAC/B,IAAZvI,EAAEoI,QACbpI,EAAEgC,MAASrD,GAAO13B,KAAM+4B,EAAEgC,KAAQ,IAAM,KAAQhC,EAAEoI,MAAQ,IAAMG,GAIjEvI,EAAEyC,WAAY,eAAkB,WAI/B,OAHMgG,GACLhmC,GAAOsD,MAAOwiC,EAAe,mBAEvBE,EAAmB,IAI3BzI,EAAEkB,UAAW,GAAM,OAGnBsH,EAAcjpC,GAAQgpC,GACtBhpC,GAAQgpC,GAAiB,WACxBE,EAAoBvkC,WAIrBk9B,EAAM7kB,OAAQ,gBAGQ9W,IAAhB+iC,EACJ/lC,GAAQlD,IAASm+B,WAAY6K,GAI7BhpC,GAAQgpC,GAAiBC,EAIrBxI,EAAGuI,KAGPvI,EAAEqI,cAAgBC,EAAiBD,cAGnCH,GAAa9nC,KAAMmoC,IAIfE,GAAqB5nC,EAAY2nC,IACrCA,EAAaC,EAAmB,IAGjCA,EAAoBD,OAAc/iC,IAI5B,WAYT7E,GAAQ+nC,qBACH1kB,GAAO7kB,EAASwpC,eAAeD,mBAAoB,IAAK1kB,MACvDtU,UAAY,6BACiB,IAA3BsU,GAAK/X,WAAWnJ,QAQxBN,GAAOmW,UAAY,SAAUkI,EAAMne,EAASkmC,GAC3C,MAAqB,iBAAT/nB,EACJ,IAEgB,kBAAZne,IACXkmC,EAAclmC,EACdA,GAAU,GAKLA,IAIA/B,GAAQ+nC,qBAMZxzB,GALAxS,EAAUvD,EAASwpC,eAAeD,mBAAoB,KAKvC7mC,cAAe,SACzB0R,KAAOpU,EAAS6T,SAASO,KAC9B7Q,EAAQT,KAAKC,YAAagT,IAE1BxS,EAAUvD,GAKZ2mB,GAAW8iB,GAAe,IAD1BC,EAASvwB,EAAW1L,KAAMiU,IAKlB,CAAEne,EAAQb,cAAegnC,EAAQ,MAGzCA,EAAShjB,GAAe,CAAEhF,GAAQne,EAASojB,GAEtCA,GAAWA,EAAQhjB,QACvBN,GAAQsjB,GAAUhK,SAGZtZ,GAAOoB,MAA
O,GAAIilC,EAAO58B,cAlChC,IAAIiJ,EAAM2zB,EAAQ/iB,GAyCnBtjB,GAAOG,GAAGonB,KAAO,SAAUgY,EAAK+G,EAAQ/kC,GACvC,IAAItB,EAAUvB,EAAMmkC,EACnB5sB,EAAOlZ,KACPwnB,EAAMgb,EAAI3hC,QAAS,KAsDpB,OApDY,EAAP2mB,IACJtkB,EAAW66B,GAAkByE,EAAIliC,MAAOknB,IACxCgb,EAAMA,EAAIliC,MAAO,EAAGknB,IAIhBnmB,EAAYkoC,IAGhB/kC,EAAW+kC,EACXA,OAAStjC,GAGEsjC,GAA4B,iBAAXA,IAC5B5nC,EAAO,QAIW,EAAduX,EAAK3V,QACTN,GAAOygC,KAAM,CACZlB,IAAKA,EAKL7gC,KAAMA,GAAQ,MACd8/B,SAAU,OACVngB,KAAMioB,IACHz/B,KAAM,SAAUs+B,GAGnBtC,EAAWphC,UAEXwU,EAAK0V,KAAM1rB,EAIVD,GAAQ,SAAUqsB,OAAQrsB,GAAOmW,UAAWgvB,IAAiBv7B,KAAM3J,GAGnEklC,KAKErrB,OAAQvY,GAAY,SAAUo9B,EAAOmD,GACxC7rB,EAAK3U,KAAM,WACVC,EAAS7D,MAAOX,KAAM8lC,GAAY,CAAElE,EAAMwG,aAAcrD,EAAQnD,QAK5D5hC,MAMRiD,GAAOqN,KAAK9F,QAAQg/B,SAAW,SAAU/lC,GACxC,OAAOR,GAAO8B,KAAM9B,GAAOo5B,OAAQ,SAAUj5B,GAC5C,OAAOK,IAASL,EAAGK,OAChBF,QAMLN,GAAOwmC,OAAS,CACfC,UAAW,SAAUjmC,EAAM+B,EAASrD,GACnC,IAAIwnC,EAAaC,EAASC,EAAWC,EAAQC,EAAWC,EACvD/X,EAAWhvB,GAAOwgB,IAAKhgB,EAAM,YAC7BwmC,EAAUhnC,GAAQQ,GAClBonB,EAAQ,GAGS,WAAboH,IACJxuB,EAAK8f,MAAM0O,SAAW,YAGvB8X,EAAYE,EAAQR,SACpBI,EAAY5mC,GAAOwgB,IAAKhgB,EAAM,OAC9BumC,EAAa/mC,GAAOwgB,IAAKhgB,EAAM,SACI,aAAbwuB,GAAwC,UAAbA,KACA,GAA9C4X,EAAYG,GAAanpC,QAAS,SAMpCipC,GADAH,EAAcM,EAAQhY,YACD3iB,IACrBs6B,EAAUD,EAAYpS,OAGtBuS,EAASxX,WAAYuX,IAAe,EACpCD,EAAUtX,WAAY0X,IAAgB,GAGlC3oC,EAAYmE,KAGhBA,EAAUA,EAAQ/E,KAAMgD,EAAMtB,EAAGc,GAAOsC,OAAQ,GAAIwkC,KAGjC,MAAfvkC,EAAQ8J,MACZub,EAAMvb,IAAQ9J,EAAQ8J,IAAMy6B,EAAUz6B,IAAQw6B,GAE1B,MAAhBtkC,EAAQ+xB,OACZ1M,EAAM0M,KAAS/xB,EAAQ+xB,KAAOwS,EAAUxS,KAASqS,GAG7C,UAAWpkC,EACfA,EAAQ0kC,MAAMzpC,KAAMgD,EAAMonB,GAG1Bof,EAAQxmB,IAAKoH,KAKhB5nB,GAAOG,GAAGmC,OAAQ,CAGjBkkC,OAAQ,SAAUjkC,GAGjB,GAAKd,UAAUnB,OACd,YAAmB0C,IAAZT,EACNxF,KACAA,KAAKuE,KAAM,SAAUpC,GACpBc,GAAOwmC,OAAOC,UAAW1pC,KAAMwF,EAASrD,KAI3C,IAAIgoC,EAAMC,EACT3mC,EAAOzD,KAAM,GAEd,OAAMyD,EAQAA,EAAK4xB,iBAAiB9xB,QAK5B4mC,EAAO1mC,EAAK4zB,wBACZ+S,EAAM3mC,EAAK+D,cAAc6H,YAClB,CACNC,IAAK66B,EAAK76B,IAAM86B,EAAIC,YACpB9S,KAAM4S,EAAK5S,KAAO6S,EAAIE,cARf,CAAEh7B,IAAK,EAAGioB,KAAM,QATxB,GAuB
DtF,SAAU,WACT,GAAMjyB,KAAM,GAAZ,CAIA,IAAIuqC,EAAcd,EAAQvnC,EACzBuB,EAAOzD,KAAM,GACbwqC,EAAe,CAAEl7B,IAAK,EAAGioB,KAAM,GAGhC,GAAwC,UAAnCt0B,GAAOwgB,IAAKhgB,EAAM,YAGtBgmC,EAAShmC,EAAK4zB,4BAER,CACNoS,EAASzpC,KAAKypC,SAIdvnC,EAAMuB,EAAK+D,cACX+iC,EAAe9mC,EAAK8mC,cAAgBroC,EAAI6E,gBACxC,MAAQwjC,IACLA,IAAiBroC,EAAIuiB,MAAQ8lB,IAAiBroC,EAAI6E,kBACT,WAA3C9D,GAAOwgB,IAAK8mB,EAAc,YAE1BA,EAAeA,EAAa3nC,WAExB2nC,GAAgBA,IAAiB9mC,GAAkC,IAA1B8mC,EAAahpC,YAG1DipC,EAAevnC,GAAQsnC,GAAed,UACzBn6B,KAAOrM,GAAOwgB,IAAK8mB,EAAc,kBAAkB,GAChEC,EAAajT,MAAQt0B,GAAOwgB,IAAK8mB,EAAc,mBAAmB,IAKpE,MAAO,CACNj7B,IAAKm6B,EAAOn6B,IAAMk7B,EAAal7B,IAAMrM,GAAOwgB,IAAKhgB,EAAM,aAAa,GACpE8zB,KAAMkS,EAAOlS,KAAOiT,EAAajT,KAAOt0B,GAAOwgB,IAAKhgB,EAAM,cAAc,MAc1E8mC,aAAc,WACb,OAAOvqC,KAAKyE,IAAK,WAChB,IAAI8lC,EAAevqC,KAAKuqC,aAExB,MAAQA,GAA2D,WAA3CtnC,GAAOwgB,IAAK8mB,EAAc,YACjDA,EAAeA,EAAaA,aAG7B,OAAOA,GAAgBxjC,OAM1B9D,GAAOsB,KAAM,CAAEk0B,WAAY,cAAeD,UAAW,eAAiB,SAAUnd,EAAQkG,GACvF,IAAIjS,EAAM,gBAAkBiS,EAE5Bte,GAAOG,GAAIiY,GAAW,SAAUjZ,GAC/B,OAAO6d,EAAQjgB,KAAM,SAAUyD,EAAM4X,EAAQjZ,GAG5C,IAAIgoC,EAOJ,GANK3oC,EAAUgC,GACd2mC,EAAM3mC,EACuB,IAAlBA,EAAKlC,WAChB6oC,EAAM3mC,EAAK4L,kBAGCpJ,IAAR7D,EACJ,OAAOgoC,EAAMA,EAAK7oB,GAAS9d,EAAM4X,GAG7B+uB,EACJA,EAAIK,SACFn7B,EAAY86B,EAAIE,YAAVloC,EACPkN,EAAMlN,EAAMgoC,EAAIC,aAIjB5mC,EAAM4X,GAAWjZ,GAEhBiZ,EAAQjZ,EAAKsC,UAAUnB,WAU5BN,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAImZ,GAC7Cte,GAAOuyB,SAAUjU,GAAS4P,GAAc/vB,GAAQuxB,cAC/C,SAAUlvB,EAAMmtB,GACf,GAAKA,EAIJ,OAHAA,EAAWD,GAAQltB,EAAM8d,GAGlB4O,GAAU1oB,KAAMmpB,GACtB3tB,GAAQQ,GAAOwuB,WAAY1Q,GAAS,KACpCqP,MAQL3tB,GAAOsB,KAAM,CAAEmmC,OAAQ,SAAUC,MAAO,SAAW,SAAUjnC,EAAM/B,GAClEsB,GAAOsB,KAAM,CACZkzB,QAAS,QAAU/zB,EACnBgX,QAAS/Y,EACTipC,GAAI,QAAUlnC,GACZ,SAAUmnC,EAAcC,GAG1B7nC,GAAOG,GAAI0nC,GAAa,SAAUtT,EAAQzvB,GACzC,IAAImY,EAAYxb,UAAUnB,SAAYsnC,GAAkC,kBAAXrT,GAC5D1C,EAAQ+V,KAA6B,IAAXrT,IAA6B,IAAVzvB,EAAiB,SAAW,UAE1E,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAM9B,EAAMoG,GAC1C,IAAI7F,EAEJ,OAAKT,EAAUgC,GAGyB,IAAhCqnC,EAASjqC,QAAS,SACxB4C,EAAM,QAAUC,GAChBD,EAAK7D
,SAASmH,gBAAiB,SAAWrD,GAIrB,IAAlBD,EAAKlC,UACTW,EAAMuB,EAAKsD,gBAIJZ,KAAKouB,IACX9wB,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CD,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CxB,EAAK,SAAWwB,UAIDuC,IAAV8B,EAGN9E,GAAOwgB,IAAKhgB,EAAM9B,EAAMmzB,GAGxB7xB,GAAOsgB,MAAO9f,EAAM9B,EAAMoG,EAAO+sB,IAChCnzB,EAAMue,EAAYsX,OAASvxB,EAAWia,QAM5Cjd,GAAOsB,KAAM,CACZ,YACA,WACA,eACA,YACA,cACA,YACE,SAAU6D,EAAIzG,GAChBsB,GAAOG,GAAIzB,GAAS,SAAUyB,GAC7B,OAAOpD,KAAKmnB,GAAIxlB,EAAMyB,MAOxBH,GAAOG,GAAGmC,OAAQ,CAEjBq1B,KAAM,SAAUxT,EAAO9F,EAAMle,GAC5B,OAAOpD,KAAKmnB,GAAIC,EAAO,KAAM9F,EAAMle,IAEpC2nC,OAAQ,SAAU3jB,EAAOhkB,GACxB,OAAOpD,KAAKwnB,IAAKJ,EAAO,KAAMhkB,IAG/B4nC,SAAU,SAAU9nC,EAAUkkB,EAAO9F,EAAMle,GAC1C,OAAOpD,KAAKmnB,GAAIC,EAAOlkB,EAAUoe,EAAMle,IAExC6nC,WAAY,SAAU/nC,EAAUkkB,EAAOhkB,GAGtC,OAA4B,IAArBsB,UAAUnB,OAChBvD,KAAKwnB,IAAKtkB,EAAU,MACpBlD,KAAKwnB,IAAKJ,EAAOlkB,GAAY,KAAME,IAGrC8nC,MAAO,SAAUC,EAAQC,GACxB,OAAOprC,KACLmnB,GAAI,aAAcgkB,GAClBhkB,GAAI,aAAcikB,GAASD,MAI/BloC,GAAOsB,KACN,wLAE4D4D,MAAO,KACnE,SAAUC,EAAI1E,GAGbT,GAAOG,GAAIM,GAAS,SAAU4d,EAAMle,GACnC,OAA0B,EAAnBsB,UAAUnB,OAChBvD,KAAKmnB,GAAIzjB,EAAM,KAAM4d,EAAMle,GAC3BpD,KAAKioB,QAASvkB,MAYlB,IAAI2nC,GAAQ,sDAMZpoC,GAAOqoC,MAAQ,SAAUloC,EAAID,GAC5B,IAAIyf,EAAK/P,EAAMy4B,EAUf,GARwB,iBAAZnoC,IACXyf,EAAMxf,EAAID,GACVA,EAAUC,EACVA,EAAKwf,GAKAvhB,EAAY+B,GAalB,OARAyP,EAAOvS,GAAMG,KAAMiE,UAAW,IAC9B4mC,EAAQ,WACP,OAAOloC,EAAGzC,MAAOwC,GAAWnD,KAAM6S,EAAKnS,OAAQJ,GAAMG,KAAMiE,eAItDsD,KAAO5E,EAAG4E,KAAO5E,EAAG4E,MAAQ/E,GAAO+E,OAElCsjC,GAGRroC,GAAOsoC,UAAY,SAAUC,GACvBA,EACJvoC,GAAO4c,YAEP5c,GAAOoW,OAAO,IAGhBpW,GAAO+C,QAAUD,MAAMC,QACvB/C,GAAOwoC,UAAY3pB,KAAKC,MACxB9e,GAAOO,SAAWA,GAClBP,GAAO5B,WAAaA,EACpB4B,GAAOxB,SAAWA,EAClBwB,GAAO4d,UAAYA,EACnB5d,GAAOtB,KAAOmB,EAEdG,GAAOkoB,IAAMD,KAAKC,IAElBloB,GAAOyoC,UAAY,SAAUpqC,GAK5B,IAAIK,EAAOsB,GAAOtB,KAAML,GACxB,OAAkB,WAATK,GAA8B,WAATA,KAK5BgqC,MAAOrqC,EAAMgxB,WAAYhxB,KAG5B2B,GAAO2oC,KAAO,SAAUrpC,GACvB,OAAe,MAARA,EACN,IACEA,EAAO,IAAK8D,QAASglC,GAAO,OAkBT,mBAAXQ,QAAyBA,OAAOC,KAC3CD,OAAQ,SAAU,GAAI,WACrB
,OAAO5oC,KAOT,IAGC8oC,GAAUhsC,GAAOkD,OAGjB+oC,GAAKjsC,GAAOksC,EAwBb,OAtBAhpC,GAAOipC,WAAa,SAAUrmC,GAS7B,OARK9F,GAAOksC,IAAMhpC,KACjBlD,GAAOksC,EAAID,IAGPnmC,GAAQ9F,GAAOkD,SAAWA,KAC9BlD,GAAOkD,OAAS8oC,IAGV9oC,IAMiB,oBAAbhD,IACXF,GAAOkD,OAASlD,GAAOksC,EAAIhpC,IAMrBA","file":"jquery-3.7.1.min.js"} \ No newline at end of file | |||
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js new file mode 100644 index 0000000000..b6d9aa8c79 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js | |||
@@ -0,0 +1,4 @@ | |||
1 | /*! DataTables 1.13.8 | ||
2 | * ©2008-2023 SpryMedia Ltd - datatables.net/license | ||
3 | */ | ||
4 | !function(n){"use strict";var a;"function"==typeof define&&define.amd?define(["jquery"],function(t){return n(t,window,document)}):"object"==typeof exports?(a=require("jquery"),"undefined"==typeof window?module.exports=function(t,e){return t=t||window,e=e||a(t),n(e,t,t.document)}:module.exports=n(a,window,window.document)):window.DataTable=n(jQuery,window,document)}(function(P,j,v,H){"use strict";function d(t){var e=parseInt(t,10);return!isNaN(e)&&isFinite(t)?e:null}function l(t,e,n){var a=typeof t,r="string"==a;return"number"==a||"bigint"==a||!!h(t)||(e&&r&&(t=$(t,e)),n&&r&&(t=t.replace(q,"")),!isNaN(parseFloat(t))&&isFinite(t))}function a(t,e,n){var a;return!!h(t)||(h(a=t)||"string"==typeof a)&&!!l(t.replace(V,"").replace(/<script/i,""),e,n)||null}function m(t,e,n,a){var r=[],o=0,i=e.length;if(a!==H)for(;o<i;o++)t[e[o]][n]&&r.push(t[e[o]][n][a]);else for(;o<i;o++)r.push(t[e[o]][n]);return r}function f(t,e){var n,a=[];e===H?(e=0,n=t):(n=e,e=t);for(var r=e;r<n;r++)a.push(r);return a}function _(t){for(var e=[],n=0,a=t.length;n<a;n++)t[n]&&e.push(t[n]);return e}function s(t,e){return-1!==this.indexOf(t,e=e===H?0:e)}var p,e,t,w=function(t,v){if(w.factory(t,v))return w;if(this instanceof w)return P(t).DataTable(v);v=t,this.$=function(t,e){return this.api(!0).$(t,e)},this._=function(t,e){return this.api(!0).rows(t,e).data()},this.api=function(t){return new B(t?ge(this[p.iApiIndex]):this)},this.fnAddData=function(t,e){var n=this.api(!0),t=(Array.isArray(t)&&(Array.isArray(t[0])||P.isPlainObject(t[0]))?n.rows:n.row).add(t);return e!==H&&!e||n.draw(),t.flatten().toArray()},this.fnAdjustColumnSizing=function(t){var e=this.api(!0).columns.adjust(),n=e.settings()[0],a=n.oScroll;t===H||t?e.draw(!1):""===a.sX&&""===a.sY||Qt(n)},this.fnClearTable=function(t){var e=this.api(!0).clear();t!==H&&!t||e.draw()},this.fnClose=function(t){this.api(!0).row(t).child.hide()},this.fnDeleteRow=function(t,e,n){var a=this.api(!0),t=a.rows(t),r=t.settings()[0],o=r.aoData[t[0][0]];return 
t.remove(),e&&e.call(this,r,o),n!==H&&!n||a.draw(),o},this.fnDestroy=function(t){this.api(!0).destroy(t)},this.fnDraw=function(t){this.api(!0).draw(t)},this.fnFilter=function(t,e,n,a,r,o){var i=this.api(!0);(null===e||e===H?i:i.column(e)).search(t,n,a,o),i.draw()},this.fnGetData=function(t,e){var n,a=this.api(!0);return t!==H?(n=t.nodeName?t.nodeName.toLowerCase():"",e!==H||"td"==n||"th"==n?a.cell(t,e).data():a.row(t).data()||null):a.data().toArray()},this.fnGetNodes=function(t){var e=this.api(!0);return t!==H?e.row(t).node():e.rows().nodes().flatten().toArray()},this.fnGetPosition=function(t){var e=this.api(!0),n=t.nodeName.toUpperCase();return"TR"==n?e.row(t).index():"TD"==n||"TH"==n?[(n=e.cell(t).index()).row,n.columnVisible,n.column]:null},this.fnIsOpen=function(t){return this.api(!0).row(t).child.isShown()},this.fnOpen=function(t,e,n){return this.api(!0).row(t).child(e,n).show().child()[0]},this.fnPageChange=function(t,e){t=this.api(!0).page(t);e!==H&&!e||t.draw(!1)},this.fnSetColumnVis=function(t,e,n){t=this.api(!0).column(t).visible(e);n!==H&&!n||t.columns.adjust().draw()},this.fnSettings=function(){return ge(this[p.iApiIndex])},this.fnSort=function(t){this.api(!0).order(t).draw()},this.fnSortListener=function(t,e,n){this.api(!0).order.listener(t,e,n)},this.fnUpdate=function(t,e,n,a,r){var o=this.api(!0);return(n===H||null===n?o.row(e):o.cell(e,n)).data(t),r!==H&&!r||o.columns.adjust(),a!==H&&!a||o.draw(),0},this.fnVersionCheck=p.fnVersionCheck;var e,y=this,D=v===H,_=this.length;for(e in D&&(v={}),this.oApi=this.internal=p.internal,w.ext.internal)e&&(this[e]=$e(e));return this.each(function(){var r=1<_?be({},v,!0):v,o=0,t=this.getAttribute("id"),i=!1,e=w.defaults,l=P(this);if("table"!=this.nodeName.toLowerCase())W(null,0,"Non-table node initialisation ("+this.nodeName+")",2);else{K(e),Q(e.column),C(e,e,!0),C(e.column,e.column,!0),C(e,P.extend(r,l.data()),!0);for(var n=w.settings,o=0,s=n.length;o<s;o++){var 
a=n[o];if(a.nTable==this||a.nTHead&&a.nTHead.parentNode==this||a.nTFoot&&a.nTFoot.parentNode==this){var u=(r.bRetrieve!==H?r:e).bRetrieve,c=(r.bDestroy!==H?r:e).bDestroy;if(D||u)return a.oInstance;if(c){a.oInstance.fnDestroy();break}return void W(a,0,"Cannot reinitialise DataTable",3)}if(a.sTableId==this.id){n.splice(o,1);break}}null!==t&&""!==t||(t="DataTables_Table_"+w.ext._unique++,this.id=t);var f,d,h=P.extend(!0,{},w.models.oSettings,{sDestroyWidth:l[0].style.width,sInstance:t,sTableId:t}),p=(h.nTable=this,h.oApi=y.internal,h.oInit=r,n.push(h),h.oInstance=1===y.length?y:l.dataTable(),K(r),Z(r.oLanguage),r.aLengthMenu&&!r.iDisplayLength&&(r.iDisplayLength=(Array.isArray(r.aLengthMenu[0])?r.aLengthMenu[0]:r.aLengthMenu)[0]),r=be(P.extend(!0,{},e),r),F(h.oFeatures,r,["bPaginate","bLengthChange","bFilter","bSort","bSortMulti","bInfo","bProcessing","bAutoWidth","bSortClasses","bServerSide","bDeferRender"]),F(h,r,["asStripeClasses","ajax","fnServerData","fnFormatNumber","sServerMethod","aaSorting","aaSortingFixed","aLengthMenu","sPaginationType","sAjaxSource","sAjaxDataProp","iStateDuration","sDom","bSortCellsTop","iTabIndex","fnStateLoadCallback","fnStateSaveCallback","renderer","searchDelay","rowId",["iCookieDuration","iStateDuration"],["oSearch","oPreviousSearch"],["aoSearchCols","aoPreSearchCols"],["iDisplayLength","_iDisplayLength"]]),F(h.oScroll,r,[["sScrollX","sX"],["sScrollXInner","sXInner"],["sScrollY","sY"],["bScrollCollapse","bCollapse"]]),F(h.oLanguage,r,"fnInfoCallback"),L(h,"aoDrawCallback",r.fnDrawCallback,"user"),L(h,"aoServerParams",r.fnServerParams,"user"),L(h,"aoStateSaveParams",r.fnStateSaveParams,"user"),L(h,"aoStateLoadParams",r.fnStateLoadParams,"user"),L(h,"aoStateLoaded",r.fnStateLoaded,"user"),L(h,"aoRowCallback",r.fnRowCallback,"user"),L(h,"aoRowCreatedCallback",r.fnCreatedRow,"user"),L(h,"aoHeaderCallback",r.fnHeaderCallback,"user"),L(h,"aoFooterCallback",r.fnFooterCallback,"user"),L(h,"aoInitComplete",r.fnInitComplete,"user"),L(h,"aoPreDr
awCallback",r.fnPreDrawCallback,"user"),h.rowIdFn=A(r.rowId),tt(h),h.oClasses),g=(P.extend(p,w.ext.classes,r.oClasses),l.addClass(p.sTable),h.iInitDisplayStart===H&&(h.iInitDisplayStart=r.iDisplayStart,h._iDisplayStart=r.iDisplayStart),null!==r.iDeferLoading&&(h.bDeferLoading=!0,t=Array.isArray(r.iDeferLoading),h._iRecordsDisplay=t?r.iDeferLoading[0]:r.iDeferLoading,h._iRecordsTotal=t?r.iDeferLoading[1]:r.iDeferLoading),h.oLanguage),t=(P.extend(!0,g,r.oLanguage),g.sUrl?(P.ajax({dataType:"json",url:g.sUrl,success:function(t){C(e.oLanguage,t),Z(t),P.extend(!0,g,t,h.oInit.oLanguage),R(h,null,"i18n",[h]),Jt(h)},error:function(){Jt(h)}}),i=!0):R(h,null,"i18n",[h]),null===r.asStripeClasses&&(h.asStripeClasses=[p.sStripeOdd,p.sStripeEven]),h.asStripeClasses),b=l.children("tbody").find("tr").eq(0),m=(-1!==P.inArray(!0,P.map(t,function(t,e){return b.hasClass(t)}))&&(P("tbody tr",this).removeClass(t.join(" ")),h.asDestroyStripes=t.slice()),[]),t=this.getElementsByTagName("thead");if(0!==t.length&&(wt(h.aoHeader,t[0]),m=Ct(h)),null===r.aoColumns)for(f=[],o=0,s=m.length;o<s;o++)f.push(null);else f=r.aoColumns;for(o=0,s=f.length;o<s;o++)nt(h,m?m[o]:null);st(h,r.aoColumnDefs,f,function(t,e){at(h,t,e)}),b.length&&(d=function(t,e){return null!==t.getAttribute("data-"+e)?e:null},P(b[0]).children("th, td").each(function(t,e){var n,a=h.aoColumns[t];a||W(h,0,"Incorrect column count",18),a.mData===t&&(n=d(e,"sort")||d(e,"order"),e=d(e,"filter")||d(e,"search"),null===n&&null===e||(a.mData={_:t+".display",sort:null!==n?t+".@data-"+n:H,type:null!==n?t+".@data-"+n:H,filter:null!==e?t+".@data-"+e:H},a._isArrayHost=!0,at(h,t)))}));var S=h.oFeatures,t=function(){if(r.aaSorting===H){var t=h.aaSorting;for(o=0,s=t.length;o<s;o++)t[o][1]=h.aoColumns[o].asSorting[0]}ce(h),S.bSort&&L(h,"aoDrawCallback",function(){var 
t,n;h.bSorted&&(t=I(h),n={},P.each(t,function(t,e){n[e.src]=e.dir}),R(h,null,"order",[h,t,n]),le(h))}),L(h,"aoDrawCallback",function(){(h.bSorted||"ssp"===E(h)||S.bDeferRender)&&ce(h)},"sc");var e=l.children("caption").each(function(){this._captionSide=P(this).css("caption-side")}),n=l.children("thead"),a=(0===n.length&&(n=P("<thead/>").appendTo(l)),h.nTHead=n[0],l.children("tbody")),n=(0===a.length&&(a=P("<tbody/>").insertAfter(n)),h.nTBody=a[0],l.children("tfoot"));if(0===(n=0===n.length&&0<e.length&&(""!==h.oScroll.sX||""!==h.oScroll.sY)?P("<tfoot/>").appendTo(l):n).length||0===n.children().length?l.addClass(p.sNoFooter):0<n.length&&(h.nTFoot=n[0],wt(h.aoFooter,h.nTFoot)),r.aaData)for(o=0;o<r.aaData.length;o++)x(h,r.aaData[o]);else!h.bDeferLoading&&"dom"!=E(h)||ut(h,P(h.nTBody).children("tr"));h.aiDisplay=h.aiDisplayMaster.slice(),!(h.bInitialised=!0)===i&&Jt(h)};L(h,"aoDrawCallback",de,"state_save"),r.bStateSave?(S.bStateSave=!0,he(h,0,t)):t()}}),y=null,this},c={},U=/[\r\n\u2028]/g,V=/<.*?>/g,X=/^\d{2,4}[\.\/\-]\d{1,2}[\.\/\-]\d{1,2}([T ]{1}\d{1,2}[:\.]\d{2}([\.:]\d{2})?)?$/,J=new RegExp("(\\"+["/",".","*","+","?","|","(",")","[","]","{","}","\\","$","^","-"].join("|\\")+")","g"),q=/['\u00A0,$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfkɃΞ]/gi,h=function(t){return!t||!0===t||"-"===t},$=function(t,e){return c[e]||(c[e]=new RegExp(Ot(e),"g")),"string"==typeof t&&"."!==e?t.replace(/\./g,"").replace(c[e],"."):t},N=function(t,e,n){var a=[],r=0,o=t.length;if(n!==H)for(;r<o;r++)t[r]&&t[r][e]&&a.push(t[r][e][n]);else for(;r<o;r++)t[r]&&a.push(t[r][e]);return a},G=function(t){if(!(t.length<2))for(var e=t.slice().sort(),n=e[0],a=1,r=e.length;a<r;a++){if(e[a]===n)return!1;n=e[a]}return!0},z=function(t){if(G(t))return t.slice();var e,n,a,r=[],o=t.length,i=0;t:for(n=0;n<o;n++){for(e=t[n],a=0;a<i;a++)if(r[a]===e)continue t;r.push(e),i++}return r},Y=function(t,e){if(Array.isArray(e))for(var n=0;n<e.length;n++)Y(t,e[n]);else t.push(e);return t};function i(n){var 
a,r,o={};P.each(n,function(t,e){(a=t.match(/^([^A-Z]+?)([A-Z])/))&&-1!=="a aa ai ao as b fn i m o s ".indexOf(a[1]+" ")&&(r=t.replace(a[0],a[2].toLowerCase()),o[r]=t,"o"===a[1])&&i(n[t])}),n._hungarianMap=o}function C(n,a,r){var o;n._hungarianMap||i(n),P.each(a,function(t,e){(o=n._hungarianMap[t])===H||!r&&a[o]!==H||("o"===o.charAt(0)?(a[o]||(a[o]={}),P.extend(!0,a[o],a[t]),C(n[o],a[o],r)):a[o]=a[t])})}function Z(t){var e,n=w.defaults.oLanguage,a=n.sDecimal;a&&Me(a),t&&(e=t.sZeroRecords,!t.sEmptyTable&&e&&"No data available in table"===n.sEmptyTable&&F(t,t,"sZeroRecords","sEmptyTable"),!t.sLoadingRecords&&e&&"Loading..."===n.sLoadingRecords&&F(t,t,"sZeroRecords","sLoadingRecords"),t.sInfoThousands&&(t.sThousands=t.sInfoThousands),e=t.sDecimal)&&a!==e&&Me(e)}Array.isArray||(Array.isArray=function(t){return"[object Array]"===Object.prototype.toString.call(t)}),Array.prototype.includes||(Array.prototype.includes=s),String.prototype.trim||(String.prototype.trim=function(){return this.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"")}),String.prototype.includes||(String.prototype.includes=s),w.util={throttle:function(a,t){var r,o,i=t!==H?t:200;return function(){var t=this,e=+new Date,n=arguments;r&&e<r+i?(clearTimeout(o),o=setTimeout(function(){r=H,a.apply(t,n)},i)):(r=e,a.apply(t,n))}},escapeRegex:function(t){return t.replace(J,"\\$1")},set:function(a){var d;return P.isPlainObject(a)?w.util.set(a._):null===a?function(){}:"function"==typeof a?function(t,e,n){a(t,"set",e,n)}:"string"!=typeof a||-1===a.indexOf(".")&&-1===a.indexOf("[")&&-1===a.indexOf("(")?function(t,e){t[a]=e}:(d=function(t,e,n){for(var a,r,o,i,l=dt(n),n=l[l.length-1],s=0,u=l.length-1;s<u;s++){if("__proto__"===l[s]||"constructor"===l[s])throw new Error("Cannot set prototype values");if(a=l[s].match(ft),r=l[s].match(g),a){if(l[s]=l[s].replace(ft,""),t[l[s]]=[],(a=l.slice()).splice(0,s+1),i=a.join("."),Array.isArray(e))for(var c=0,f=e.length;c<f;c++)d(o={},e[c],i),t[l[s]].push(o);else 
t[l[s]]=e;return}r&&(l[s]=l[s].replace(g,""),t=t[l[s]](e)),null!==t[l[s]]&&t[l[s]]!==H||(t[l[s]]={}),t=t[l[s]]}n.match(g)?t[n.replace(g,"")](e):t[n.replace(ft,"")]=e},function(t,e){return d(t,e,a)})},get:function(r){var o,d;return P.isPlainObject(r)?(o={},P.each(r,function(t,e){e&&(o[t]=w.util.get(e))}),function(t,e,n,a){var r=o[e]||o._;return r!==H?r(t,e,n,a):t}):null===r?function(t){return t}:"function"==typeof r?function(t,e,n,a){return r(t,e,n,a)}:"string"!=typeof r||-1===r.indexOf(".")&&-1===r.indexOf("[")&&-1===r.indexOf("(")?function(t,e){return t[r]}:(d=function(t,e,n){var a,r,o;if(""!==n)for(var i=dt(n),l=0,s=i.length;l<s;l++){if(f=i[l].match(ft),a=i[l].match(g),f){if(i[l]=i[l].replace(ft,""),""!==i[l]&&(t=t[i[l]]),r=[],i.splice(0,l+1),o=i.join("."),Array.isArray(t))for(var u=0,c=t.length;u<c;u++)r.push(d(t[u],e,o));var f=f[0].substring(1,f[0].length-1);t=""===f?r:r.join(f);break}if(a)i[l]=i[l].replace(g,""),t=t[i[l]]();else{if(null===t||null===t[i[l]])return null;if(t===H||t[i[l]]===H)return H;t=t[i[l]]}}return t},function(t,e){return d(t,e,r)})}};var r=function(t,e,n){t[e]!==H&&(t[n]=t[e])};function K(t){r(t,"ordering","bSort"),r(t,"orderMulti","bSortMulti"),r(t,"orderClasses","bSortClasses"),r(t,"orderCellsTop","bSortCellsTop"),r(t,"order","aaSorting"),r(t,"orderFixed","aaSortingFixed"),r(t,"paging","bPaginate"),r(t,"pagingType","sPaginationType"),r(t,"pageLength","iDisplayLength"),r(t,"searching","bFilter"),"boolean"==typeof t.sScrollX&&(t.sScrollX=t.sScrollX?"100%":""),"boolean"==typeof t.scrollX&&(t.scrollX=t.scrollX?"100%":"");var e=t.aoSearchCols;if(e)for(var n=0,a=e.length;n<a;n++)e[n]&&C(w.models.oSearch,e[n])}function Q(t){r(t,"orderable","bSortable"),r(t,"orderData","aDataSort"),r(t,"orderSequence","asSorting"),r(t,"orderDataType","sortDataType");var e=t.aDataSort;"number"!=typeof e||Array.isArray(e)||(t.aDataSort=[e])}function tt(t){var 
e,n,a,r;w.__browser||(w.__browser=e={},r=(a=(n=P("<div/>").css({position:"fixed",top:0,left:-1*P(j).scrollLeft(),height:1,width:1,overflow:"hidden"}).append(P("<div/>").css({position:"absolute",top:1,left:1,width:100,overflow:"scroll"}).append(P("<div/>").css({width:"100%",height:10}))).appendTo("body")).children()).children(),e.barWidth=a[0].offsetWidth-a[0].clientWidth,e.bScrollOversize=100===r[0].offsetWidth&&100!==a[0].clientWidth,e.bScrollbarLeft=1!==Math.round(r.offset().left),e.bBounding=!!n[0].getBoundingClientRect().width,n.remove()),P.extend(t.oBrowser,w.__browser),t.oScroll.iBarWidth=w.__browser.barWidth}function et(t,e,n,a,r,o){var i,l=a,s=!1;for(n!==H&&(i=n,s=!0);l!==r;)t.hasOwnProperty(l)&&(i=s?e(i,t[l],l,t):t[l],s=!0,l+=o);return i}function nt(t,e){var n=w.defaults.column,a=t.aoColumns.length,n=P.extend({},w.models.oColumn,n,{nTh:e||v.createElement("th"),sTitle:n.sTitle||(e?e.innerHTML:""),aDataSort:n.aDataSort||[a],mData:n.mData||a,idx:a}),n=(t.aoColumns.push(n),t.aoPreSearchCols);n[a]=P.extend({},w.models.oSearch,n[a]),at(t,a,P(e).data())}function at(t,e,n){function a(t){return"string"==typeof t&&-1!==t.indexOf("@")}var e=t.aoColumns[e],r=t.oClasses,o=P(e.nTh),i=(!e.sWidthOrig&&(e.sWidthOrig=o.attr("width")||null,u=(o.attr("style")||"").match(/width:\s*(\d+[pxem%]+)/))&&(e.sWidthOrig=u[1]),n!==H&&null!==n&&(Q(n),C(w.defaults.column,n,!0),n.mDataProp===H||n.mData||(n.mData=n.mDataProp),n.sType&&(e._sManualType=n.sType),n.className&&!n.sClass&&(n.sClass=n.className),n.sClass&&o.addClass(n.sClass),u=e.sClass,P.extend(e,n),F(e,n,"sWidth","sWidthOrig"),u!==e.sClass&&(e.sClass=u+" "+e.sClass),n.iDataSort!==H&&(e.aDataSort=[n.iDataSort]),F(e,n,"aDataSort"),e.ariaTitle||(e.ariaTitle=o.attr("aria-label"))),e.mData),l=A(i),s=e.mRender?A(e.mRender):null,u=(e._bAttrSrc=P.isPlainObject(i)&&(a(i.sort)||a(i.type)||a(i.filter)),e._setter=null,e.fnGetData=function(t,e,n){var a=l(t,e,H,n);return s&&e?s(a,e,t,n):a},e.fnSetData=function(t,e,n){return 
b(i)(t,e,n)},"number"==typeof i||e._isArrayHost||(t._rowReadObject=!0),t.oFeatures.bSort||(e.bSortable=!1,o.addClass(r.sSortableNone)),-1!==P.inArray("asc",e.asSorting)),n=-1!==P.inArray("desc",e.asSorting);e.bSortable&&(u||n)?u&&!n?(e.sSortingClass=r.sSortableAsc,e.sSortingClassJUI=r.sSortJUIAscAllowed):!u&&n?(e.sSortingClass=r.sSortableDesc,e.sSortingClassJUI=r.sSortJUIDescAllowed):(e.sSortingClass=r.sSortable,e.sSortingClassJUI=r.sSortJUI):(e.sSortingClass=r.sSortableNone,e.sSortingClassJUI="")}function O(t){if(!1!==t.oFeatures.bAutoWidth){var e=t.aoColumns;ee(t);for(var n=0,a=e.length;n<a;n++)e[n].nTh.style.width=e[n].sWidth}var r=t.oScroll;""===r.sY&&""===r.sX||Qt(t),R(t,null,"column-sizing",[t])}function rt(t,e){t=it(t,"bVisible");return"number"==typeof t[e]?t[e]:null}function ot(t,e){t=it(t,"bVisible"),e=P.inArray(e,t);return-1!==e?e:null}function T(t){var n=0;return P.each(t.aoColumns,function(t,e){e.bVisible&&"none"!==P(e.nTh).css("display")&&n++}),n}function it(t,n){var a=[];return P.map(t.aoColumns,function(t,e){t[n]&&a.push(e)}),a}function lt(t){for(var e,n,a,r,o,i,l,s=t.aoColumns,u=t.aoData,c=w.ext.type.detect,f=0,d=s.length;f<d;f++)if(l=[],!(o=s[f]).sType&&o._sManualType)o.sType=o._sManualType;else if(!o.sType){for(e=0,n=c.length;e<n;e++){for(a=0,r=u.length;a<r&&(l[a]===H&&(l[a]=S(t,a,f,"type")),(i=c[e](l[a],t))||e===c.length-1)&&("html"!==i||h(l[a]));a++);if(i){o.sType=i;break}}o.sType||(o.sType="string")}}function st(t,e,n,a){var r,o,i,l,s=t.aoColumns;if(e)for(r=e.length-1;0<=r;r--)for(var u,c=(u=e[r]).target!==H?u.target:u.targets!==H?u.targets:u.aTargets,f=0,d=(c=Array.isArray(c)?c:[c]).length;f<d;f++)if("number"==typeof c[f]&&0<=c[f]){for(;s.length<=c[f];)nt(t);a(c[f],u)}else if("number"==typeof c[f]&&c[f]<0)a(s.length+c[f],u);else if("string"==typeof c[f])for(i=0,l=s.length;i<l;i++)"_all"!=c[f]&&!P(s[i].nTh).hasClass(c[f])||a(i,u);if(n)for(r=0,o=n.length;r<o;r++)a(r,n[r])}function x(t,e,n,a){for(var 
r=t.aoData.length,o=P.extend(!0,{},w.models.oRow,{src:n?"dom":"data",idx:r}),i=(o._aData=e,t.aoData.push(o),t.aoColumns),l=0,s=i.length;l<s;l++)i[l].sType=null;t.aiDisplayMaster.push(r);e=t.rowIdFn(e);return e!==H&&(t.aIds[e]=o),!n&&t.oFeatures.bDeferRender||St(t,r,n,a),r}function ut(n,t){var a;return(t=t instanceof P?t:P(t)).map(function(t,e){return a=mt(n,e),x(n,a.data,e,a.cells)})}function S(t,e,n,a){"search"===a?a="filter":"order"===a&&(a="sort");var r=t.iDraw,o=t.aoColumns[n],i=t.aoData[e]._aData,l=o.sDefaultContent,s=o.fnGetData(i,a,{settings:t,row:e,col:n});if(s===H)return t.iDrawError!=r&&null===l&&(W(t,0,"Requested unknown parameter "+("function"==typeof o.mData?"{function}":"'"+o.mData+"'")+" for row "+e+", column "+n,4),t.iDrawError=r),l;if(s!==i&&null!==s||null===l||a===H){if("function"==typeof s)return s.call(i)}else s=l;return null===s&&"display"===a?"":"filter"===a&&(e=w.ext.type.search)[o.sType]?e[o.sType](s):s}function ct(t,e,n,a){var r=t.aoColumns[n],o=t.aoData[e]._aData;r.fnSetData(o,a,{settings:t,row:e,col:n})}var ft=/\[.*?\]$/,g=/\(\)$/;function dt(t){return P.map(t.match(/(\\.|[^\.])+/g)||[""],function(t){return t.replace(/\\\./g,".")})}var A=w.util.get,b=w.util.set;function ht(t){return N(t.aoData,"_aData")}function pt(t){t.aoData.length=0,t.aiDisplayMaster.length=0,t.aiDisplay.length=0,t.aIds={}}function gt(t,e,n){for(var a=-1,r=0,o=t.length;r<o;r++)t[r]==e?a=r:t[r]>e&&t[r]--;-1!=a&&n===H&&t.splice(a,1)}function bt(n,a,t,e){function r(t,e){for(;t.childNodes.length;)t.removeChild(t.firstChild);t.innerHTML=S(n,a,e,"display")}var o,i,l=n.aoData[a];if("dom"!==t&&(t&&"auto"!==t||"dom"!==l.src)){var s=l.anCells;if(s)if(e!==H)r(s[e],e);else for(o=0,i=s.length;o<i;o++)r(s[o],o)}else l._aData=mt(n,l,e,e===H?H:l._aData).data;l._aSortData=null,l._aFilterData=null;var u=n.aoColumns;if(e!==H)u[e].sType=null;else{for(o=0,i=u.length;o<i;o++)u[o].sType=null;vt(n,l)}}function mt(t,e,n,a){function r(t,e){var n;"string"==typeof 
t&&-1!==(n=t.indexOf("@"))&&(n=t.substring(n+1),b(t)(a,e.getAttribute(n)))}function o(t){n!==H&&n!==f||(l=d[f],s=t.innerHTML.trim(),l&&l._bAttrSrc?(b(l.mData._)(a,s),r(l.mData.sort,t),r(l.mData.type,t),r(l.mData.filter,t)):h?(l._setter||(l._setter=b(l.mData)),l._setter(a,s)):a[f]=s),f++}var i,l,s,u=[],c=e.firstChild,f=0,d=t.aoColumns,h=t._rowReadObject;a=a!==H?a:h?{}:[];if(c)for(;c;)"TD"!=(i=c.nodeName.toUpperCase())&&"TH"!=i||(o(c),u.push(c)),c=c.nextSibling;else for(var p=0,g=(u=e.anCells).length;p<g;p++)o(u[p]);var e=e.firstChild?e:e.nTr;return e&&(e=e.getAttribute("id"))&&b(t.rowId)(a,e),{data:a,cells:u}}function St(t,e,n,a){var r,o,i,l,s,u,c=t.aoData[e],f=c._aData,d=[];if(null===c.nTr){for(r=n||v.createElement("tr"),c.nTr=r,c.anCells=d,r._DT_RowIndex=e,vt(t,c),l=0,s=t.aoColumns.length;l<s;l++)i=t.aoColumns[l],(o=(u=!n)?v.createElement(i.sCellType):a[l])||W(t,0,"Incorrect column count",18),o._DT_CellIndex={row:e,column:l},d.push(o),!u&&(!i.mRender&&i.mData===l||P.isPlainObject(i.mData)&&i.mData._===l+".display")||(o.innerHTML=S(t,e,l,"display")),i.sClass&&(o.className+=" "+i.sClass),i.bVisible&&!n?r.appendChild(o):!i.bVisible&&n&&o.parentNode.removeChild(o),i.fnCreatedCell&&i.fnCreatedCell.call(t.oInstance,o,S(t,e,l),f,e,l);R(t,"aoRowCreatedCallback",null,[r,f,e,d])}}function vt(t,e){var n=e.nTr,a=e._aData;n&&((t=t.rowIdFn(a))&&(n.id=t),a.DT_RowClass&&(t=a.DT_RowClass.split(" "),e.__rowc=e.__rowc?z(e.__rowc.concat(t)):t,P(n).removeClass(e.__rowc.join(" ")).addClass(a.DT_RowClass)),a.DT_RowAttr&&P(n).attr(a.DT_RowAttr),a.DT_RowData)&&P(n).data(a.DT_RowData)}function yt(t){var e,n,a,r=t.nTHead,o=t.nTFoot,i=0===P("th, 
td",r).length,l=t.oClasses,s=t.aoColumns;for(i&&(n=P("<tr/>").appendTo(r)),c=0,f=s.length;c<f;c++)a=s[c],e=P(a.nTh).addClass(a.sClass),i&&e.appendTo(n),t.oFeatures.bSort&&(e.addClass(a.sSortingClass),!1!==a.bSortable)&&(e.attr("tabindex",t.iTabIndex).attr("aria-controls",t.sTableId),ue(t,a.nTh,c)),a.sTitle!=e[0].innerHTML&&e.html(a.sTitle),ve(t,"header")(t,e,a,l);if(i&&wt(t.aoHeader,r),P(r).children("tr").children("th, td").addClass(l.sHeaderTH),P(o).children("tr").children("th, td").addClass(l.sFooterTH),null!==o)for(var u=t.aoFooter[0],c=0,f=u.length;c<f;c++)(a=s[c])?(a.nTf=u[c].cell,a.sClass&&P(a.nTf).addClass(a.sClass)):W(t,0,"Incorrect column count",18)}function Dt(t,e,n){var a,r,o,i,l,s,u,c,f,d=[],h=[],p=t.aoColumns.length;if(e){for(n===H&&(n=!1),a=0,r=e.length;a<r;a++){for(d[a]=e[a].slice(),d[a].nTr=e[a].nTr,o=p-1;0<=o;o--)t.aoColumns[o].bVisible||n||d[a].splice(o,1);h.push([])}for(a=0,r=d.length;a<r;a++){if(u=d[a].nTr)for(;s=u.firstChild;)u.removeChild(s);for(o=0,i=d[a].length;o<i;o++)if(f=c=1,h[a][o]===H){for(u.appendChild(d[a][o].cell),h[a][o]=1;d[a+c]!==H&&d[a][o].cell==d[a+c][o].cell;)h[a+c][o]=1,c++;for(;d[a][o+f]!==H&&d[a][o].cell==d[a][o+f].cell;){for(l=0;l<c;l++)h[a+l][o+f]=1;f++}P(d[a][o].cell).attr("rowspan",c).attr("colspan",f)}}}}function y(t,e){n="ssp"==E(s=t),(l=s.iInitDisplayStart)!==H&&-1!==l&&(s._iDisplayStart=!n&&l>=s.fnRecordsDisplay()?0:l,s.iInitDisplayStart=-1);var n=R(t,"aoPreDrawCallback","preDraw",[t]);if(-1!==P.inArray(!1,n))D(t,!1);else{var a=[],r=0,o=t.asStripeClasses,i=o.length,l=t.oLanguage,s="ssp"==E(t),u=t.aiDisplay,n=t._iDisplayStart,c=t.fnDisplayEnd();if(t.bDrawing=!0,t.bDeferLoading)t.bDeferLoading=!1,t.iDraw++,D(t,!1);else if(s){if(!t.bDestroying&&!e)return void xt(t)}else t.iDraw++;if(0!==u.length)for(var f=s?t.aoData.length:c,d=s?0:n;d<f;d++){var 
h,p=u[d],g=t.aoData[p],b=(null===g.nTr&&St(t,p),g.nTr);0!==i&&(h=o[r%i],g._sRowStripe!=h)&&(P(b).removeClass(g._sRowStripe).addClass(h),g._sRowStripe=h),R(t,"aoRowCallback",null,[b,g._aData,r,d,p]),a.push(b),r++}else{e=l.sZeroRecords;1==t.iDraw&&"ajax"==E(t)?e=l.sLoadingRecords:l.sEmptyTable&&0===t.fnRecordsTotal()&&(e=l.sEmptyTable),a[0]=P("<tr/>",{class:i?o[0]:""}).append(P("<td />",{valign:"top",colSpan:T(t),class:t.oClasses.sRowEmpty}).html(e))[0]}R(t,"aoHeaderCallback","header",[P(t.nTHead).children("tr")[0],ht(t),n,c,u]),R(t,"aoFooterCallback","footer",[P(t.nTFoot).children("tr")[0],ht(t),n,c,u]);s=P(t.nTBody);s.children().detach(),s.append(P(a)),R(t,"aoDrawCallback","draw",[t]),t.bSorted=!1,t.bFiltered=!1,t.bDrawing=!1}}function u(t,e){var n=t.oFeatures,a=n.bSort,n=n.bFilter;a&&ie(t),n?Rt(t,t.oPreviousSearch):t.aiDisplay=t.aiDisplayMaster.slice(),!0!==e&&(t._iDisplayStart=0),t._drawHold=e,y(t),t._drawHold=!1}function _t(t){for(var e,n,a,r,o,i,l,s=t.oClasses,u=P(t.nTable),u=P("<div/>").insertBefore(u),c=t.oFeatures,f=P("<div/>",{id:t.sTableId+"_wrapper",class:s.sWrapper+(t.nTFoot?"":" "+s.sNoFooter)}),d=(t.nHolding=u[0],t.nTableWrapper=f[0],t.nTableReinsertBefore=t.nTable.nextSibling,t.sDom.split("")),h=0;h<d.length;h++){if(e=null,"<"==(n=d[h])){if(a=P("<div/>")[0],"'"==(r=d[h+1])||'"'==r){for(o="",i=2;d[h+i]!=r;)o+=d[h+i],i++;"H"==o?o=s.sJUIHeader:"F"==o&&(o=s.sJUIFooter),-1!=o.indexOf(".")?(l=o.split("."),a.id=l[0].substr(1,l[0].length-1),a.className=l[1]):"#"==o.charAt(0)?a.id=o.substr(1,o.length-1):a.className=o,h+=i}f.append(a),f=P(a)}else if(">"==n)f=f.parent();else if("l"==n&&c.bPaginate&&c.bLengthChange)e=Gt(t);else if("f"==n&&c.bFilter)e=Lt(t);else if("r"==n&&c.bProcessing)e=Zt(t);else if("t"==n)e=Kt(t);else if("i"==n&&c.bInfo)e=Ut(t);else if("p"==n&&c.bPaginate)e=zt(t);else if(0!==w.ext.feature.length)for(var 
p=w.ext.feature,g=0,b=p.length;g<b;g++)if(n==p[g].cFeature){e=p[g].fnInit(t);break}e&&((l=t.aanFeatures)[n]||(l[n]=[]),l[n].push(e),f.append(e))}u.replaceWith(f),t.nHolding=null}function wt(t,e){var n,a,r,o,i,l,s,u,c,f,d=P(e).children("tr");for(t.splice(0,t.length),r=0,l=d.length;r<l;r++)t.push([]);for(r=0,l=d.length;r<l;r++)for(a=(n=d[r]).firstChild;a;){if("TD"==a.nodeName.toUpperCase()||"TH"==a.nodeName.toUpperCase())for(u=(u=+a.getAttribute("colspan"))&&0!=u&&1!=u?u:1,c=(c=+a.getAttribute("rowspan"))&&0!=c&&1!=c?c:1,s=function(t,e,n){for(var a=t[e];a[n];)n++;return n}(t,r,0),f=1==u,i=0;i<u;i++)for(o=0;o<c;o++)t[r+o][s+i]={cell:a,unique:f},t[r+o].nTr=n;a=a.nextSibling}}function Ct(t,e,n){var a=[];n||(n=t.aoHeader,e&&wt(n=[],e));for(var r=0,o=n.length;r<o;r++)for(var i=0,l=n[r].length;i<l;i++)!n[r][i].unique||a[i]&&t.bSortCellsTop||(a[i]=n[r][i].cell);return a}function Tt(r,t,n){function e(t){var e=r.jqXHR?r.jqXHR.status:null;(null===t||"number"==typeof e&&204==e)&&Ft(r,t={},[]),(e=t.error||t.sError)&&W(r,0,e),r.json=t,R(r,null,"xhr",[r,t,r.jqXHR]),n(t)}R(r,"aoServerParams","serverParams",[t]),t&&Array.isArray(t)&&(a={},o=/(.*?)\[\]$/,P.each(t,function(t,e){var n=e.name.match(o);n?(n=n[0],a[n]||(a[n]=[]),a[n].push(e.value)):a[e.name]=e.value}),t=a);var a,o,i,l=r.ajax,s=r.oInstance,u=(P.isPlainObject(l)&&l.data&&(u="function"==typeof(i=l.data)?i(t,r):i,t="function"==typeof i&&u?u:P.extend(!0,t,u),delete l.data),{data:t,success:e,dataType:"json",cache:!1,type:r.sServerMethod,error:function(t,e,n){var a=R(r,null,"xhr",[r,null,r.jqXHR]);-1===P.inArray(!0,a)&&("parsererror"==e?W(r,0,"Invalid JSON response",1):4===t.readyState&&W(r,0,"Ajax error",7)),D(r,!1)}});r.oAjaxData=t,R(r,null,"preXhr",[r,t]),r.fnServerData?r.fnServerData.call(s,r.sAjaxSource,P.map(t,function(t,e){return{name:e,value:t}}),e,r):r.sAjaxSource||"string"==typeof l?r.jqXHR=P.ajax(P.extend(u,{url:l||r.sAjaxSource})):"function"==typeof 
l?r.jqXHR=l.call(s,t,e,r):(r.jqXHR=P.ajax(P.extend(u,l)),l.data=i)}function xt(e){e.iDraw++,D(e,!0);var n=e._drawHold;Tt(e,At(e),function(t){e._drawHold=n,It(e,t),e._drawHold=!1})}function At(t){for(var e,n,a,r=t.aoColumns,o=r.length,i=t.oFeatures,l=t.oPreviousSearch,s=t.aoPreSearchCols,u=[],c=I(t),f=t._iDisplayStart,d=!1!==i.bPaginate?t._iDisplayLength:-1,h=function(t,e){u.push({name:t,value:e})},p=(h("sEcho",t.iDraw),h("iColumns",o),h("sColumns",N(r,"sName").join(",")),h("iDisplayStart",f),h("iDisplayLength",d),{draw:t.iDraw,columns:[],order:[],start:f,length:d,search:{value:l.sSearch,regex:l.bRegex}}),g=0;g<o;g++)n=r[g],a=s[g],e="function"==typeof n.mData?"function":n.mData,p.columns.push({data:e,name:n.sName,searchable:n.bSearchable,orderable:n.bSortable,search:{value:a.sSearch,regex:a.bRegex}}),h("mDataProp_"+g,e),i.bFilter&&(h("sSearch_"+g,a.sSearch),h("bRegex_"+g,a.bRegex),h("bSearchable_"+g,n.bSearchable)),i.bSort&&h("bSortable_"+g,n.bSortable);i.bFilter&&(h("sSearch",l.sSearch),h("bRegex",l.bRegex)),i.bSort&&(P.each(c,function(t,e){p.order.push({column:e.col,dir:e.dir}),h("iSortCol_"+t,e.col),h("sSortDir_"+t,e.dir)}),h("iSortingCols",c.length));f=w.ext.legacy.ajax;return null===f?t.sAjaxSource?u:p:f?u:p}function It(t,n){function e(t,e){return n[t]!==H?n[t]:n[e]}var a=Ft(t,n),r=e("sEcho","draw"),o=e("iTotalRecords","recordsTotal"),i=e("iTotalDisplayRecords","recordsFiltered");if(r!==H){if(+r<t.iDraw)return;t.iDraw=+r}a=a||[],pt(t),t._iRecordsTotal=parseInt(o,10),t._iRecordsDisplay=parseInt(i,10);for(var l=0,s=a.length;l<s;l++)x(t,a[l]);t.aiDisplay=t.aiDisplayMaster.slice(),y(t,!0),t._bInitComplete||qt(t,n),D(t,!1)}function Ft(t,e,n){t=P.isPlainObject(t.ajax)&&t.ajax.dataSrc!==H?t.ajax.dataSrc:t.sAjaxDataProp;if(!n)return"data"===t?e.aaData||e[t]:""!==t?A(t)(e):e;b(t)(e,n)}function Lt(n){function e(t){i.f;var 
e=this.value||"";o.return&&"Enter"!==t.key||e!=o.sSearch&&(Rt(n,{sSearch:e,bRegex:o.bRegex,bSmart:o.bSmart,bCaseInsensitive:o.bCaseInsensitive,return:o.return}),n._iDisplayStart=0,y(n))}var t=n.oClasses,a=n.sTableId,r=n.oLanguage,o=n.oPreviousSearch,i=n.aanFeatures,l='<input type="search" class="'+t.sFilterInput+'"/>',s=(s=r.sSearch).match(/_INPUT_/)?s.replace("_INPUT_",l):s+l,l=P("<div/>",{id:i.f?null:a+"_filter",class:t.sFilter}).append(P("<label/>").append(s)),t=null!==n.searchDelay?n.searchDelay:"ssp"===E(n)?400:0,u=P("input",l).val(o.sSearch).attr("placeholder",r.sSearchPlaceholder).on("keyup.DT search.DT input.DT paste.DT cut.DT",t?ne(e,t):e).on("mouseup.DT",function(t){setTimeout(function(){e.call(u[0],t)},10)}).on("keypress.DT",function(t){if(13==t.keyCode)return!1}).attr("aria-controls",a);return P(n.nTable).on("search.dt.DT",function(t,e){if(n===e)try{u[0]!==v.activeElement&&u.val(o.sSearch)}catch(t){}}),l[0]}function Rt(t,e,n){function a(t){o.sSearch=t.sSearch,o.bRegex=t.bRegex,o.bSmart=t.bSmart,o.bCaseInsensitive=t.bCaseInsensitive,o.return=t.return}function r(t){return t.bEscapeRegex!==H?!t.bEscapeRegex:t.bRegex}var o=t.oPreviousSearch,i=t.aoPreSearchCols;if(lt(t),"ssp"!=E(t)){Ht(t,e.sSearch,n,r(e),e.bSmart,e.bCaseInsensitive),a(e);for(var l=0;l<i.length;l++)jt(t,i[l].sSearch,l,r(i[l]),i[l].bSmart,i[l].bCaseInsensitive);Pt(t)}else a(e);t.bFiltered=!0,R(t,null,"search",[t])}function Pt(t){for(var e,n,a=w.ext.search,r=t.aiDisplay,o=0,i=a.length;o<i;o++){for(var l=[],s=0,u=r.length;s<u;s++)n=r[s],e=t.aoData[n],a[o](t,e._aFilterData,n,e._aData,s)&&l.push(n);r.length=0,P.merge(r,l)}}function jt(t,e,n,a,r,o){if(""!==e){for(var i,l=[],s=t.aiDisplay,u=Nt(e,a,r,o),c=0;c<s.length;c++)i=t.aoData[s[c]]._aFilterData[n],u.test(i)&&l.push(s[c]);t.aiDisplay=l}}function Ht(t,e,n,a,r,o){var 
i,l,s,u=Nt(e,a,r,o),r=t.oPreviousSearch.sSearch,o=t.aiDisplayMaster,c=[];if(0!==w.ext.search.length&&(n=!0),l=Wt(t),e.length<=0)t.aiDisplay=o.slice();else{for((l||n||a||r.length>e.length||0!==e.indexOf(r)||t.bSorted)&&(t.aiDisplay=o.slice()),i=t.aiDisplay,s=0;s<i.length;s++)u.test(t.aoData[i[s]]._sFilterRow)&&c.push(i[s]);t.aiDisplay=c}}function Nt(t,e,n,a){return t=e?t:Ot(t),n&&(t="^(?=.*?"+P.map(t.match(/["\u201C][^"\u201D]+["\u201D]|[^ ]+/g)||[""],function(t){var e;return'"'===t.charAt(0)?t=(e=t.match(/^"(.*)"$/))?e[1]:t:"“"===t.charAt(0)&&(t=(e=t.match(/^\u201C(.*)\u201D$/))?e[1]:t),t.replace('"',"")}).join(")(?=.*?")+").*$"),new RegExp(t,a?"i":"")}var Ot=w.util.escapeRegex,kt=P("<div>")[0],Mt=kt.textContent!==H;function Wt(t){for(var e,n,a,r,o,i=t.aoColumns,l=!1,s=0,u=t.aoData.length;s<u;s++)if(!(o=t.aoData[s])._aFilterData){for(a=[],e=0,n=i.length;e<n;e++)i[e].bSearchable?"string"!=typeof(r=null===(r=S(t,s,e,"filter"))?"":r)&&r.toString&&(r=r.toString()):r="",r.indexOf&&-1!==r.indexOf("&")&&(kt.innerHTML=r,r=Mt?kt.textContent:kt.innerText),r.replace&&(r=r.replace(/[\r\n\u2028]/g,"")),a.push(r);o._aFilterData=a,o._sFilterRow=a.join(" "),l=!0}return l}function Et(t){return{search:t.sSearch,smart:t.bSmart,regex:t.bRegex,caseInsensitive:t.bCaseInsensitive}}function Bt(t){return{sSearch:t.search,bSmart:t.smart,bRegex:t.regex,bCaseInsensitive:t.caseInsensitive}}function Ut(t){var e=t.sTableId,n=t.aanFeatures.i,a=P("<div/>",{class:t.oClasses.sInfo,id:n?null:e+"_info"});return n||(t.aoDrawCallback.push({fn:Vt,sName:"information"}),a.attr("role","status").attr("aria-live","polite"),P(t.nTable).attr("aria-describedby",e+"_info")),a[0]}function Vt(t){var e,n,a,r,o,i,l=t.aanFeatures.i;0!==l.length&&(i=t.oLanguage,e=t._iDisplayStart+1,n=t.fnDisplayEnd(),a=t.fnRecordsTotal(),o=(r=t.fnRecordsDisplay())?i.sInfo:i.sInfoEmpty,r!==a&&(o+=" "+i.sInfoFiltered),o=Xt(t,o+=i.sInfoPostFix),null!==(i=i.fnInfoCallback)&&(o=i.call(t.oInstance,t,e,n,a,r,o)),P(l).html(o))}function 
Xt(t,e){var n=t.fnFormatNumber,a=t._iDisplayStart+1,r=t._iDisplayLength,o=t.fnRecordsDisplay(),i=-1===r;return e.replace(/_START_/g,n.call(t,a)).replace(/_END_/g,n.call(t,t.fnDisplayEnd())).replace(/_MAX_/g,n.call(t,t.fnRecordsTotal())).replace(/_TOTAL_/g,n.call(t,o)).replace(/_PAGE_/g,n.call(t,i?1:Math.ceil(a/r))).replace(/_PAGES_/g,n.call(t,i?1:Math.ceil(o/r)))}function Jt(n){var a,t,e,r=n.iInitDisplayStart,o=n.aoColumns,i=n.oFeatures,l=n.bDeferLoading;if(n.bInitialised){for(_t(n),yt(n),Dt(n,n.aoHeader),Dt(n,n.aoFooter),D(n,!0),i.bAutoWidth&&ee(n),a=0,t=o.length;a<t;a++)(e=o[a]).sWidth&&(e.nTh.style.width=M(e.sWidth));R(n,null,"preInit",[n]),u(n);i=E(n);"ssp"==i&&!l||("ajax"==i?Tt(n,[],function(t){var e=Ft(n,t);for(a=0;a<e.length;a++)x(n,e[a]);n.iInitDisplayStart=r,u(n),D(n,!1),qt(n,t)}):(D(n,!1),qt(n)))}else setTimeout(function(){Jt(n)},200)}function qt(t,e){t._bInitComplete=!0,(e||t.oInit.aaData)&&O(t),R(t,null,"plugin-init",[t,e]),R(t,"aoInitComplete","init",[t,e])}function $t(t,e){e=parseInt(e,10);t._iDisplayLength=e,Se(t),R(t,null,"length",[t,e])}function Gt(a){for(var t=a.oClasses,e=a.sTableId,n=a.aLengthMenu,r=Array.isArray(n[0]),o=r?n[0]:n,i=r?n[1]:n,l=P("<select/>",{name:e+"_length","aria-controls":e,class:t.sLengthSelect}),s=0,u=o.length;s<u;s++)l[0][s]=new Option("number"==typeof i[s]?a.fnFormatNumber(i[s]):i[s],o[s]);var c=P("<div><label/></div>").addClass(t.sLength);return a.aanFeatures.l||(c[0].id=e+"_length"),c.children().append(a.oLanguage.sLengthMenu.replace("_MENU_",l[0].outerHTML)),P("select",c).val(a._iDisplayLength).on("change.DT",function(t){$t(a,P(this).val()),y(a)}),P(a.nTable).on("length.dt.DT",function(t,e,n){a===e&&P("select",c).val(n)}),c[0]}function zt(t){function c(t){y(t)}var e=t.sPaginationType,f=w.ext.pager[e],d="function"==typeof f,e=P("<div/>").addClass(t.oClasses.sPaging+e)[0],h=t.aanFeatures;return d||f.fnInit(t,e,c),h.p||(e.id=t.sTableId+"_paginate",t.aoDrawCallback.push({fn:function(t){if(d)for(var 
e=t._iDisplayStart,n=t._iDisplayLength,a=t.fnRecordsDisplay(),r=-1===n,o=r?0:Math.ceil(e/n),i=r?1:Math.ceil(a/n),l=f(o,i),s=0,u=h.p.length;s<u;s++)ve(t,"pageButton")(t,h.p[s],s,l,o,i);else f.fnUpdate(t,c)},sName:"pagination"})),e}function Yt(t,e,n){var a=t._iDisplayStart,r=t._iDisplayLength,o=t.fnRecordsDisplay(),o=(0===o||-1===r?a=0:"number"==typeof e?o<(a=e*r)&&(a=0):"first"==e?a=0:"previous"==e?(a=0<=r?a-r:0)<0&&(a=0):"next"==e?a+r<o&&(a+=r):"last"==e?a=Math.floor((o-1)/r)*r:W(t,0,"Unknown paging action: "+e,5),t._iDisplayStart!==a);return t._iDisplayStart=a,o?(R(t,null,"page",[t]),n&&y(t)):R(t,null,"page-nc",[t]),o}function Zt(t){return P("<div/>",{id:t.aanFeatures.r?null:t.sTableId+"_processing",class:t.oClasses.sProcessing,role:"status"}).html(t.oLanguage.sProcessing).append("<div><div></div><div></div><div></div><div></div></div>").insertBefore(t.nTable)[0]}function D(t,e){t.oFeatures.bProcessing&&P(t.aanFeatures.r).css("display",e?"block":"none"),R(t,null,"processing",[t,e])}function Kt(t){var e,n,a,r,o,i,l,s,u,c,f,d,h=P(t.nTable),p=t.oScroll;return""===p.sX&&""===p.sY?t.nTable:(e=p.sX,n=p.sY,a=t.oClasses,o=(r=h.children("caption")).length?r[0]._captionSide:null,s=P(h[0].cloneNode(!1)),i=P(h[0].cloneNode(!1)),u=function(t){return 
t?M(t):null},(l=h.children("tfoot")).length||(l=null),s=P(f="<div/>",{class:a.sScrollWrapper}).append(P(f,{class:a.sScrollHead}).css({overflow:"hidden",position:"relative",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollHeadInner}).css({"box-sizing":"content-box",width:p.sXInner||"100%"}).append(s.removeAttr("id").css("margin-left",0).append("top"===o?r:null).append(h.children("thead"))))).append(P(f,{class:a.sScrollBody}).css({position:"relative",overflow:"auto",width:u(e)}).append(h)),l&&s.append(P(f,{class:a.sScrollFoot}).css({overflow:"hidden",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollFootInner}).append(i.removeAttr("id").css("margin-left",0).append("bottom"===o?r:null).append(h.children("tfoot"))))),u=s.children(),c=u[0],f=u[1],d=l?u[2]:null,e&&P(f).on("scroll.DT",function(t){var e=this.scrollLeft;c.scrollLeft=e,l&&(d.scrollLeft=e)}),P(f).css("max-height",n),p.bCollapse||P(f).css("height",n),t.nScrollHead=c,t.nScrollBody=f,t.nScrollFoot=d,t.aoDrawCallback.push({fn:Qt,sName:"scrolling"}),s[0])}function Qt(n){function t(t){(t=t.style).paddingTop="0",t.paddingBottom="0",t.borderTopWidth="0",t.borderBottomWidth="0",t.height=0}var e,a,r,o,i,l=n.oScroll,s=l.sX,u=l.sXInner,c=l.sY,l=l.iBarWidth,f=P(n.nScrollHead),d=f[0].style,h=f.children("div"),p=h[0].style,h=h.children("table"),g=n.nScrollBody,b=P(g),m=g.style,S=P(n.nScrollFoot).children("div"),v=S.children("table"),y=P(n.nTHead),D=P(n.nTable),_=D[0],w=_.style,C=n.nTFoot?P(n.nTFoot):null,T=n.oBrowser,x=T.bScrollOversize,A=(N(n.aoColumns,"nTh"),[]),I=[],F=[],L=[],R=g.scrollHeight>g.clientHeight;n.scrollBarVis!==R&&n.scrollBarVis!==H?(n.scrollBarVis=R,O(n)):(n.scrollBarVis=R,D.children("thead, tfoot").remove(),C&&(R=C.clone().prependTo(D),i=C.find("tr"),a=R.find("tr"),R.find("[id]").removeAttr("id")),R=y.clone().prependTo(D),y=y.find("tr"),e=R.find("tr"),R.find("th, 
td").removeAttr("tabindex"),R.find("[id]").removeAttr("id"),s||(m.width="100%",f[0].style.width="100%"),P.each(Ct(n,R),function(t,e){r=rt(n,t),e.style.width=n.aoColumns[r].sWidth}),C&&k(function(t){t.style.width=""},a),f=D.outerWidth(),""===s?(w.width="100%",x&&(D.find("tbody").height()>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(D.outerWidth()-l)),f=D.outerWidth()):""!==u&&(w.width=M(u),f=D.outerWidth()),k(t,e),k(function(t){var e=j.getComputedStyle?j.getComputedStyle(t).width:M(P(t).width());F.push(t.innerHTML),A.push(e)},e),k(function(t,e){t.style.width=A[e]},y),P(e).css("height",0),C&&(k(t,a),k(function(t){L.push(t.innerHTML),I.push(M(P(t).css("width")))},a),k(function(t,e){t.style.width=I[e]},i),P(a).height(0)),k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+F[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=A[e]},e),C&&k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+L[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=I[e]},a),Math.round(D.outerWidth())<Math.round(f)?(o=g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y")?f+l:f,x&&(g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(o-l)),""!==s&&""===u||W(n,1,"Possible column misalignment",6)):o="100%",m.width=M(o),d.width=M(o),C&&(n.nScrollFoot.style.width=M(o)),c||x&&(m.height=M(_.offsetHeight+l)),R=D.outerWidth(),h[0].style.width=M(R),p.width=M(R),y=D.height()>g.clientHeight||"scroll"==b.css("overflow-y"),p[i="padding"+(T.bScrollbarLeft?"Left":"Right")]=y?l+"px":"0px",C&&(v[0].style.width=M(R),S[0].style.width=M(R),S[0].style[i]=y?l+"px":"0px"),D.children("colgroup").insertBefore(D.children("thead")),b.trigger("scroll"),!n.bSorted&&!n.bFiltered||n._drawHold||(g.scrollTop=0))}function k(t,e,n){for(var 
a,r,o=0,i=0,l=e.length;i<l;){for(a=e[i].firstChild,r=n?n[i].firstChild:null;a;)1===a.nodeType&&(n?t(a,r,o):t(a,o),o++),a=a.nextSibling,r=n?r.nextSibling:null;i++}}var te=/<.*?>/g;function ee(t){var e,n,a=t.nTable,r=t.aoColumns,o=t.oScroll,i=o.sY,l=o.sX,o=o.sXInner,s=r.length,u=it(t,"bVisible"),c=P("th",t.nTHead),f=a.getAttribute("width"),d=a.parentNode,h=!1,p=t.oBrowser,g=p.bScrollOversize,b=a.style.width,m=(b&&-1!==b.indexOf("%")&&(f=b),ae(N(r,"sWidthOrig"),d));for(_=0;_<u.length;_++)null!==(e=r[u[_]]).sWidth&&(e.sWidth=m[_],h=!0);if(g||!h&&!l&&!i&&s==T(t)&&s==c.length)for(_=0;_<s;_++){var S=rt(t,_);null!==S&&(r[S].sWidth=M(c.eq(_).width()))}else{var b=P(a).clone().css("visibility","hidden").removeAttr("id"),v=(b.find("tbody tr").remove(),P("<tr/>").appendTo(b.find("tbody")));for(b.find("thead, tfoot").remove(),b.append(P(t.nTHead).clone()).append(P(t.nTFoot).clone()),b.find("tfoot th, tfoot td").css("width",""),c=Ct(t,b.find("thead")[0]),_=0;_<u.length;_++)e=r[u[_]],c[_].style.width=null!==e.sWidthOrig&&""!==e.sWidthOrig?M(e.sWidthOrig):"",e.sWidthOrig&&l&&P(c[_]).append(P("<div/>").css({width:e.sWidthOrig,margin:0,padding:0,border:0,height:1}));if(t.aoData.length)for(_=0;_<u.length;_++)e=r[n=u[_]],P(re(t,n)).clone(!1).append(e.sContentPadding).appendTo(v);P("[name]",b).removeAttr("name");for(var y=P("<div/>").css(l||i?{position:"absolute",top:0,left:0,height:1,right:0,overflow:"hidden"}:{}).append(b).appendTo(d),D=(l&&o?b.width(o):l?(b.css("width","auto"),b.removeAttr("width"),b.width()<d.clientWidth&&f&&b.width(d.clientWidth)):i?b.width(d.clientWidth):f&&b.width(f),0),_=0;_<u.length;_++){var w=P(c[_]),C=w.outerWidth()-w.width(),w=p.bBounding?Math.ceil(c[_].getBoundingClientRect().width):w.outerWidth();D+=w,r[u[_]].sWidth=M(w-C)}a.style.width=M(D),y.remove()}f&&(a.style.width=M(f)),!f&&!l||t._reszEvt||(o=function(){P(j).on("resize.DT-"+t.sInstance,ne(function(){O(t)}))},g?setTimeout(o,1e3):o(),t._reszEvt=!0)}var ne=w.util.throttle;function ae(t,e){for(var 
n=[],a=[],r=0;r<t.length;r++)t[r]?n.push(P("<div/>").css("width",M(t[r])).appendTo(e||v.body)):n.push(null);for(r=0;r<t.length;r++)a.push(n[r]?n[r][0].offsetWidth:null);return P(n).remove(),a}function re(t,e){var n,a=oe(t,e);return a<0?null:(n=t.aoData[a]).nTr?n.anCells[e]:P("<td/>").html(S(t,a,e,"display"))[0]}function oe(t,e){for(var n,a=-1,r=-1,o=0,i=t.aoData.length;o<i;o++)(n=(n=(n=S(t,o,e,"display")+"").replace(te,"")).replace(/ /g," ")).length>a&&(a=n.length,r=o);return r}function M(t){return null===t?"0px":"number"==typeof t?t<0?"0px":t+"px":t.match(/\d$/)?t+"px":t}function I(t){function e(t){t.length&&!Array.isArray(t[0])?h.push(t):P.merge(h,t)}var n,a,r,o,i,l,s,u=[],c=t.aoColumns,f=t.aaSortingFixed,d=P.isPlainObject(f),h=[];for(Array.isArray(f)&&e(f),d&&f.pre&&e(f.pre),e(t.aaSorting),d&&f.post&&e(f.post),n=0;n<h.length;n++)for(r=(o=c[s=h[n][a=0]].aDataSort).length;a<r;a++)l=c[i=o[a]].sType||"string",h[n]._idx===H&&(h[n]._idx=P.inArray(h[n][1],c[i].asSorting)),u.push({src:s,col:i,dir:h[n][1],index:h[n]._idx,type:l,formatter:w.ext.type.order[l+"-pre"]});return u}function ie(t){var e,n,a,r,c,f=[],u=w.ext.type.order,d=t.aoData,o=(t.aoColumns,0),i=t.aiDisplayMaster;for(lt(t),e=0,n=(c=I(t)).length;e<n;e++)(r=c[e]).formatter&&o++,fe(t,r.col);if("ssp"!=E(t)&&0!==c.length){for(e=0,a=i.length;e<a;e++)f[i[e]]=e;o===c.length?i.sort(function(t,e){for(var n,a,r,o,i=c.length,l=d[t]._aSortData,s=d[e]._aSortData,u=0;u<i;u++)if(0!=(r=(n=l[(o=c[u]).col])<(a=s[o.col])?-1:a<n?1:0))return"asc"===o.dir?r:-r;return(n=f[t])<(a=f[e])?-1:a<n?1:0}):i.sort(function(t,e){for(var n,a,r,o=c.length,i=d[t]._aSortData,l=d[e]._aSortData,s=0;s<o;s++)if(n=i[(r=c[s]).col],a=l[r.col],0!==(r=(u[r.type+"-"+r.dir]||u["string-"+r.dir])(n,a)))return r;return(n=f[t])<(a=f[e])?-1:a<n?1:0})}t.bSorted=!0}function le(t){for(var e=t.aoColumns,n=I(t),a=t.oLanguage.oAria,r=0,o=e.length;r<o;r++){var 
i=e[r],l=i.asSorting,s=i.ariaTitle||i.sTitle.replace(/<.*?>/g,""),u=i.nTh;u.removeAttribute("aria-sort"),i=i.bSortable?s+("asc"===(0<n.length&&n[0].col==r&&(u.setAttribute("aria-sort","asc"==n[0].dir?"ascending":"descending"),l[n[0].index+1])||l[0])?a.sSortAscending:a.sSortDescending):s,u.setAttribute("aria-label",i)}}function se(t,e,n,a){function r(t,e){var n=t._idx;return(n=n===H?P.inArray(t[1],s):n)+1<s.length?n+1:e?null:0}var o,i=t.aoColumns[e],l=t.aaSorting,s=i.asSorting;"number"==typeof l[0]&&(l=t.aaSorting=[l]),n&&t.oFeatures.bSortMulti?-1!==(i=P.inArray(e,N(l,"0")))?null===(o=null===(o=r(l[i],!0))&&1===l.length?0:o)?l.splice(i,1):(l[i][1]=s[o],l[i]._idx=o):(l.push([e,s[0],0]),l[l.length-1]._idx=0):l.length&&l[0][0]==e?(o=r(l[0]),l.length=1,l[0][1]=s[o],l[0]._idx=o):(l.length=0,l.push([e,s[0]]),l[0]._idx=0),u(t),"function"==typeof a&&a(t)}function ue(e,t,n,a){var r=e.aoColumns[n];me(t,{},function(t){!1!==r.bSortable&&(e.oFeatures.bProcessing?(D(e,!0),setTimeout(function(){se(e,n,t.shiftKey,a),"ssp"!==E(e)&&D(e,!1)},0)):se(e,n,t.shiftKey,a))})}function ce(t){var e,n,a,r=t.aLastSort,o=t.oClasses.sSortColumn,i=I(t),l=t.oFeatures;if(l.bSort&&l.bSortClasses){for(e=0,n=r.length;e<n;e++)a=r[e].src,P(N(t.aoData,"anCells",a)).removeClass(o+(e<2?e+1:3));for(e=0,n=i.length;e<n;e++)a=i[e].src,P(N(t.aoData,"anCells",a)).addClass(o+(e<2?e+1:3))}t.aLastSort=i}function fe(t,e){for(var n,a,r,o=t.aoColumns[e],i=w.ext.order[o.sSortDataType],l=(i&&(n=i.call(t.oInstance,t,e,ot(t,e))),w.ext.type.order[o.sType+"-pre"]),s=0,u=t.aoData.length;s<u;s++)(a=t.aoData[s])._aSortData||(a._aSortData=[]),a._aSortData[e]&&!i||(r=i?n[s]:S(t,s,e,"sort"),a._aSortData[e]=l?l(r):r)}function de(n){var t;n._bLoadingState||(t={time:+new 
Date,start:n._iDisplayStart,length:n._iDisplayLength,order:P.extend(!0,[],n.aaSorting),search:Et(n.oPreviousSearch),columns:P.map(n.aoColumns,function(t,e){return{visible:t.bVisible,search:Et(n.aoPreSearchCols[e])}})},n.oSavedState=t,R(n,"aoStateSaveParams","stateSaveParams",[n,t]),n.oFeatures.bStateSave&&!n.bDestroying&&n.fnStateSaveCallback.call(n.oInstance,n,t))}function he(e,t,n){var a;if(e.oFeatures.bStateSave)return(a=e.fnStateLoadCallback.call(e.oInstance,e,function(t){pe(e,t,n)}))!==H&&pe(e,a,n),!0;n()}function pe(n,t,e){var a,r,o=n.aoColumns,i=(n._bLoadingState=!0,n._bInitComplete?new w.Api(n):null);if(t&&t.time){var l=R(n,"aoStateLoadParams","stateLoadParams",[n,t]);if(-1!==P.inArray(!1,l))n._bLoadingState=!1;else{l=n.iStateDuration;if(0<l&&t.time<+new Date-1e3*l)n._bLoadingState=!1;else if(t.columns&&o.length!==t.columns.length)n._bLoadingState=!1;else{if(n.oLoadedState=P.extend(!0,{},t),t.length!==H&&(i?i.page.len(t.length):n._iDisplayLength=t.length),t.start!==H&&(null===i?(n._iDisplayStart=t.start,n.iInitDisplayStart=t.start):Yt(n,t.start/n._iDisplayLength)),t.order!==H&&(n.aaSorting=[],P.each(t.order,function(t,e){n.aaSorting.push(e[0]>=o.length?[0,e[1]]:e)})),t.search!==H&&P.extend(n.oPreviousSearch,Bt(t.search)),t.columns){for(a=0,r=t.columns.length;a<r;a++){var s=t.columns[a];s.visible!==H&&(i?i.column(a).visible(s.visible,!1):o[a].bVisible=s.visible),s.search!==H&&P.extend(n.aoPreSearchCols[a],Bt(s.search))}i&&i.columns.adjust()}n._bLoadingState=!1,R(n,"aoStateLoaded","stateLoaded",[n,t])}}}else n._bLoadingState=!1;e()}function ge(t){var e=w.settings,t=P.inArray(t,N(e,"nTable"));return-1!==t?e[t]:null}function W(t,e,n,a){if(n="DataTables warning: "+(t?"table id="+t.sTableId+" - ":"")+n,a&&(n+=". 
For more information about this error, please see https://datatables.net/tn/"+a),e)j.console&&console.log&&console.log(n);else{e=w.ext,e=e.sErrMode||e.errMode;if(t&&R(t,null,"error",[t,a,n]),"alert"==e)alert(n);else{if("throw"==e)throw new Error(n);"function"==typeof e&&e(t,a,n)}}}function F(n,a,t,e){Array.isArray(t)?P.each(t,function(t,e){Array.isArray(e)?F(n,a,e[0],e[1]):F(n,a,e)}):(e===H&&(e=t),a[t]!==H&&(n[e]=a[t]))}function be(t,e,n){var a,r;for(r in e)e.hasOwnProperty(r)&&(a=e[r],P.isPlainObject(a)?(P.isPlainObject(t[r])||(t[r]={}),P.extend(!0,t[r],a)):n&&"data"!==r&&"aaData"!==r&&Array.isArray(a)?t[r]=a.slice():t[r]=a);return t}function me(e,t,n){P(e).on("click.DT",t,function(t){P(e).trigger("blur"),n(t)}).on("keypress.DT",t,function(t){13===t.which&&(t.preventDefault(),n(t))}).on("selectstart.DT",function(){return!1})}function L(t,e,n,a){n&&t[e].push({fn:n,sName:a})}function R(n,t,e,a){var r=[];return t&&(r=P.map(n[t].slice().reverse(),function(t,e){return t.fn.apply(n.oInstance,a)})),null!==e&&(t=P.Event(e+".dt"),(e=P(n.nTable)).trigger(t,a),0===e.parents("body").length&&P("body").trigger(t,a),r.push(t.result)),r}function Se(t){var e=t._iDisplayStart,n=t.fnDisplayEnd(),a=t._iDisplayLength;n<=e&&(e=n-a),e-=e%a,t._iDisplayStart=e=-1===a||e<0?0:e}function ve(t,e){var t=t.renderer,n=w.ext.renderer[e];return P.isPlainObject(t)&&t[e]?n[t[e]]||n._:"string"==typeof t&&n[t]||n._}function E(t){return t.oFeatures.bServerSide?"ssp":t.ajax||t.sAjaxSource?"ajax":"dom"}function ye(t,n){var a;return Array.isArray(t)?P.map(t,function(t){return ye(t,n)}):"number"==typeof t?[n[t]]:(a=P.map(n,function(t,e){return t.nTable}),P(a).filter(t).map(function(t){var e=P.inArray(this,a);return n[e]}).toArray())}function De(r,o,t){var e,n;t&&(e=new B(r)).one("draw",function(){t(e.ajax.json())}),"ssp"==E(r)?u(r,o):(D(r,!0),(n=r.jqXHR)&&4!==n.readyState&&n.abort(),Tt(r,[],function(t){pt(r);for(var e=Ft(r,t),n=0,a=e.length;n<a;n++)x(r,e[n]);u(r,o),D(r,!1)}))}function _e(t,e,n,a,r){for(var 
o,i,l,s,u=[],c=typeof e,f=0,d=(e=e&&"string"!=c&&"function"!=c&&e.length!==H?e:[e]).length;f<d;f++)for(l=0,s=(i=e[f]&&e[f].split&&!e[f].match(/[\[\(:]/)?e[f].split(","):[e[f]]).length;l<s;l++)(o=n("string"==typeof i[l]?i[l].trim():i[l]))&&o.length&&(u=u.concat(o));var h=p.selector[t];if(h.length)for(f=0,d=h.length;f<d;f++)u=h[f](a,r,u);return z(u)}function we(t){return(t=t||{}).filter&&t.search===H&&(t.search=t.filter),P.extend({search:"none",order:"current",page:"all"},t)}function Ce(t){for(var e=0,n=t.length;e<n;e++)if(0<t[e].length)return t[0]=t[e],t[0].length=1,t.length=1,t.context=[t.context[e]],t;return t.length=0,t}function Te(o,t,e,n){function i(t,e){var n;if(Array.isArray(t)||t instanceof P)for(var a=0,r=t.length;a<r;a++)i(t[a],e);else t.nodeName&&"tr"===t.nodeName.toLowerCase()?l.push(t):(n=P("<tr><td></td></tr>").addClass(e),P("td",n).addClass(e).html(t)[0].colSpan=T(o),l.push(n[0]))}var l=[];i(e,n),t._details&&t._details.detach(),t._details=P(l),t._detailsShow&&t._details.insertAfter(t.nTr)}function xe(t,e){var n=t.context;if(n.length&&t.length){var a=n[0].aoData[t[0]];if(a._details){(a._detailsShow=e)?(a._details.insertAfter(a.nTr),P(a.nTr).addClass("dt-hasChild")):(a._details.detach(),P(a.nTr).removeClass("dt-hasChild")),R(n[0],null,"childRow",[e,t.row(t[0])]);var s=n[0],r=new B(s),a=".dt.DT_details",e="draw"+a,t="column-sizing"+a,a="destroy"+a,u=s.aoData;if(r.off(e+" "+t+" "+a),N(u,"_details").length>0){r.on(e,function(t,e){if(s!==e)return;r.rows({page:"current"}).eq(0).each(function(t){var e=u[t];if(e._detailsShow)e._details.insertAfter(e.nTr)})});r.on(t,function(t,e,n,a){if(s!==e)return;var r,o=T(e);for(var i=0,l=u.length;i<l;i++){r=u[i];if(r._details)r._details.each(function(){var t=P(this).children("td");if(t.length==1)t.attr("colspan",o)})}});r.on(a,function(t,e){if(s!==e)return;for(var n=0,a=u.length;n<a;n++)if(u[n]._details)Re(r,n)})}Le(n)}}}function Ae(t,e,n,a,r){for(var o=[],i=0,l=r.length;i<l;i++)o.push(S(t,r[i],e));return o}var 
Ie=[],o=Array.prototype,B=function(t,e){if(!(this instanceof B))return new B(t,e);function n(t){var e,n,a,r;t=t,a=w.settings,r=P.map(a,function(t,e){return t.nTable}),(t=t?t.nTable&&t.oApi?[t]:t.nodeName&&"table"===t.nodeName.toLowerCase()?-1!==(e=P.inArray(t,r))?[a[e]]:null:t&&"function"==typeof t.settings?t.settings().toArray():("string"==typeof t?n=P(t):t instanceof P&&(n=t),n?n.map(function(t){return-1!==(e=P.inArray(this,r))?a[e]:null}).toArray():void 0):[])&&o.push.apply(o,t)}var o=[];if(Array.isArray(t))for(var a=0,r=t.length;a<r;a++)n(t[a]);else n(t);this.context=z(o),e&&P.merge(this,e),this.selector={rows:null,cols:null,opts:null},B.extend(this,this,Ie)},Fe=(w.Api=B,P.extend(B.prototype,{any:function(){return 0!==this.count()},concat:o.concat,context:[],count:function(){return this.flatten().length},each:function(t){for(var e=0,n=this.length;e<n;e++)t.call(this,this[e],e,this);return this},eq:function(t){var e=this.context;return e.length>t?new B(e[t],this[t]):null},filter:function(t){var e=[];if(o.filter)e=o.filter.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)t.call(this,this[n],n,this)&&e.push(this[n]);return new B(this.context,e)},flatten:function(){var t=[];return new B(this.context,t.concat.apply(t,this.toArray()))},join:o.join,indexOf:o.indexOf||function(t,e){for(var n=e||0,a=this.length;n<a;n++)if(this[n]===t)return n;return-1},iterator:function(t,e,n,a){var r,o,i,l,s,u,c,f,d=[],h=this.context,p=this.selector;for("string"==typeof t&&(a=n,n=e,e=t,t=!1),o=0,i=h.length;o<i;o++){var g=new B(h[o]);if("table"===e)(r=n.call(g,h[o],o))!==H&&d.push(r);else if("columns"===e||"rows"===e)(r=n.call(g,h[o],this[o],o))!==H&&d.push(r);else if("column"===e||"column-rows"===e||"row"===e||"cell"===e)for(c=this[o],"column-rows"===e&&(u=Fe(h[o],p.opts)),l=0,s=c.length;l<s;l++)f=c[l],(r="cell"===e?n.call(g,h[o],f.row,f.column,o,l):n.call(g,h[o],f,o,l,u))!==H&&d.push(r)}return d.length||a?((t=(a=new 
B(h,t?d.concat.apply([],d):d)).selector).rows=p.rows,t.cols=p.cols,t.opts=p.opts,a):this},lastIndexOf:o.lastIndexOf||function(t,e){return this.indexOf.apply(this.toArray.reverse(),arguments)},length:0,map:function(t){var e=[];if(o.map)e=o.map.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)e.push(t.call(this,this[n],n));return new B(this.context,e)},pluck:function(t){var e=w.util.get(t);return this.map(function(t){return e(t)})},pop:o.pop,push:o.push,reduce:o.reduce||function(t,e){return et(this,t,e,0,this.length,1)},reduceRight:o.reduceRight||function(t,e){return et(this,t,e,this.length-1,-1,-1)},reverse:o.reverse,selector:null,shift:o.shift,slice:function(){return new B(this.context,this)},sort:o.sort,splice:o.splice,toArray:function(){return o.slice.call(this)},to$:function(){return P(this)},toJQuery:function(){return P(this)},unique:function(){return new B(this.context,z(this))},unshift:o.unshift}),B.extend=function(t,e,n){if(n.length&&e&&(e instanceof B||e.__dt_wrapper))for(var a,r=0,o=n.length;r<o;r++)e[(a=n[r]).name]="function"===a.type?function(e,n,a){return function(){var t=n.apply(e,arguments);return B.extend(t,t,a.methodExt),t}}(t,a.val,a):"object"===a.type?{}:a.val,e[a.name].__dt_wrapper=!0,B.extend(t,e[a.name],a.propExt)},B.register=e=function(t,e){if(Array.isArray(t))for(var n=0,a=t.length;n<a;n++)B.register(t[n],e);else for(var r=t.split("."),o=Ie,i=0,l=r.length;i<l;i++){var s,u,c=function(t,e){for(var n=0,a=t.length;n<a;n++)if(t[n].name===e)return t[n];return null}(o,u=(s=-1!==r[i].indexOf("()"))?r[i].replace("()",""):r[i]);c||o.push(c={name:u,val:{},methodExt:[],propExt:[],type:"object"}),i===l-1?(c.val=e,c.type="function"==typeof e?"function":P.isPlainObject(e)?"object":"other"):o=s?c.methodExt:c.propExt}},B.registerPlural=t=function(t,e,n){B.register(t,n),B.register(e,function(){var t=n.apply(this,arguments);return t===this?this:t instanceof B?t.length?Array.isArray(t[0])?new B(t.context,t[0]):t[0]:H:t})},e("tables()",function(t){return 
t!==H&&null!==t?new B(ye(t,this.context)):this}),e("table()",function(t){var t=this.tables(t),e=t.context;return e.length?new B(e[0]):t}),t("tables().nodes()","table().node()",function(){return this.iterator("table",function(t){return t.nTable},1)}),t("tables().body()","table().body()",function(){return this.iterator("table",function(t){return t.nTBody},1)}),t("tables().header()","table().header()",function(){return this.iterator("table",function(t){return t.nTHead},1)}),t("tables().footer()","table().footer()",function(){return this.iterator("table",function(t){return t.nTFoot},1)}),t("tables().containers()","table().container()",function(){return this.iterator("table",function(t){return t.nTableWrapper},1)}),e("draw()",function(e){return this.iterator("table",function(t){"page"===e?y(t):u(t,!1===(e="string"==typeof e?"full-hold"!==e:e))})}),e("page()",function(e){return e===H?this.page.info().page:this.iterator("table",function(t){Yt(t,e)})}),e("page.info()",function(t){var e,n,a,r,o;return 0===this.context.length?H:(n=(e=this.context[0])._iDisplayStart,a=e.oFeatures.bPaginate?e._iDisplayLength:-1,r=e.fnRecordsDisplay(),{page:(o=-1===a)?0:Math.floor(n/a),pages:o?1:Math.ceil(r/a),start:n,end:e.fnDisplayEnd(),length:a,recordsTotal:e.fnRecordsTotal(),recordsDisplay:r,serverSide:"ssp"===E(e)})}),e("page.len()",function(e){return e===H?0!==this.context.length?this.context[0]._iDisplayLength:H:this.iterator("table",function(t){$t(t,e)})}),e("ajax.json()",function(){var t=this.context;if(0<t.length)return t[0].json}),e("ajax.params()",function(){var t=this.context;if(0<t.length)return t[0].oAjaxData}),e("ajax.reload()",function(e,n){return this.iterator("table",function(t){De(t,!1===n,e)})}),e("ajax.url()",function(e){var t=this.context;return e===H?0===t.length?H:(t=t[0]).ajax?P.isPlainObject(t.ajax)?t.ajax.url:t.ajax:t.sAjaxSource:this.iterator("table",function(t){P.isPlainObject(t.ajax)?t.ajax.url=e:t.ajax=e})}),e("ajax.url().load()",function(e,n){return 
this.iterator("table",function(t){De(t,!1===n,e)})}),function(t,e){var n,a=[],r=t.aiDisplay,o=t.aiDisplayMaster,i=e.search,l=e.order,e=e.page;if("ssp"==E(t))return"removed"===i?[]:f(0,o.length);if("current"==e)for(u=t._iDisplayStart,c=t.fnDisplayEnd();u<c;u++)a.push(r[u]);else if("current"==l||"applied"==l){if("none"==i)a=o.slice();else if("applied"==i)a=r.slice();else if("removed"==i){for(var s={},u=0,c=r.length;u<c;u++)s[r[u]]=null;a=P.map(o,function(t){return s.hasOwnProperty(t)?null:t})}}else if("index"==l||"original"==l)for(u=0,c=t.aoData.length;u<c;u++)("none"==i||-1===(n=P.inArray(u,r))&&"removed"==i||0<=n&&"applied"==i)&&a.push(u);return a}),Le=(e("rows()",function(e,n){e===H?e="":P.isPlainObject(e)&&(n=e,e=""),n=we(n);var t=this.iterator("table",function(t){return _e("row",e,function(n){var t=d(n),a=r.aoData;if(null!==t&&!o)return[t];if(i=i||Fe(r,o),null!==t&&-1!==P.inArray(t,i))return[t];if(null===n||n===H||""===n)return i;if("function"==typeof n)return P.map(i,function(t){var e=a[t];return n(t,e._aData,e.nTr)?t:null});if(n.nodeName)return t=n._DT_RowIndex,e=n._DT_CellIndex,t!==H?a[t]&&a[t].nTr===n?[t]:[]:e?a[e.row]&&a[e.row].nTr===n.parentNode?[e.row]:[]:(t=P(n).closest("*[data-dt-row]")).length?[t.data("dt-row")]:[];if("string"==typeof n&&"#"===n.charAt(0)){var e=r.aIds[n.replace(/^#/,"")];if(e!==H)return[e.idx]}t=_(m(r.aoData,i,"nTr"));return P(t).filter(n).map(function(){return this._DT_RowIndex}).toArray()},r=t,o=n);var r,o,i},1);return t.selector.rows=e,t.selector.opts=n,t}),e("rows().nodes()",function(){return this.iterator("row",function(t,e){return t.aoData[e].nTr||H},1)}),e("rows().data()",function(){return this.iterator(!0,"rows",function(t,e){return m(t.aoData,e,"_aData")},1)}),t("rows().cache()","row().cache()",function(n){return this.iterator("row",function(t,e){t=t.aoData[e];return"search"===n?t._aFilterData:t._aSortData},1)}),t("rows().invalidate()","row().invalidate()",function(n){return 
this.iterator("row",function(t,e){bt(t,e,n)})}),t("rows().indexes()","row().index()",function(){return this.iterator("row",function(t,e){return e},1)}),t("rows().ids()","row().id()",function(t){for(var e=[],n=this.context,a=0,r=n.length;a<r;a++)for(var o=0,i=this[a].length;o<i;o++){var l=n[a].rowIdFn(n[a].aoData[this[a][o]]._aData);e.push((!0===t?"#":"")+l)}return new B(n,e)}),t("rows().remove()","row().remove()",function(){var f=this;return this.iterator("row",function(t,e,n){var a,r,o,i,l,s,u=t.aoData,c=u[e];for(u.splice(e,1),a=0,r=u.length;a<r;a++)if(s=(l=u[a]).anCells,null!==l.nTr&&(l.nTr._DT_RowIndex=a),null!==s)for(o=0,i=s.length;o<i;o++)s[o]._DT_CellIndex.row=a;gt(t.aiDisplayMaster,e),gt(t.aiDisplay,e),gt(f[n],e,!1),0<t._iRecordsDisplay&&t._iRecordsDisplay--,Se(t);n=t.rowIdFn(c._aData);n!==H&&delete t.aIds[n]}),this.iterator("table",function(t){for(var e=0,n=t.aoData.length;e<n;e++)t.aoData[e].idx=e}),this}),e("rows.add()",function(o){var t=this.iterator("table",function(t){for(var e,n=[],a=0,r=o.length;a<r;a++)(e=o[a]).nodeName&&"TR"===e.nodeName.toUpperCase()?n.push(ut(t,e)[0]):n.push(x(t,e));return n},1),e=this.rows(-1);return e.pop(),P.merge(e,t),e}),e("row()",function(t,e){return Ce(this.rows(t,e))}),e("row().data()",function(t){var e,n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._aData:H:((e=n[0].aoData[this[0]])._aData=t,Array.isArray(t)&&e.nTr&&e.nTr.id&&b(n[0].rowId)(t,e.nTr.id),bt(n[0],this[0],"data"),this)}),e("row().node()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]].nTr||null}),e("row.add()",function(e){e instanceof P&&e.length&&(e=e[0]);var t=this.iterator("table",function(t){return e.nodeName&&"TR"===e.nodeName.toUpperCase()?ut(t,e)[0]:x(t,e)});return this.row(t[0])}),P(v).on("plugin-init.dt",function(t,e){var n=new B(e),a="on-plugin-init",r="stateSaveParams."+a,o="destroy. 
"+a,a=(n.on(r,function(t,e,n){for(var a=e.rowIdFn,r=e.aoData,o=[],i=0;i<r.length;i++)r[i]._detailsShow&&o.push("#"+a(r[i]._aData));n.childRows=o}),n.on(o,function(){n.off(r+" "+o)}),n.state.loaded());a&&a.childRows&&n.rows(P.map(a.childRows,function(t){return t.replace(/:/g,"\\:")})).every(function(){R(e,null,"requestChild",[this])})}),w.util.throttle(function(t){de(t[0])},500)),Re=function(t,e){var n=t.context;n.length&&(e=n[0].aoData[e!==H?e:t[0]])&&e._details&&(e._details.remove(),e._detailsShow=H,e._details=H,P(e.nTr).removeClass("dt-hasChild"),Le(n))},Pe="row().child",je=Pe+"()",He=(e(je,function(t,e){var n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._details:H:(!0===t?this.child.show():!1===t?Re(this):n.length&&this.length&&Te(n[0],n[0].aoData[this[0]],t,e),this)}),e([Pe+".show()",je+".show()"],function(t){return xe(this,!0),this}),e([Pe+".hide()",je+".hide()"],function(){return xe(this,!1),this}),e([Pe+".remove()",je+".remove()"],function(){return Re(this),this}),e(Pe+".isShown()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]]._detailsShow||!1}),/^([^:]+):(name|visIdx|visible)$/),Ne=(e("columns()",function(n,a){n===H?n="":P.isPlainObject(n)&&(a=n,n=""),a=we(a);var t=this.iterator("table",function(t){return e=n,l=a,s=(i=t).aoColumns,u=N(s,"sName"),c=N(s,"nTh"),_e("column",e,function(n){var a,t=d(n);if(""===n)return f(s.length);if(null!==t)return[0<=t?t:s.length+t];if("function"==typeof n)return a=Fe(i,l),P.map(s,function(t,e){return n(e,Ae(i,e,0,0,a),c[e])?e:null});var r="string"==typeof n?n.match(He):"";if(r)switch(r[2]){case"visIdx":case"visible":var e,o=parseInt(r[1],10);return o<0?[(e=P.map(s,function(t,e){return t.bVisible?e:null}))[e.length+o]]:[rt(i,o)];case"name":return P.map(u,function(t,e){return t===r[1]?e:null});default:return[]}return n.nodeName&&n._DT_CellIndex?[n._DT_CellIndex.column]:(t=P(c).filter(n).map(function(){return 
P.inArray(this,c)}).toArray()).length||!n.nodeName?t:(t=P(n).closest("*[data-dt-column]")).length?[t.data("dt-column")]:[]},i,l);var i,e,l,s,u,c},1);return t.selector.cols=n,t.selector.opts=a,t}),t("columns().header()","column().header()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTh},1)}),t("columns().footer()","column().footer()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTf},1)}),t("columns().data()","column().data()",function(){return this.iterator("column-rows",Ae,1)}),t("columns().dataSrc()","column().dataSrc()",function(){return this.iterator("column",function(t,e){return t.aoColumns[e].mData},1)}),t("columns().cache()","column().cache()",function(o){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"search"===o?"_aFilterData":"_aSortData",e)},1)}),t("columns().nodes()","column().nodes()",function(){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"anCells",e)},1)}),t("columns().visible()","column().visible()",function(f,n){var e=this,t=this.iterator("column",function(t,e){if(f===H)return t.aoColumns[e].bVisible;var n,a,r=e,e=f,o=t.aoColumns,i=o[r],l=t.aoData;if(e===H)i.bVisible;else if(i.bVisible!==e){if(e)for(var s=P.inArray(!0,N(o,"bVisible"),r+1),u=0,c=l.length;u<c;u++)a=l[u].nTr,n=l[u].anCells,a&&a.insertBefore(n[r],n[s]||null);else P(N(t.aoData,"anCells",r)).detach();i.bVisible=e}});return f!==H&&this.iterator("table",function(t){Dt(t,t.aoHeader),Dt(t,t.aoFooter),t.aiDisplay.length||P(t.nTBody).find("td[colspan]").attr("colspan",T(t)),de(t),e.iterator("column",function(t,e){R(t,null,"column-visibility",[t,e,f,n])}),n!==H&&!n||e.columns.adjust()}),t}),t("columns().indexes()","column().index()",function(n){return this.iterator("column",function(t,e){return"visible"===n?ot(t,e):e},1)}),e("columns.adjust()",function(){return this.iterator("table",function(t){O(t)},1)}),e("column.index()",function(t,e){var 
n;if(0!==this.context.length)return n=this.context[0],"fromVisible"===t||"toData"===t?rt(n,e):"fromData"===t||"toVisible"===t?ot(n,e):void 0}),e("column()",function(t,e){return Ce(this.columns(t,e))}),e("cells()",function(g,t,b){var a,r,o,i,l,s,e;return P.isPlainObject(g)&&(g.row===H?(b=g,g=null):(b=t,t=null)),P.isPlainObject(t)&&(b=t,t=null),null===t||t===H?this.iterator("table",function(t){return a=t,t=g,e=we(b),f=a.aoData,d=Fe(a,e),n=_(m(f,d,"anCells")),h=P(Y([],n)),p=a.aoColumns.length,_e("cell",t,function(t){var e,n="function"==typeof t;if(null===t||t===H||n){for(o=[],i=0,l=d.length;i<l;i++)for(r=d[i],s=0;s<p;s++)u={row:r,column:s},(!n||(c=f[r],t(u,S(a,r,s),c.anCells?c.anCells[s]:null)))&&o.push(u);return o}return P.isPlainObject(t)?t.column!==H&&t.row!==H&&-1!==P.inArray(t.row,d)?[t]:[]:(e=h.filter(t).map(function(t,e){return{row:e._DT_CellIndex.row,column:e._DT_CellIndex.column}}).toArray()).length||!t.nodeName?e:(c=P(t).closest("*[data-dt-row]")).length?[{row:c.data("dt-row"),column:c.data("dt-column")}]:[]},a,e);var a,e,r,o,i,l,s,u,c,f,d,n,h,p}):(e=b?{page:b.page,order:b.order,search:b.search}:{},a=this.columns(t,e),r=this.rows(g,e),e=this.iterator("table",function(t,e){var n=[];for(o=0,i=r[e].length;o<i;o++)for(l=0,s=a[e].length;l<s;l++)n.push({row:r[e][o],column:a[e][l]});return n},1),e=b&&b.selected?this.cells(e,b):e,P.extend(e.selector,{cols:t,rows:g,opts:b}),e)}),t("cells().nodes()","cell().node()",function(){return this.iterator("cell",function(t,e,n){t=t.aoData[e];return t&&t.anCells?t.anCells[n]:H},1)}),e("cells().data()",function(){return this.iterator("cell",function(t,e,n){return S(t,e,n)},1)}),t("cells().cache()","cell().cache()",function(a){return a="search"===a?"_aFilterData":"_aSortData",this.iterator("cell",function(t,e,n){return t.aoData[e][a][n]},1)}),t("cells().render()","cell().render()",function(a){return this.iterator("cell",function(t,e,n){return S(t,e,n,a)},1)}),t("cells().indexes()","cell().index()",function(){return 
this.iterator("cell",function(t,e,n){return{row:e,column:n,columnVisible:ot(t,n)}},1)}),t("cells().invalidate()","cell().invalidate()",function(a){return this.iterator("cell",function(t,e,n){bt(t,e,a,n)})}),e("cell()",function(t,e,n){return Ce(this.cells(t,e,n))}),e("cell().data()",function(t){var e=this.context,n=this[0];return t===H?e.length&&n.length?S(e[0],n[0].row,n[0].column):H:(ct(e[0],n[0].row,n[0].column,t),bt(e[0],n[0].row,"data",n[0].column),this)}),e("order()",function(e,t){var n=this.context;return e===H?0!==n.length?n[0].aaSorting:H:("number"==typeof e?e=[[e,t]]:e.length&&!Array.isArray(e[0])&&(e=Array.prototype.slice.call(arguments)),this.iterator("table",function(t){t.aaSorting=e.slice()}))}),e("order.listener()",function(e,n,a){return this.iterator("table",function(t){ue(t,e,n,a)})}),e("order.fixed()",function(e){var t;return e?this.iterator("table",function(t){t.aaSortingFixed=P.extend(!0,{},e)}):(t=(t=this.context).length?t[0].aaSortingFixed:H,Array.isArray(t)?{pre:t}:t)}),e(["columns().order()","column().order()"],function(a){var r=this;return this.iterator("table",function(t,e){var n=[];P.each(r[e],function(t,e){n.push([e,a])}),t.aaSorting=n})}),e("search()",function(e,n,a,r){var t=this.context;return e===H?0!==t.length?t[0].oPreviousSearch.sSearch:H:this.iterator("table",function(t){t.oFeatures.bFilter&&Rt(t,P.extend({},t.oPreviousSearch,{sSearch:e+"",bRegex:null!==n&&n,bSmart:null===a||a,bCaseInsensitive:null===r||r}),1)})}),t("columns().search()","column().search()",function(a,r,o,i){return this.iterator("column",function(t,e){var n=t.aoPreSearchCols;if(a===H)return n[e].sSearch;t.oFeatures.bFilter&&(P.extend(n[e],{sSearch:a+"",bRegex:null!==r&&r,bSmart:null===o||o,bCaseInsensitive:null===i||i}),Rt(t,t.oPreviousSearch,1))})}),e("state()",function(){return this.context.length?this.context[0].oSavedState:null}),e("state.clear()",function(){return 
this.iterator("table",function(t){t.fnStateSaveCallback.call(t.oInstance,t,{})})}),e("state.loaded()",function(){return this.context.length?this.context[0].oLoadedState:null}),e("state.save()",function(){return this.iterator("table",function(t){de(t)})}),w.use=function(t,e){"lib"===e||t.fn?P=t:"win"==e||t.document?v=(j=t).document:"datetime"!==e&&"DateTime"!==t.type||(w.DateTime=t)},w.factory=function(t,e){var n=!1;return t&&t.document&&(v=(j=t).document),e&&e.fn&&e.fn.jquery&&(P=e,n=!0),n},w.versionCheck=w.fnVersionCheck=function(t){for(var e,n,a=w.version.split("."),r=t.split("."),o=0,i=r.length;o<i;o++)if((e=parseInt(a[o],10)||0)!==(n=parseInt(r[o],10)||0))return n<e;return!0},w.isDataTable=w.fnIsDataTable=function(t){var r=P(t).get(0),o=!1;return t instanceof w.Api||(P.each(w.settings,function(t,e){var n=e.nScrollHead?P("table",e.nScrollHead)[0]:null,a=e.nScrollFoot?P("table",e.nScrollFoot)[0]:null;e.nTable!==r&&n!==r&&a!==r||(o=!0)}),o)},w.tables=w.fnTables=function(e){var t=!1,n=(P.isPlainObject(e)&&(t=e.api,e=e.visible),P.map(w.settings,function(t){if(!e||P(t.nTable).is(":visible"))return t.nTable}));return t?new B(n):n},w.camelToHungarian=C,e("$()",function(t,e){e=this.rows(e).nodes(),e=P(e);return P([].concat(e.filter(t).toArray(),e.find(t).toArray()))}),P.each(["on","one","off"],function(t,n){e(n+"()",function(){var t=Array.prototype.slice.call(arguments),e=(t[0]=P.map(t[0].split(/\s/),function(t){return t.match(/\.dt\b/)?t:t+".dt"}).join(" "),P(this.tables().nodes()));return e[n].apply(e,t),this})}),e("clear()",function(){return this.iterator("table",function(t){pt(t)})}),e("settings()",function(){return new B(this.context,this.context)}),e("init()",function(){var t=this.context;return t.length?t[0].oInit:null}),e("data()",function(){return this.iterator("table",function(t){return N(t.aoData,"_aData")}).flatten()}),e("destroy()",function(c){return c=c||!1,this.iterator("table",function(e){var 
n,t=e.oClasses,a=e.nTable,r=e.nTBody,o=e.nTHead,i=e.nTFoot,l=P(a),r=P(r),s=P(e.nTableWrapper),u=P.map(e.aoData,function(t){return t.nTr}),i=(e.bDestroying=!0,R(e,"aoDestroyCallback","destroy",[e]),c||new B(e).columns().visible(!0),s.off(".DT").find(":not(tbody *)").off(".DT"),P(j).off(".DT-"+e.sInstance),a!=o.parentNode&&(l.children("thead").detach(),l.append(o)),i&&a!=i.parentNode&&(l.children("tfoot").detach(),l.append(i)),e.aaSorting=[],e.aaSortingFixed=[],ce(e),P(u).removeClass(e.asStripeClasses.join(" ")),P("th, td",o).removeClass(t.sSortable+" "+t.sSortableAsc+" "+t.sSortableDesc+" "+t.sSortableNone),r.children().detach(),r.append(u),e.nTableWrapper.parentNode),o=c?"remove":"detach",u=(l[o](),s[o](),!c&&i&&(i.insertBefore(a,e.nTableReinsertBefore),l.css("width",e.sDestroyWidth).removeClass(t.sTable),n=e.asDestroyStripes.length)&&r.children().each(function(t){P(this).addClass(e.asDestroyStripes[t%n])}),P.inArray(e,w.settings));-1!==u&&w.settings.splice(u,1)})}),P.each(["column","row","cell"],function(t,s){e(s+"s().every()",function(o){var i=this.selector.opts,l=this;return this.iterator(s,function(t,e,n,a,r){o.call(l[s](e,"cell"===s?n:i,"cell"===s?i:H),e,n,a,r)})})}),e("i18n()",function(t,e,n){var a=this.context[0],t=A(t)(a.oLanguage);return 
t===H&&(t=e),"string"==typeof(t=n!==H&&P.isPlainObject(t)?t[n]!==H?t[n]:t._:t)?t.replace("%d",n):t}),w.version="1.13.8",w.settings=[],w.models={},w.models.oSearch={bCaseInsensitive:!0,sSearch:"",bRegex:!1,bSmart:!0,return:!1},w.models.oRow={nTr:null,anCells:null,_aData:[],_aSortData:null,_aFilterData:null,_sFilterRow:null,_sRowStripe:"",src:null,idx:-1},w.models.oColumn={idx:null,aDataSort:null,asSorting:null,bSearchable:null,bSortable:null,bVisible:null,_sManualType:null,_bAttrSrc:!1,fnCreatedCell:null,fnGetData:null,fnSetData:null,mData:null,mRender:null,nTh:null,nTf:null,sClass:null,sContentPadding:null,sDefaultContent:null,sName:null,sSortDataType:"std",sSortingClass:null,sSortingClassJUI:null,sTitle:null,sType:null,sWidth:null,sWidthOrig:null},w.defaults={aaData:null,aaSorting:[[0,"asc"]],aaSortingFixed:[],ajax:null,aLengthMenu:[10,25,50,100],aoColumns:null,aoColumnDefs:null,aoSearchCols:[],asStripeClasses:null,bAutoWidth:!0,bDeferRender:!1,bDestroy:!1,bFilter:!0,bInfo:!0,bLengthChange:!0,bPaginate:!0,bProcessing:!1,bRetrieve:!1,bScrollCollapse:!1,bServerSide:!1,bSort:!0,bSortMulti:!0,bSortCellsTop:!1,bSortClasses:!0,bStateSave:!1,fnCreatedRow:null,fnDrawCallback:null,fnFooterCallback:null,fnFormatNumber:function(t){return t.toString().replace(/\B(?=(\d{3})+(?!\d))/g,this.oLanguage.sThousands)},fnHeaderCallback:null,fnInfoCallback:null,fnInitComplete:null,fnPreDrawCallback:null,fnRowCallback:null,fnServerData:null,fnServerParams:null,fnStateLoadCallback:function(t){try{return 
JSON.parse((-1===t.iStateDuration?sessionStorage:localStorage).getItem("DataTables_"+t.sInstance+"_"+location.pathname))}catch(t){return{}}},fnStateLoadParams:null,fnStateLoaded:null,fnStateSaveCallback:function(t,e){try{(-1===t.iStateDuration?sessionStorage:localStorage).setItem("DataTables_"+t.sInstance+"_"+location.pathname,JSON.stringify(e))}catch(t){}},fnStateSaveParams:null,iStateDuration:7200,iDeferLoading:null,iDisplayLength:10,iDisplayStart:0,iTabIndex:0,oClasses:{},oLanguage:{oAria:{sSortAscending:": activate to sort column ascending",sSortDescending:": activate to sort column descending"},oPaginate:{sFirst:"First",sLast:"Last",sNext:"Next",sPrevious:"Previous"},sEmptyTable:"No data available in table",sInfo:"Showing _START_ to _END_ of _TOTAL_ entries",sInfoEmpty:"Showing 0 to 0 of 0 entries",sInfoFiltered:"(filtered from _MAX_ total entries)",sInfoPostFix:"",sDecimal:"",sThousands:",",sLengthMenu:"Show _MENU_ entries",sLoadingRecords:"Loading...",sProcessing:"",sSearch:"Search:",sSearchPlaceholder:"",sUrl:"",sZeroRecords:"No matching records 
found"},oSearch:P.extend({},w.models.oSearch),sAjaxDataProp:"data",sAjaxSource:null,sDom:"lfrtip",searchDelay:null,sPaginationType:"simple_numbers",sScrollX:"",sScrollXInner:"",sScrollY:"",sServerMethod:"GET",renderer:null,rowId:"DT_RowId"},i(w.defaults),w.defaults.column={aDataSort:null,iDataSort:-1,asSorting:["asc","desc"],bSearchable:!0,bSortable:!0,bVisible:!0,fnCreatedCell:null,mData:null,mRender:null,sCellType:"td",sClass:"",sContentPadding:"",sDefaultContent:null,sName:"",sSortDataType:"std",sTitle:null,sType:null,sWidth:null},i(w.defaults.column),w.models.oSettings={oFeatures:{bAutoWidth:null,bDeferRender:null,bFilter:null,bInfo:null,bLengthChange:null,bPaginate:null,bProcessing:null,bServerSide:null,bSort:null,bSortMulti:null,bSortClasses:null,bStateSave:null},oScroll:{bCollapse:null,iBarWidth:0,sX:null,sXInner:null,sY:null},oLanguage:{fnInfoCallback:null},oBrowser:{bScrollOversize:!1,bScrollbarLeft:!1,bBounding:!1,barWidth:0},ajax:null,aanFeatures:[],aoData:[],aiDisplay:[],aiDisplayMaster:[],aIds:{},aoColumns:[],aoHeader:[],aoFooter:[],oPreviousSearch:{},aoPreSearchCols:[],aaSorting:null,aaSortingFixed:[],asStripeClasses:null,asDestroyStripes:[],sDestroyWidth:0,aoRowCallback:[],aoHeaderCallback:[],aoFooterCallback:[],aoDrawCallback:[],aoRowCreatedCallback:[],aoPreDrawCallback:[],aoInitComplete:[],aoStateSaveParams:[],aoStateLoadParams:[],aoStateLoaded:[],sTableId:"",nTable:null,nTHead:null,nTFoot:null,nTBody:null,nTableWrapper:null,bDeferLoading:!1,bInitialised:!1,aoOpenRows:[],sDom:null,searchDelay:null,sPaginationType:"two_button",iStateDuration:0,aoStateSave:[],aoStateLoad:[],oSavedState:null,oLoadedState:null,sAjaxSource:null,sAjaxDataProp:null,jqXHR:null,json:H,oAjaxData:H,fnServerData:null,aoServerParams:[],sServerMethod:null,fnFormatNumber:null,aLengthMenu:null,iDraw:0,bDrawing:!1,iDrawError:-1,_iDisplayLength:10,_iDisplayStart:0,_iRecordsTotal:0,_iRecordsDisplay:0,oClasses:{},bFiltered:!1,bSorted:!1,bSortCellsTop:null,oInit:null,aoDestroyCallback:[
],fnRecordsTotal:function(){return"ssp"==E(this)?+this._iRecordsTotal:this.aiDisplayMaster.length},fnRecordsDisplay:function(){return"ssp"==E(this)?+this._iRecordsDisplay:this.aiDisplay.length},fnDisplayEnd:function(){var t=this._iDisplayLength,e=this._iDisplayStart,n=e+t,a=this.aiDisplay.length,r=this.oFeatures,o=r.bPaginate;return r.bServerSide?!1===o||-1===t?e+a:Math.min(e+t,this._iRecordsDisplay):!o||a<n||-1===t?a:n},oInstance:null,sInstance:null,iTabIndex:0,nScrollHead:null,nScrollFoot:null,aLastSort:[],oPlugins:{},rowIdFn:null,rowId:null},w.ext=p={buttons:{},classes:{},builder:"-source-",errMode:"alert",feature:[],search:[],selector:{cell:[],column:[],row:[]},internal:{},legacy:{ajax:null},pager:{},renderer:{pageButton:{},header:{}},order:{},type:{detect:[],search:{},order:{}},_unique:0,fnVersionCheck:w.fnVersionCheck,iApiIndex:0,oJUIClasses:{},sVersion:w.version},P.extend(p,{afnFiltering:p.search,aTypes:p.type.detect,ofnSearch:p.type.search,oSort:p.type.order,afnSortData:p.order,aoFeatures:p.feature,oApi:p.internal,oStdClasses:p.classes,oPagination:p.pager}),P.extend(w.ext.classes,{sTable:"dataTable",sNoFooter:"no-footer",sPageButton:"paginate_button",sPageButtonActive:"current",sPageButtonDisabled:"disabled",sStripeOdd:"odd",sStripeEven:"even",sRowEmpty:"dataTables_empty",sWrapper:"dataTables_wrapper",sFilter:"dataTables_filter",sInfo:"dataTables_info",sPaging:"dataTables_paginate 
paging_",sLength:"dataTables_length",sProcessing:"dataTables_processing",sSortAsc:"sorting_asc",sSortDesc:"sorting_desc",sSortable:"sorting",sSortableAsc:"sorting_desc_disabled",sSortableDesc:"sorting_asc_disabled",sSortableNone:"sorting_disabled",sSortColumn:"sorting_",sFilterInput:"",sLengthSelect:"",sScrollWrapper:"dataTables_scroll",sScrollHead:"dataTables_scrollHead",sScrollHeadInner:"dataTables_scrollHeadInner",sScrollBody:"dataTables_scrollBody",sScrollFoot:"dataTables_scrollFoot",sScrollFootInner:"dataTables_scrollFootInner",sHeaderTH:"",sFooterTH:"",sSortJUIAsc:"",sSortJUIDesc:"",sSortJUI:"",sSortJUIAscAllowed:"",sSortJUIDescAllowed:"",sSortJUIWrapper:"",sSortIcon:"",sJUIHeader:"",sJUIFooter:""}),w.ext.pager);function Oe(t,e){var n=[],a=Ne.numbers_length,r=Math.floor(a/2);return e<=a?n=f(0,e):t<=r?((n=f(0,a-2)).push("ellipsis"),n.push(e-1)):((e-1-r<=t?n=f(e-(a-2),e):((n=f(t-r+2,t+r-1)).push("ellipsis"),n.push(e-1),n)).splice(0,0,"ellipsis"),n.splice(0,0,0)),n.DT_el="span",n}P.extend(Ne,{simple:function(t,e){return["previous","next"]},full:function(t,e){return["first","previous","next","last"]},numbers:function(t,e){return[Oe(t,e)]},simple_numbers:function(t,e){return["previous",Oe(t,e),"next"]},full_numbers:function(t,e){return["first","previous",Oe(t,e),"next","last"]},first_last_numbers:function(t,e){return["first",Oe(t,e),"last"]},_numbers:Oe,numbers_length:7}),P.extend(!0,w.ext.renderer,{pageButton:{_:function(u,t,c,e,f,d){function h(t,e){for(var n,a=b.sPageButtonDisabled,r=function(t){Yt(u,t.data.action,!0)},o=0,i=e.length;o<i;o++)if(n=e[o],Array.isArray(n)){var l=P("<"+(n.DT_el||"div")+"/>").appendTo(t);h(l,n)}else{var s=!1;switch(p=null,g=n){case"ellipsis":t.append('<span 
class="ellipsis">…</span>');break;case"first":p=m.sFirst,0===f&&(s=!0);break;case"previous":p=m.sPrevious,0===f&&(s=!0);break;case"next":p=m.sNext,0!==d&&f!==d-1||(s=!0);break;case"last":p=m.sLast,0!==d&&f!==d-1||(s=!0);break;default:p=u.fnFormatNumber(n+1),g=f===n?b.sPageButtonActive:""}null!==p&&(l=u.oInit.pagingTag||"a",s&&(g+=" "+a),me(P("<"+l+">",{class:b.sPageButton+" "+g,"aria-controls":u.sTableId,"aria-disabled":s?"true":null,"aria-label":S[n],role:"link","aria-current":g===b.sPageButtonActive?"page":null,"data-dt-idx":n,tabindex:s?-1:u.iTabIndex,id:0===c&&"string"==typeof n?u.sTableId+"_"+n:null}).html(p).appendTo(t),{action:n},r))}}var p,g,n,b=u.oClasses,m=u.oLanguage.oPaginate,S=u.oLanguage.oAria.paginate||{};try{n=P(t).find(v.activeElement).data("dt-idx")}catch(t){}h(P(t).empty(),e),n!==H&&P(t).find("[data-dt-idx="+n+"]").trigger("focus")}}}),P.extend(w.ext.type.detect,[function(t,e){e=e.oLanguage.sDecimal;return l(t,e)?"num"+e:null},function(t,e){var n;return(!t||t instanceof Date||X.test(t))&&(null!==(n=Date.parse(t))&&!isNaN(n)||h(t))?"date":null},function(t,e){e=e.oLanguage.sDecimal;return l(t,e,!0)?"num-fmt"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e)?"html-num"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e,!0)?"html-num-fmt"+e:null},function(t,e){return h(t)||"string"==typeof t&&-1!==t.indexOf("<")?"html":null}]),P.extend(w.ext.type.search,{html:function(t){return h(t)?t:"string"==typeof t?t.replace(U," ").replace(V,""):""},string:function(t){return!h(t)&&"string"==typeof t?t.replace(U," "):t}});function ke(t,e,n,a){var r;return 0===t||t&&"-"!==t?"number"==(r=typeof t)||"bigint"==r?t:+(t=(t=e?$(t,e):t).replace&&(n&&(t=t.replace(n,"")),a)?t.replace(a,""):t):-1/0}function Me(n){P.each({num:function(t){return ke(t,n)},"num-fmt":function(t){return ke(t,n,q)},"html-num":function(t){return ke(t,n,V)},"html-num-fmt":function(t){return 
ke(t,n,V,q)}},function(t,e){p.type.order[t+n+"-pre"]=e,t.match(/^html\-/)&&(p.type.search[t+n]=p.type.search.html)})}P.extend(p.type.order,{"date-pre":function(t){t=Date.parse(t);return isNaN(t)?-1/0:t},"html-pre":function(t){return h(t)?"":t.replace?t.replace(/<.*?>/g,"").toLowerCase():t+""},"string-pre":function(t){return h(t)?"":"string"==typeof t?t.toLowerCase():t.toString?t.toString():""},"string-asc":function(t,e){return t<e?-1:e<t?1:0},"string-desc":function(t,e){return t<e?1:e<t?-1:0}}),Me(""),P.extend(!0,w.ext.renderer,{header:{_:function(r,o,i,l){P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass))})},jqueryui:function(r,o,i,l){P("<div/>").addClass(l.sSortJUIWrapper).append(o.contents()).append(P("<span/>").addClass(l.sSortIcon+" "+i.sSortingClassJUI)).appendTo(o),P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass),o.find("span."+l.sSortIcon).removeClass(l.sSortJUIAsc+" "+l.sSortJUIDesc+" "+l.sSortJUI+" "+l.sSortJUIAscAllowed+" "+l.sSortJUIDescAllowed).addClass("asc"==a[e]?l.sSortJUIAsc:"desc"==a[e]?l.sSortJUIDesc:i.sSortingClassJUI))})}}});function We(t){return"string"==typeof(t=Array.isArray(t)?t.join(","):t)?t.replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(/"/g,"""):t}function Ee(t,e,n,a,r){return j.moment?t[e](r):j.luxon?t[n](r):a?t[a](r):t}var Be=!1;function Ue(t,e,n){var a;if(j.moment){if(!(a=j.moment.utc(t,e,n,!0)).isValid())return null}else if(j.luxon){if(!(a=e&&"string"==typeof t?j.luxon.DateTime.fromFormat(t,e):j.luxon.DateTime.fromISO(t)).isValid)return null;a.setLocale(n)}else e?(Be||alert("DataTables warning: Formatted date without Moment.js or Luxon - https://datatables.net/tn/17"),Be=!0):a=new Date(t);return a}function Ve(s){return 
function(a,r,o,i){0===arguments.length?(o="en",a=r=null):1===arguments.length?(o="en",r=a,a=null):2===arguments.length&&(o=r,r=a,a=null);var l="datetime-"+r;return w.ext.type.order[l]||(w.ext.type.detect.unshift(function(t){return t===l&&l}),w.ext.type.order[l+"-asc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:t<e?-1:1},w.ext.type.order[l+"-desc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:e<t?-1:1}),function(t,e){var n;return null!==t&&t!==H||(t="--now"===i?(n=new Date,new Date(Date.UTC(n.getFullYear(),n.getMonth(),n.getDate(),n.getHours(),n.getMinutes(),n.getSeconds()))):""),"type"===e?l:""===t?"sort"!==e?"":Ue("0000-01-01 00:00:00",null,o):!(null===r||a!==r||"sort"===e||"type"===e||t instanceof Date)||null===(n=Ue(t,a,o))?t:"sort"===e?n:(t=null===r?Ee(n,"toDate","toJSDate","")[s]():Ee(n,"format","toFormat","toISOString",r),"display"===e?We(t):t)}}}var Xe=",",Je=".";if(j.Intl!==H)try{for(var qe=(new Intl.NumberFormat).formatToParts(100000.1),n=0;n<qe.length;n++)"group"===qe[n].type?Xe=qe[n].value:"decimal"===qe[n].type&&(Je=qe[n].value)}catch(t){}function $e(e){return function(){var t=[ge(this[w.ext.iApiIndex])].concat(Array.prototype.slice.call(arguments));return w.ext.internal[e].apply(this,t)}}return w.datetime=function(n,a){var r="datetime-detect-"+n;a=a||"en",w.ext.type.order[r]||(w.ext.type.detect.unshift(function(t){var e=Ue(t,n,a);return!(""!==t&&!e)&&r}),w.ext.type.order[r+"-pre"]=function(t){return Ue(t,n,a)||0})},w.render={date:Ve("toLocaleDateString"),datetime:Ve("toLocaleString"),time:Ve("toLocaleTimeString"),number:function(a,r,o,i,l){return null!==a&&a!==H||(a=Xe),null!==r&&r!==H||(r=Je),{display:function(t){if("number"!=typeof t&&"string"!=typeof t)return t;if(""===t||null===t)return t;var e=t<0?"-":"",n=parseFloat(t);if(isNaN(n))return 
We(t);n=n.toFixed(o),t=Math.abs(n);n=parseInt(t,10),t=o?r+(t-n).toFixed(o).substring(2):"";return(e=0===n&&0===parseFloat(t)?"":e)+(i||"")+n.toString().replace(/\B(?=(\d{3})+(?!\d))/g,a)+t+(l||"")}}},text:function(){return{display:We,filter:We}}},P.extend(w.ext.internal,{_fnExternApiFunc:$e,_fnBuildAjax:Tt,_fnAjaxUpdate:xt,_fnAjaxParameters:At,_fnAjaxUpdateDraw:It,_fnAjaxDataSrc:Ft,_fnAddColumn:nt,_fnColumnOptions:at,_fnAdjustColumnSizing:O,_fnVisibleToColumnIndex:rt,_fnColumnIndexToVisible:ot,_fnVisbleColumns:T,_fnGetColumns:it,_fnColumnTypes:lt,_fnApplyColumnDefs:st,_fnHungarianMap:i,_fnCamelToHungarian:C,_fnLanguageCompat:Z,_fnBrowserDetect:tt,_fnAddData:x,_fnAddTr:ut,_fnNodeToDataIndex:function(t,e){return e._DT_RowIndex!==H?e._DT_RowIndex:null},_fnNodeToColumnIndex:function(t,e,n){return P.inArray(n,t.aoData[e].anCells)},_fnGetCellData:S,_fnSetCellData:ct,_fnSplitObjNotation:dt,_fnGetObjectDataFn:A,_fnSetObjectDataFn:b,_fnGetDataMaster:ht,_fnClearTable:pt,_fnDeleteIndex:gt,_fnInvalidate:bt,_fnGetRowElements:mt,_fnCreateTr:St,_fnBuildHead:yt,_fnDrawHead:Dt,_fnDraw:y,_fnReDraw:u,_fnAddOptionsHtml:_t,_fnDetectHeader:wt,_fnGetUniqueThs:Ct,_fnFeatureHtmlFilter:Lt,_fnFilterComplete:Rt,_fnFilterCustom:Pt,_fnFilterColumn:jt,_fnFilter:Ht,_fnFilterCreateSearch:Nt,_fnEscapeRegex:Ot,_fnFilterData:Wt,_fnFeatureHtmlInfo:Ut,_fnUpdateInfo:Vt,_fnInfoMacros:Xt,_fnInitialise:Jt,_fnInitComplete:qt,_fnLengthChange:$t,_fnFeatureHtmlLength:Gt,_fnFeatureHtmlPaginate:zt,_fnPageChange:Yt,_fnFeatureHtmlProcessing:Zt,_fnProcessingDisplay:D,_fnFeatureHtmlTable:Kt,_fnScrollDraw:Qt,_fnApplyToChildren:k,_fnCalculateColumnWidths:ee,_fnThrottle:ne,_fnConvertToWidth:ae,_fnGetWidestNode:re,_fnGetMaxLenString:oe,_fnStringToCss:M,_fnSortFlatten:I,_fnSort:ie,_fnSortAria:le,_fnSortListener:se,_fnSortAttachListener:ue,_fnSortingClasses:ce,_fnSortData:fe,_fnSaveState:de,_fnLoadState:he,_fnImplementState:pe,_fnSettingsFromNode:ge,_fnLog:W,_fnMap:F,_fnBindAction:me,_fnCallbackReg:L,_fnCallbackFire:R,_fnL
engthOverflow:Se,_fnRenderer:ve,_fnDataSource:E,_fnRowAttributes:vt,_fnExtend:be,_fnCalculateEnd:function(){}}),((P.fn.dataTable=w).$=P).fn.dataTableSettings=w.settings,P.fn.dataTableExt=w.ext,P.fn.DataTable=function(t){return P(this).dataTable(t).api()},P.each(w,function(t,e){P.fn.DataTable[t]=e}),w}); \ No newline at end of file | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js index f2c45c833e..d4ac31234c 100644 --- a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js +++ b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js | |||
@@ -657,7 +657,7 @@ $(document).ready(function() { | |||
657 | hljs.initHighlightingOnLoad(); | 657 | hljs.initHighlightingOnLoad(); |
658 | 658 | ||
659 | // Prevent invalid links from jumping page scroll | 659 | // Prevent invalid links from jumping page scroll |
660 | $('a[href=#]').click(function() { | 660 | $('a[href="#"]').click(function() { |
661 | return false; | 661 | return false; |
662 | }); | 662 | }); |
663 | 663 | ||
diff --git a/bitbake/lib/toaster/toastergui/static/js/projectpage.js b/bitbake/lib/toaster/toastergui/static/js/projectpage.js index 506471e091..a3c95810a7 100644 --- a/bitbake/lib/toaster/toastergui/static/js/projectpage.js +++ b/bitbake/lib/toaster/toastergui/static/js/projectpage.js | |||
@@ -61,7 +61,7 @@ function projectPageInit(ctx) { | |||
61 | distroChangeInput.val(urlParams.setDistro); | 61 | distroChangeInput.val(urlParams.setDistro); |
62 | distroChangeBtn.click(); | 62 | distroChangeBtn.click(); |
63 | } else { | 63 | } else { |
64 | updateDistroName(prjInfo.distro.name); | 64 | updateDistroName(prjInfo.distro?.name); |
65 | } | 65 | } |
66 | 66 | ||
67 | /* Now we're really ready show the page */ | 67 | /* Now we're really ready show the page */ |
diff --git a/bitbake/lib/toaster/toastergui/templates/base.html b/bitbake/lib/toaster/toastergui/templates/base.html index 9e19cc33ca..e90be69620 100644 --- a/bitbake/lib/toaster/toastergui/templates/base.html +++ b/bitbake/lib/toaster/toastergui/templates/base.html | |||
@@ -14,11 +14,11 @@ | |||
14 | 14 | ||
15 | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> | 15 | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> |
16 | <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> | 16 | <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> |
17 | <script src="{% static 'js/jquery-2.0.3.min.js' %}"> | 17 | <script src="{% static 'js/jquery-3.7.1.min.js' %}"> |
18 | </script> | 18 | </script> |
19 | <script src="{% static 'js/jquery.cookie.js' %}"> | 19 | <script src="{% static 'js/jquery.cookie.js' %}"> |
20 | </script> | 20 | </script> |
21 | <script src="{% static 'js/bootstrap.min.js' %}"> | 21 | <script src="{% static 'js/bootstrap-3.4.1.min.js' %}"> |
22 | </script> | 22 | </script> |
23 | <script src="{% static 'js/typeahead.jquery.js' %}"> | 23 | <script src="{% static 'js/typeahead.jquery.js' %}"> |
24 | </script> | 24 | </script> |
@@ -94,7 +94,7 @@ | |||
94 | </a> | 94 | </a> |
95 | <a class="brand" href="/">Toaster</a> | 95 | <a class="brand" href="/">Toaster</a> |
96 | {% if DEBUG %} | 96 | {% if DEBUG %} |
97 | <span class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i> | 97 | <span id="toaster-version-info-sign" class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i> |
98 | {% endif %} | 98 | {% endif %} |
99 | </div> | 99 | </div> |
100 | </div> | 100 | </div> |
@@ -123,7 +123,7 @@ | |||
123 | {% endif %} | 123 | {% endif %} |
124 | {% endif %} | 124 | {% endif %} |
125 | <li id="navbar-docs"> | 125 | <li id="navbar-docs"> |
126 | <a target="_blank" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html"> | 126 | <a target="_blank" href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual"> |
127 | <i class="glyphicon glyphicon-book"></i> | 127 | <i class="glyphicon glyphicon-book"></i> |
128 | Documentation | 128 | Documentation |
129 | </a> | 129 | </a> |
@@ -132,7 +132,8 @@ | |||
132 | {% if project_enable %} | 132 | {% if project_enable %} |
133 | <a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a> | 133 | <a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a> |
134 | {% endif %} | 134 | {% endif %} |
135 | </div> | 135 | <a class="btn btn-default navbar-btn navbar-right" id="import_page" style="margin-right: 5px !important" id="import-cmdline-button" href="{% url 'cmdlines' %}">Import command line builds</a> |
136 | </div> | ||
136 | </div> | 137 | </div> |
137 | </nav> | 138 | </nav> |
138 | 139 | ||
diff --git a/bitbake/lib/toaster/toastergui/templates/base_specific.html b/bitbake/lib/toaster/toastergui/templates/base_specific.html index e377cadd73..425f7ed73d 100644 --- a/bitbake/lib/toaster/toastergui/templates/base_specific.html +++ b/bitbake/lib/toaster/toastergui/templates/base_specific.html | |||
@@ -14,11 +14,11 @@ | |||
14 | 14 | ||
15 | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> | 15 | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> |
16 | <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> | 16 | <meta http-equiv="Content-Type" content="text/html;charset=UTF-8" /> |
17 | <script src="{% static 'js/jquery-2.0.3.min.js' %}"> | 17 | <script src="{% static 'js/jquery-3.7.1.min.js' %}"> |
18 | </script> | 18 | </script> |
19 | <script src="{% static 'js/jquery.cookie.js' %}"> | 19 | <script src="{% static 'js/jquery.cookie.js' %}"> |
20 | </script> | 20 | </script> |
21 | <script src="{% static 'js/bootstrap.min.js' %}"> | 21 | <script src="{% static 'js/bootstrap-3.4.1.min.js' %}"> |
22 | </script> | 22 | </script> |
23 | <script src="{% static 'js/typeahead.jquery.js' %}"> | 23 | <script src="{% static 'js/typeahead.jquery.js' %}"> |
24 | </script> | 24 | </script> |
diff --git a/bitbake/lib/toaster/toastergui/templates/command_line_builds.html b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html new file mode 100644 index 0000000000..05db6727e7 --- /dev/null +++ b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html | |||
@@ -0,0 +1,209 @@ | |||
1 | {% extends "base.html" %} | ||
2 | {% load projecttags %} | ||
3 | {% load humanize %} | ||
4 | {% load static %} | ||
5 | |||
6 | {% block title %} Import Builds from eventlogs - Toaster {% endblock %} | ||
7 | |||
8 | {% block pagecontent %} | ||
9 | |||
10 | <div class="container-fluid"> | ||
11 | <div id="overlay" class="hide"> | ||
12 | <div class="spinner"> | ||
13 | <div class="fa-spin"> | ||
14 | </div> | ||
15 | </div> | ||
16 | </div> | ||
17 | <div class="row"> | ||
18 | <div class="col-md-12"> | ||
19 | <div class="page-header"> | ||
20 | <div class="row"> | ||
21 | <div class="col-md-6"> | ||
22 | <h1>Import command line builds</h1> | ||
23 | </div> | ||
24 | {% if import_all %} | ||
25 | <div class="col-md-6"> | ||
26 | <button id="import_all" type="button" class="btn btn-primary navbar-btn navbar-right"> | ||
27 | <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import All | ||
28 | </button> | ||
29 | </div> | ||
30 | {% endif %} | ||
31 | </div> | ||
32 | </div> | ||
33 | {% if messages %} | ||
34 | <div class="row-fluid" id="empty-state-{{table_name}}"> | ||
35 | {% for message in messages %} | ||
36 | <div class="alert alert-danger">{{message}}</div> | ||
37 | {%endfor%} | ||
38 | </div> | ||
39 | {% endif %} | ||
40 | <div class="row"> | ||
41 | <h4 style="margin-left: 15px;"><strong>Import eventlog file</strong></h4> | ||
42 | <form method="POST" enctype="multipart/form-data" action="{% url 'cmdlines' %}" id="form_file"> | ||
43 | {% csrf_token %} | ||
44 | <div class="col-md-6" style="padding-left: 20px;"> | ||
45 | <div class="row"> | ||
46 | <input type="hidden" value="{{dir}}" name="dir"> | ||
47 | <div class="col-md-3"> {{ form.eventlog_file}} </div> | ||
48 | </div> | ||
49 | <div class="row" style="padding-top: 10px;"> | ||
50 | <div class="col-md-6"> | ||
51 | <button id="file_import" type="submit" disabled="disabled" class="btn btn-default navbar-btn" > | ||
52 | <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import | ||
53 | </button> | ||
54 | </div> | ||
55 | </div> | ||
56 | </div> | ||
57 | </form> | ||
58 | </div> | ||
59 | |||
60 | <div class="row" style="padding-top: 20px;"> | ||
61 | <div class="col-md-8 "> | ||
62 | <h4><strong>Eventlogs from existing build directory: </strong> | ||
63 | <a href="#" data-toggle="tooltip" title="{{dir}}"> | ||
64 | <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-info-circle" viewBox="0 0 16 16" data-toggle="tooltip"> | ||
65 | <path d="M8 15A7 7 0 1 1 8 1a7 7 0 0 1 0 14m0 1A8 8 0 1 0 8 0a8 8 0 0 0 0 16"/> | ||
66 | <path d="m8.93 6.588-2.29.287-.082.38.45.083c.294.07.352.176.288.469l-.738 3.468c-.194.897.105 1.319.808 1.319.545 0 1.178-.252 1.465-.598l.088-.416c-.2.176-.492.246-.686.246-.275 0-.375-.193-.304-.533zM9 4.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0"/> | ||
67 | </svg> | ||
68 | </a> | ||
69 | </h4> | ||
70 | {% if files %} | ||
71 | <div class="table-responsive"> | ||
72 | <table class="table col-md-6 table-bordered table-hover" id="eventlog-table" style="border-collapse: collapse;"> | ||
73 | <thead> | ||
74 | <tr class="row"> | ||
75 | <th scope="col">Name</th> | ||
76 | <th scope="col">Size</th> | ||
77 | <th scope="col">Action</th> | ||
78 | </tr> | ||
79 | </thead> | ||
80 | <tbody> | ||
81 | {% for file in files %} | ||
82 | <tr class="row" style="height: 48px;"> | ||
83 | <th scope="row" class="col-md-4" style="vertical-align: middle;"> | ||
84 | <input type="hidden" value="{{file.name}}" name="{{file.name}}">{{file.name}} | ||
85 | </th> | ||
86 | <td class="col-md-4 align-middle" style="vertical-align: middle;">{{file.size|filesizeformat}}</td> | ||
87 | <td class="col-md-4 align-middle" style="vertical-align: middle;"> | ||
88 | {% if file.imported == True and file.build_id is not None %} | ||
89 | <a href="{% url 'builddashboard' file.build_id %}">Build Details</a> | ||
90 | {% elif request.session.file == file.name or request.session.all_builds %} | ||
91 | <a data-toggle="tooltip" title="Build in progress"> | ||
92 | <span class="glyphicon glyphicon-upload" style="font-size: 18px; color:grey"></span> | ||
93 | </a> | ||
94 | {%else%} | ||
95 | <a onclick="_ajax_update('{{file.name}}', false, '{{dir}}')" data-toggle="tooltip" title="Import File"> | ||
96 | <span class="glyphicon glyphicon-upload" style="font-size: 18px;"></span> | ||
97 | </a> | ||
98 | {%endif%} | ||
99 | </td> | ||
100 | </tr> | ||
101 | {% endfor%} | ||
102 | </tbody> | ||
103 | </table> | ||
104 | </div> | ||
105 | {% else %} | ||
106 | <div class="row-fluid" id="empty-state-{{table_name}}"> | ||
107 | <div class="alert alert-info">Sorry - no files found</div> | ||
108 | </div> | ||
109 | {%endif%} | ||
110 | </div> | ||
111 | </div> | ||
112 | </div> | ||
113 | </div> | ||
114 | </div> | ||
115 | |||
116 | <link rel="stylesheet" href="{% static 'css/jquery.dataTables-1.13.8.min.css' %}" type='text/css'/> | ||
117 | <script src="{% static 'js/jquery.dataTables-1.13.8.min.js' %}"> </script> | ||
118 | <script> | ||
119 | |||
120 | function _ajax_update(file, all, dir){ | ||
121 | function getCookie(name) { | ||
122 | var cookieValue = null; | ||
123 | if (document.cookie && document.cookie !== '') { | ||
124 | var cookies = document.cookie.split(';'); | ||
125 | for (var i = 0; i < cookies.length; i++) { | ||
126 | var cookie = jQuery.trim(cookies[i]); | ||
127 | // Does this cookie string begin with the name we want? | ||
128 | if (cookie.substring(0, name.length + 1) === (name + '=')) { | ||
129 | cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); | ||
130 | break; | ||
131 | } | ||
132 | } | ||
133 | } | ||
134 | return cookieValue; | ||
135 | } | ||
136 | var csrftoken = getCookie('csrftoken'); | ||
137 | |||
138 | function csrfSafeMethod(method) { | ||
139 | // these HTTP methods do not require CSRF protection | ||
140 | return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method)); | ||
141 | } | ||
142 | $.ajaxSetup({ | ||
143 | beforeSend: function (xhr, settings) { | ||
144 | if (!csrfSafeMethod(settings.type) && !this.crossDomain) { | ||
145 | xhr.setRequestHeader("X-CSRFToken", csrftoken); | ||
146 | } | ||
147 | } | ||
148 | }); | ||
149 | |||
150 | $.ajax({ | ||
151 | url:'/toastergui/cmdline/', | ||
152 | type: "POST", | ||
153 | data: {file: file, all: all, dir: dir}, | ||
154 | success:function(data){ | ||
155 | if (data['response']=='building'){ | ||
156 | location.reload() | ||
157 | } else { | ||
158 | window.location = '/toastergui/builds/' | ||
159 | } | ||
160 | }, | ||
161 | complete:function(data){ | ||
162 | }, | ||
163 | error:function (xhr, textStatus, thrownError){ | ||
164 | console.log('fail'); | ||
165 | } | ||
166 | }); | ||
167 | } | ||
168 | |||
169 | $('#import_all').on('click', function(){ | ||
170 | _ajax_update("{{files | safe}}", true, "{{dir | safe}}"); | ||
171 | }); | ||
172 | |||
173 | |||
174 | $('#import_page').hide(); | ||
175 | |||
176 | $(function () { | ||
177 | $('[data-toggle="tooltip"]').tooltip() | ||
178 | }) | ||
179 | |||
180 | |||
181 | $("#id_eventlog_file").change(function(){ | ||
182 | $('#file_import').prop("disabled", false); | ||
183 | $('#file_import').addClass('btn-primary') | ||
184 | $('#file_import').removeClass('btn-default') | ||
185 | }) | ||
186 | |||
187 | $(document).ajaxStart(function(){ | ||
188 | $('#overlay').removeClass('hide'); | ||
189 | window.setTimeout( | ||
190 | function() { | ||
191 | window.location = '/toastergui/builds/' | ||
192 | }, 10000) | ||
193 | }); | ||
194 | |||
195 | $( "#form_file").on( "submit", function( event ) { | ||
196 | $('#overlay').removeClass('hide'); | ||
197 | window.setTimeout( | ||
198 | function() { | ||
199 | window.location = '/toastergui/builds/' | ||
200 | }, 10000) | ||
201 | }); | ||
202 | |||
203 | $(document).ready( function () { | ||
204 | $('#eventlog-table').DataTable({order: [[0, 'desc']], "pageLength": 50}); | ||
205 | }); | ||
206 | |||
207 | </script> | ||
208 | |||
209 | {% endblock %} | ||
diff --git a/bitbake/lib/toaster/toastergui/templates/configvars.html b/bitbake/lib/toaster/toastergui/templates/configvars.html index 33fef9316d..691dace3a2 100644 --- a/bitbake/lib/toaster/toastergui/templates/configvars.html +++ b/bitbake/lib/toaster/toastergui/templates/configvars.html | |||
@@ -66,7 +66,7 @@ | |||
66 | <td class="description"> | 66 | <td class="description"> |
67 | {% if variable.description %} | 67 | {% if variable.description %} |
68 | {{variable.description}} | 68 | {{variable.description}} |
69 | <a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-{{variable.variable_name|variable_parent_name}}" target="_blank"> | 69 | <a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-{{variable.variable_name|variable_parent_name}}" target="_blank"> |
70 | <span class="glyphicon glyphicon-new-window get-info"></span></a> | 70 | <span class="glyphicon glyphicon-new-window get-info"></span></a> |
71 | {% endif %} | 71 | {% endif %} |
72 | </td> | 72 | </td> |
diff --git a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html index ca248962f0..41553c4f9d 100644 --- a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html +++ b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html | |||
@@ -11,7 +11,7 @@ | |||
11 | <script src="{% static 'js/layerDepsModal.js' %}"></script> | 11 | <script src="{% static 'js/layerDepsModal.js' %}"></script> |
12 | <script src="{% static 'js/projectpage.js' %}"></script> | 12 | <script src="{% static 'js/projectpage.js' %}"></script> |
13 | 13 | ||
14 | <script src="{% static 'js/bootstrap.min.js' %}"></script> | 14 | <script src="{% static 'js/bootstrap-3.4.1.min.js' %}"></script> |
15 | <script src="{% static 'js/filtersnippet.js' %}"></script> | 15 | <script src="{% static 'js/filtersnippet.js' %}"></script> |
16 | <script src="{% static 'js/importlayer.js' %}"></script> | 16 | <script src="{% static 'js/importlayer.js' %}"></script> |
17 | <script src="{% static 'js/highlight.pack.js' %}"></script> | 17 | <script src="{% static 'js/highlight.pack.js' %}"></script> |
diff --git a/bitbake/lib/toaster/toastergui/templates/landing.html b/bitbake/lib/toaster/toastergui/templates/landing.html index bfaaf6fc83..589ee22634 100644 --- a/bitbake/lib/toaster/toastergui/templates/landing.html +++ b/bitbake/lib/toaster/toastergui/templates/landing.html | |||
@@ -12,10 +12,10 @@ | |||
12 | <div class="col-md-6"> | 12 | <div class="col-md-6"> |
13 | <h1>This is Toaster</h1> | 13 | <h1>This is Toaster</h1> |
14 | 14 | ||
15 | <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p> | 15 | <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://docs.yoctoproject.org/bitbake.html">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p> |
16 | 16 | ||
17 | <p class="top-air"> | 17 | <p class="top-air"> |
18 | <a class="btn btn-info btn-lg" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#toaster-manual-setup-and-use"> | 18 | <a class="btn btn-info btn-lg" href="http://docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" style="min-width: 460px;"> |
19 | Toaster is ready to capture your command line builds | 19 | Toaster is ready to capture your command line builds |
20 | </a> | 20 | </a> |
21 | </p> | 21 | </p> |
@@ -23,7 +23,7 @@ | |||
23 | {% if lvs_nos %} | 23 | {% if lvs_nos %} |
24 | {% if project_enable %} | 24 | {% if project_enable %} |
25 | <p class="top-air"> | 25 | <p class="top-air"> |
26 | <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}"> | 26 | <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}" style="min-width: 460px;"> |
27 | Create your first Toaster project to run manage builds | 27 | Create your first Toaster project to run manage builds |
28 | </a> | 28 | </a> |
29 | </p> | 29 | </p> |
@@ -33,7 +33,7 @@ | |||
33 | Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can: | 33 | Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can: |
34 | <ul> | 34 | <ul> |
35 | <li> | 35 | <li> |
36 | <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#layer-source">Configure a layer source</a> | 36 | <a href="http://docs.yoctoproject.org/toaster-manual/reference.html#layer-source">Configure a layer source</a> |
37 | </li> | 37 | </li> |
38 | <li> | 38 | <li> |
39 | <a href="{% url 'newproject' %}">Create a project</a>, then import layers | 39 | <a href="{% url 'newproject' %}">Create a project</a>, then import layers |
@@ -42,9 +42,15 @@ | |||
42 | </div> | 42 | </div> |
43 | {% endif %} | 43 | {% endif %} |
44 | 44 | ||
45 | <p class="top-air"> | ||
46 | <a class="btn btn-info btn-lg" href="{% url 'cmdlines' %}" style="min-width: 460px;"> | ||
47 | Import command line event logs from build directory | ||
48 | </a> | ||
49 | </p> | ||
50 | |||
45 | <ul class="list-unstyled lead"> | 51 | <ul class="list-unstyled lead"> |
46 | <li> | 52 | <li> |
47 | <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html"> | 53 | <a href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual"> |
48 | Read the Toaster manual | 54 | Read the Toaster manual |
49 | </a> | 55 | </a> |
50 | </li> | 56 | </li> |
diff --git a/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html b/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html deleted file mode 100644 index e7200b8412..0000000000 --- a/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html +++ /dev/null | |||
@@ -1,34 +0,0 @@ | |||
1 | {% extends "base.html" %} | ||
2 | |||
3 | {% load static %} | ||
4 | {% load projecttags %} | ||
5 | {% load humanize %} | ||
6 | |||
7 | {% block title %} Welcome to Toaster {% endblock %} | ||
8 | |||
9 | {% block pagecontent %} | ||
10 | |||
11 | <div class="container"> | ||
12 | <div class="row"> | ||
13 | <!-- Empty - no build module --> | ||
14 | <div class="page-header top-air"> | ||
15 | <h1> | ||
16 | This page only works with Toaster in 'Build' mode | ||
17 | </h1> | ||
18 | </div> | ||
19 | <div class="alert alert-info lead"> | ||
20 | <p"> | ||
21 | The 'Build' mode allows you to configure and run your Yocto Project builds from Toaster. | ||
22 | <ul> | ||
23 | <li><a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#intro-modes"> | ||
24 | Read about the 'Build' mode | ||
25 | </a></li> | ||
26 | <li><a href="/"> | ||
27 | View your builds | ||
28 | </a></li> | ||
29 | </ul> | ||
30 | </p> | ||
31 | </div> | ||
32 | </div> | ||
33 | |||
34 | {% endblock %} | ||
diff --git a/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/bitbake/lib/toaster/toastergui/templates/layerdetails.html index 1e26e31c8b..923ca3bfe4 100644 --- a/bitbake/lib/toaster/toastergui/templates/layerdetails.html +++ b/bitbake/lib/toaster/toastergui/templates/layerdetails.html | |||
@@ -355,7 +355,7 @@ | |||
355 | {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %} | 355 | {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %} |
356 | <dt>Layer index</dt> | 356 | <dt>Layer index</dt> |
357 | <dd> | 357 | <dd> |
358 | <a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a> | 358 | <a href="https://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a> |
359 | </dd> | 359 | </dd> |
360 | {% endif %} | 360 | {% endif %} |
361 | </dl> | 361 | </dl> |
diff --git a/bitbake/lib/toaster/toastergui/templates/mrb_section.html b/bitbake/lib/toaster/toastergui/templates/mrb_section.html index 98d9fac822..9fc7dfaee4 100644 --- a/bitbake/lib/toaster/toastergui/templates/mrb_section.html +++ b/bitbake/lib/toaster/toastergui/templates/mrb_section.html | |||
@@ -63,7 +63,7 @@ | |||
63 | <%/if%> | 63 | <%/if%> |
64 | </div> | 64 | </div> |
65 | 65 | ||
66 | <div data-build-state="<%:state%>"> | 66 | <div class="build-state" data-build-state="<%:state%>"> |
67 | <%if state == 'Cloning'%> | 67 | <%if state == 'Cloning'%> |
68 | <%include tmpl='#cloning-repos-build-template'/%> | 68 | <%include tmpl='#cloning-repos-build-template'/%> |
69 | <%else state == 'Parsing'%> | 69 | <%else state == 'Parsing'%> |
diff --git a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html index a5d5893571..2493954deb 100644 --- a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html +++ b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html | |||
@@ -18,7 +18,7 @@ | |||
18 | </ul> | 18 | </ul> |
19 | <div class="tab-content"> | 19 | <div class="tab-content"> |
20 | <div class="tab-pane active" id="dependencies"> | 20 | <div class="tab-pane active" id="dependencies"> |
21 | {% ifequal runtime_deps|length 0 %} | 21 | {% if runtime_deps|length == 0 %} |
22 | <div class="alert alert-info"> | 22 | <div class="alert alert-info"> |
23 | <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. | 23 | <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. |
24 | </div> | 24 | </div> |
@@ -54,8 +54,8 @@ | |||
54 | {% endfor %} | 54 | {% endfor %} |
55 | </tbody> | 55 | </tbody> |
56 | </table> | 56 | </table> |
57 | {% endifequal %} | 57 | {% endif %} |
58 | {% ifnotequal other_deps|length 0 %} | 58 | {% if other_deps|length != 0 %} |
59 | <h3>Other runtime relationships</h3> | 59 | <h3>Other runtime relationships</h3> |
60 | <table class="table table-bordered table-hover"> | 60 | <table class="table table-bordered table-hover"> |
61 | <thead> | 61 | <thead> |
@@ -93,7 +93,7 @@ | |||
93 | {% endfor %} | 93 | {% endfor %} |
94 | </tbody> | 94 | </tbody> |
95 | </table> | 95 | </table> |
96 | {% endifnotequal %} | 96 | {% endif %} |
97 | </div> <!-- tab-pane --> | 97 | </div> <!-- tab-pane --> |
98 | </div> <!-- tab-content --> | 98 | </div> <!-- tab-content --> |
99 | {% endblock tabcontent %} | 99 | {% endblock tabcontent %} |
diff --git a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html index 66f8e7f069..a4fcd2aa42 100644 --- a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html +++ b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html | |||
@@ -127,7 +127,7 @@ | |||
127 | {% comment %} | 127 | {% comment %} |
128 | # Removed per team meeting of 1/29/2014 until | 128 | # Removed per team meeting of 1/29/2014 until |
129 | # decision on index search algorithm | 129 | # decision on index search algorithm |
130 | <a href="http://layers.openembedded.org" target="_blank"> | 130 | <a href="https://layers.openembedded.org" target="_blank"> |
131 | <i class="glyphicon glyphicon-share get-info"></i> | 131 | <i class="glyphicon glyphicon-share get-info"></i> |
132 | </a> | 132 | </a> |
133 | {% endcomment %} | 133 | {% endcomment %} |
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html index 95e56ded26..1f5ed6d913 100644 --- a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html +++ b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html | |||
@@ -14,7 +14,7 @@ | |||
14 | {% include "package_included_tabs.html" with active_tab="dependencies" %} | 14 | {% include "package_included_tabs.html" with active_tab="dependencies" %} |
15 | <div class="tab-content"> | 15 | <div class="tab-content"> |
16 | <div class="tab-pane active" id="dependencies"> | 16 | <div class="tab-pane active" id="dependencies"> |
17 | {% ifnotequal runtime_deps|length 0 %} | 17 | {% if runtime_deps|length != 0 %} |
18 | <table class="table table-bordered table-hover"> | 18 | <table class="table table-bordered table-hover"> |
19 | <thead> | 19 | <thead> |
20 | <tr> | 20 | <tr> |
@@ -48,9 +48,9 @@ | |||
48 | <div class="alert alert-info"> | 48 | <div class="alert alert-info"> |
49 | <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. | 49 | <strong>{{package.fullpackagespec}}</strong> has no runtime dependencies. |
50 | </div> | 50 | </div> |
51 | {% endifnotequal %} | 51 | {% endif %} |
52 | 52 | ||
53 | {% ifnotequal other_deps|length 0 %} | 53 | {% if other_deps|length != 0 %} |
54 | <h3>Other runtime relationships</h3> | 54 | <h3>Other runtime relationships</h3> |
55 | <table class="table table-bordered table-hover"> | 55 | <table class="table table-bordered table-hover"> |
56 | <thead> | 56 | <thead> |
@@ -103,7 +103,7 @@ | |||
103 | {% endfor %} | 103 | {% endfor %} |
104 | </tbody> | 104 | </tbody> |
105 | </table> | 105 | </table> |
106 | {% endifnotequal %} | 106 | {% endif %} |
107 | </div> <!-- end tab-pane --> | 107 | </div> <!-- end tab-pane --> |
108 | </div> <!-- end tab content --> | 108 | </div> <!-- end tab content --> |
109 | {% endwith %} | 109 | {% endwith %} |
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html index fb310c7fc7..dae4549e21 100644 --- a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html +++ b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html | |||
@@ -15,7 +15,7 @@ | |||
15 | <div class="tab-content"> | 15 | <div class="tab-content"> |
16 | <div class="tab-pane active" id="brought-in-by"> | 16 | <div class="tab-pane active" id="brought-in-by"> |
17 | 17 | ||
18 | {% ifequal reverse_count 0 %} | 18 | {% if reverse_count == 0 %} |
19 | <div class="alert alert-info"> | 19 | <div class="alert alert-info"> |
20 | <strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies. | 20 | <strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies. |
21 | </div> | 21 | </div> |
@@ -43,7 +43,7 @@ | |||
43 | {% endfor %} | 43 | {% endfor %} |
44 | </tbody> | 44 | </tbody> |
45 | </table> | 45 | </table> |
46 | {% endifequal %} | 46 | {% endif %} |
47 | </div> <!-- end tab-pane --> | 47 | </div> <!-- end tab-pane --> |
48 | </div> <!-- end tab content --> | 48 | </div> <!-- end tab content --> |
49 | {% endwith %} | 49 | {% endwith %} |
diff --git a/bitbake/lib/toaster/toastergui/templates/project.html b/bitbake/lib/toaster/toastergui/templates/project.html index d8ad2c79dc..22239a82fd 100644 --- a/bitbake/lib/toaster/toastergui/templates/project.html +++ b/bitbake/lib/toaster/toastergui/templates/project.html | |||
@@ -139,7 +139,7 @@ | |||
139 | <ul> | 139 | <ul> |
140 | <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> | 140 | <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> |
141 | <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> | 141 | <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> |
142 | <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> | 142 | <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> |
143 | <li>Or type a layer name below</li> | 143 | <li>Or type a layer name below</li> |
144 | </ul> | 144 | </ul> |
145 | </div> | 145 | </div> |
diff --git a/bitbake/lib/toaster/toastergui/templates/project_specific.html b/bitbake/lib/toaster/toastergui/templates/project_specific.html index 42725c0dba..76d45b1b39 100644 --- a/bitbake/lib/toaster/toastergui/templates/project_specific.html +++ b/bitbake/lib/toaster/toastergui/templates/project_specific.html | |||
@@ -137,7 +137,7 @@ | |||
137 | <ul> | 137 | <ul> |
138 | <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> | 138 | <li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li> |
139 | <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> | 139 | <li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li> |
140 | <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> | 140 | <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li> |
141 | <li>Or type a layer name below</li> | 141 | <li>Or type a layer name below</li> |
142 | </ul> | 142 | </ul> |
143 | </div> | 143 | </div> |
diff --git a/bitbake/lib/toaster/toastergui/templates/projectconf.html b/bitbake/lib/toaster/toastergui/templates/projectconf.html index bd49f1f585..c306835832 100644 --- a/bitbake/lib/toaster/toastergui/templates/projectconf.html +++ b/bitbake/lib/toaster/toastergui/templates/projectconf.html | |||
@@ -73,7 +73,7 @@ | |||
73 | 73 | ||
74 | {% if image_install_append_defined %} | 74 | {% if image_install_append_defined %} |
75 | <dt> | 75 | <dt> |
76 | <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL_append</span> | 76 | <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL:append</span> |
77 | <span class="glyphicon glyphicon-question-sign get-help" title="Specifies additional packages to install into an image. If your build creates more than one image, the packages will be installed in all of them"></span> | 77 | <span class="glyphicon glyphicon-question-sign get-help" title="Specifies additional packages to install into an image. If your build creates more than one image, the packages will be installed in all of them"></span> |
78 | </dt> | 78 | </dt> |
79 | <dd class="variable-list"> | 79 | <dd class="variable-list"> |
@@ -83,7 +83,7 @@ | |||
83 | <form id="change-image_install-form" class="form-inline" style="display:none;"> | 83 | <form id="change-image_install-form" class="form-inline" style="display:none;"> |
84 | <div class="row"> | 84 | <div class="row"> |
85 | <div class="col-md-4"> | 85 | <div class="col-md-4"> |
86 | <span class="help-block">To set IMAGE_INSTALL_append to more than one package, type the package names separated by a space.</span> | 86 | <span class="help-block">To set IMAGE_INSTALL:append to more than one package, type the package names separated by a space.</span> |
87 | </div> | 87 | </div> |
88 | </div> | 88 | </div> |
89 | <div class="form-group"> | 89 | <div class="form-group"> |
@@ -167,8 +167,8 @@ | |||
167 | {% for fstype in vars_fstypes %} | 167 | {% for fstype in vars_fstypes %} |
168 | <input type="hidden" class="js-checkbox-fstypes-list" value="{{fstype}}"> | 168 | <input type="hidden" class="js-checkbox-fstypes-list" value="{{fstype}}"> |
169 | {% endfor %} | 169 | {% endfor %} |
170 | {% for b in vars_blacklist %} | 170 | {% for b in vars_disallowed %} |
171 | <input type="hidden" class="js-config-blacklist-name" value="{{b}}"> | 171 | <input type="hidden" class="js-config-disallowed-name" value="{{b}}"> |
172 | {% endfor %} | 172 | {% endfor %} |
173 | {% for b in vars_managed %} | 173 | {% for b in vars_managed %} |
174 | <input type="hidden" class="js-config-managed-name" value="{{b}}"> | 174 | <input type="hidden" class="js-config-managed-name" value="{{b}}"> |
@@ -201,12 +201,12 @@ | |||
201 | <p>Toaster cannot set any variables that impact 1) the configuration of the build servers, | 201 | <p>Toaster cannot set any variables that impact 1) the configuration of the build servers, |
202 | or 2) where artifacts produced by the build are stored. Such variables include: </p> | 202 | or 2) where artifacts produced by the build are stored. Such variables include: </p> |
203 | <p> | 203 | <p> |
204 | <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code> | 204 | <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code> |
205 | <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code> | 205 | <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code> |
206 | <code>CVS_PROXY_HOST</code> | 206 | <code>CVS_PROXY_HOST</code> |
207 | <code>CVS_PROXY_PORT</code> | 207 | <code>CVS_PROXY_PORT</code> |
208 | <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code> | 208 | <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code> |
209 | <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-TMPDIR" target="_blank">TMPDIR</a></code></p> | 209 | <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-TMPDIR" target="_blank">TMPDIR</a></code></p> |
210 | <p>Plus the following standard shell environment variables:</p> | 210 | <p>Plus the following standard shell environment variables:</p> |
211 | <p><code>http_proxy</code> <code>ftp_proxy</code> <code>https_proxy</code> <code>all_proxy</code></p> | 211 | <p><code>http_proxy</code> <code>ftp_proxy</code> <code>https_proxy</code> <code>all_proxy</code></p> |
212 | </div> | 212 | </div> |
@@ -238,9 +238,9 @@ function validate_new_variable() { | |||
238 | } | 238 | } |
239 | } | 239 | } |
240 | 240 | ||
241 | var blacklist_configvars = document.getElementsByClassName('js-config-blacklist-name'); | 241 | var disallowed_configvars = document.getElementsByClassName('js-config-disallowed-name'); |
242 | for (var i = 0, length = blacklist_configvars.length; i < length; i++) { | 242 | for (var i = 0, length = disallowed_configvars.length; i < length; i++) { |
243 | if (blacklist_configvars[i].value.toUpperCase() == variable.toUpperCase()) { | 243 | if (disallowed_configvars[i].value.toUpperCase() == variable.toUpperCase()) { |
244 | error_msg = "You cannot edit this variable in Toaster because it is set by the build servers"; | 244 | error_msg = "You cannot edit this variable in Toaster because it is set by the build servers"; |
245 | } | 245 | } |
246 | } | 246 | } |
@@ -771,10 +771,10 @@ $(document).ready(function() { | |||
771 | 771 | ||
772 | {% if image_install_append_defined %} | 772 | {% if image_install_append_defined %} |
773 | 773 | ||
774 | // init IMAGE_INSTALL_append trash icon | 774 | // init IMAGE_INSTALL:append trash icon |
775 | setDeleteTooltip($('#delete-image_install-icon')); | 775 | setDeleteTooltip($('#delete-image_install-icon')); |
776 | 776 | ||
777 | // change IMAGE_INSTALL_append variable | 777 | // change IMAGE_INSTALL:append variable |
778 | $('#change-image_install-icon').click(function() { | 778 | $('#change-image_install-icon').click(function() { |
779 | // preset the edit value | 779 | // preset the edit value |
780 | var current_val = $("span#image_install").text().trim(); | 780 | var current_val = $("span#image_install").text().trim(); |
@@ -814,7 +814,7 @@ $(document).ready(function() { | |||
814 | $('#apply-change-image_install').click(function(){ | 814 | $('#apply-change-image_install').click(function(){ |
815 | // insure these non-empty values have single space prefix | 815 | // insure these non-empty values have single space prefix |
816 | var value = " " + $('#new-image_install').val().trim(); | 816 | var value = " " + $('#new-image_install').val().trim(); |
817 | postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+value}); | 817 | postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+value}); |
818 | $('#image_install').text(value); | 818 | $('#image_install').text(value); |
819 | $('#image_install').removeClass('text-muted'); | 819 | $('#image_install').removeClass('text-muted'); |
820 | $("#change-image_install-form").slideUp(function () { | 820 | $("#change-image_install-form").slideUp(function () { |
@@ -826,10 +826,10 @@ $(document).ready(function() { | |||
826 | }); | 826 | }); |
827 | }); | 827 | }); |
828 | 828 | ||
829 | // delete IMAGE_INSTALL_append variable value | 829 | // delete IMAGE_INSTALL:append variable value |
830 | $('#delete-image_install-icon').click(function(){ | 830 | $('#delete-image_install-icon').click(function(){ |
831 | $(this).tooltip('hide'); | 831 | $(this).tooltip('hide'); |
832 | postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+''}); | 832 | postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+''}); |
833 | $('#image_install').parent().fadeOut(1000, function(){ | 833 | $('#image_install').parent().fadeOut(1000, function(){ |
834 | $('#image_install').addClass('text-muted'); | 834 | $('#image_install').addClass('text-muted'); |
835 | $('#image_install').text('Not set'); | 835 | $('#image_install').text('Not set'); |
@@ -1011,7 +1011,7 @@ $(document).ready(function() { | |||
1011 | $(".save").attr("disabled","disabled"); | 1011 | $(".save").attr("disabled","disabled"); |
1012 | 1012 | ||
1013 | // Reload page if admin-removed core managed value is manually added back in | 1013 | // Reload page if admin-removed core managed value is manually added back in |
1014 | if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL_append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) { | 1014 | if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL:append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) { |
1015 | // delayed reload to avoid race condition with postEditAjaxRequest | 1015 | // delayed reload to avoid race condition with postEditAjaxRequest |
1016 | do_reload=true; | 1016 | do_reload=true; |
1017 | } | 1017 | } |
diff --git a/bitbake/lib/toaster/toastergui/templates/recipe.html b/bitbake/lib/toaster/toastergui/templates/recipe.html index 3f76e656fe..4b5301b548 100644 --- a/bitbake/lib/toaster/toastergui/templates/recipe.html +++ b/bitbake/lib/toaster/toastergui/templates/recipe.html | |||
@@ -186,9 +186,9 @@ | |||
186 | <i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i> | 186 | <i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i> |
187 | </td> | 187 | </td> |
188 | <td> | 188 | <td> |
189 | {% ifnotequal task.sstate_result task.SSTATE_NA %} | 189 | {% if task.sstate_result != task.SSTATE_NA %} |
190 | {{task.get_sstate_result_display}} | 190 | {{task.get_sstate_result_display}} |
191 | {% endifnotequal %} | 191 | {% endif %} |
192 | </td> | 192 | </td> |
193 | 193 | ||
194 | </tr> | 194 | </tr> |
diff --git a/bitbake/lib/toaster/toastergui/templates/target.html b/bitbake/lib/toaster/toastergui/templates/target.html index 1924a0dad7..d5f60e77a8 100644 --- a/bitbake/lib/toaster/toastergui/templates/target.html +++ b/bitbake/lib/toaster/toastergui/templates/target.html | |||
@@ -8,11 +8,11 @@ | |||
8 | 8 | ||
9 | {% block nav-target %} | 9 | {% block nav-target %} |
10 | {% for t in build.get_sorted_target_list %} | 10 | {% for t in build.get_sorted_target_list %} |
11 | {% ifequal target.pk t.pk %} | 11 | {% if target.pk == t.pk %} |
12 | <li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> | 12 | <li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> |
13 | {% else %} | 13 | {% else %} |
14 | <li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> | 14 | <li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li> |
15 | {% endifequal %} | 15 | {% endif %} |
16 | {% endfor %} | 16 | {% endfor %} |
17 | {% endblock %} | 17 | {% endblock %} |
18 | 18 | ||
diff --git a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py index c432f59a78..bd398f0012 100644 --- a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py +++ b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py | |||
@@ -167,8 +167,8 @@ def check_filter_status(options, filter): | |||
167 | def variable_parent_name(value): | 167 | def variable_parent_name(value): |
168 | """ filter extended variable names to the parent name | 168 | """ filter extended variable names to the parent name |
169 | """ | 169 | """ |
170 | value=re.sub('_\$.*', '', value) | 170 | value = re.sub(r'_\$.*', '', value) |
171 | return re.sub('_[a-z].*', '', value) | 171 | return re.sub(r'_[a-z].*', '', value) |
172 | 172 | ||
173 | @register.filter | 173 | @register.filter |
174 | def filter_setin_files(file_list, matchstr): | 174 | def filter_setin_files(file_list, matchstr): |
diff --git a/bitbake/lib/toaster/toastergui/urls.py b/bitbake/lib/toaster/toastergui/urls.py index d2df4e6048..7f8489d3aa 100644 --- a/bitbake/lib/toaster/toastergui/urls.py +++ b/bitbake/lib/toaster/toastergui/urls.py | |||
@@ -6,7 +6,7 @@ | |||
6 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
7 | # | 7 | # |
8 | 8 | ||
9 | from django.conf.urls import url | 9 | from django.urls import re_path as url |
10 | from django.views.generic import RedirectView | 10 | from django.views.generic import RedirectView |
11 | 11 | ||
12 | from toastergui import tables | 12 | from toastergui import tables |
@@ -95,6 +95,7 @@ urlpatterns = [ | |||
95 | # project URLs | 95 | # project URLs |
96 | url(r'^newproject/$', views.newproject, name='newproject'), | 96 | url(r'^newproject/$', views.newproject, name='newproject'), |
97 | 97 | ||
98 | url(r'^cmdline/$', views.CommandLineBuilds.as_view(), name='cmdlines'), | ||
98 | url(r'^projects/$', | 99 | url(r'^projects/$', |
99 | tables.ProjectsTable.as_view(template_name="projects-toastertable.html"), | 100 | tables.ProjectsTable.as_view(template_name="projects-toastertable.html"), |
100 | name='all-projects'), | 101 | name='all-projects'), |
@@ -206,8 +207,7 @@ urlpatterns = [ | |||
206 | url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'), | 207 | url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'), |
207 | 208 | ||
208 | # image customisation functionality | 209 | # image customisation functionality |
209 | url(r'^xhr_customrecipe/(?P<recipe_id>\d+)' | 210 | url(r'^xhr_customrecipe/(?P<recipe_id>\d+)/packages/(?P<package_id>\d+|)$', |
210 | '/packages/(?P<package_id>\d+|)$', | ||
211 | api.XhrCustomRecipePackages.as_view(), | 211 | api.XhrCustomRecipePackages.as_view(), |
212 | name='xhr_customrecipe_packages'), | 212 | name='xhr_customrecipe_packages'), |
213 | 213 | ||
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py index 9a5e48e3bb..40aed265dc 100644 --- a/bitbake/lib/toaster/toastergui/views.py +++ b/bitbake/lib/toaster/toastergui/views.py | |||
@@ -6,24 +6,36 @@ | |||
6 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
7 | # | 7 | # |
8 | 8 | ||
9 | import ast | ||
9 | import re | 10 | import re |
11 | import subprocess | ||
12 | import sys | ||
13 | |||
14 | import bb.cooker | ||
15 | from bb.ui import toasterui | ||
16 | from bb.ui import eventreplay | ||
10 | 17 | ||
11 | from django.db.models import F, Q, Sum | 18 | from django.db.models import F, Q, Sum |
12 | from django.db import IntegrityError | 19 | from django.db import IntegrityError |
13 | from django.shortcuts import render, redirect, get_object_or_404 | 20 | from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect |
14 | from django.utils.http import urlencode | 21 | from django.utils.http import urlencode |
15 | from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe | 22 | from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe |
16 | from orm.models import LogMessage, Variable, Package_Dependency, Package | 23 | from orm.models import LogMessage, Variable, Package_Dependency, Package |
17 | from orm.models import Task_Dependency, Package_File | 24 | from orm.models import Task_Dependency, Package_File |
18 | from orm.models import Target_Installed_Package, Target_File | 25 | from orm.models import Target_Installed_Package, Target_File |
19 | from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File | 26 | from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File |
20 | from orm.models import BitbakeVersion, CustomImageRecipe | 27 | from orm.models import BitbakeVersion, CustomImageRecipe, EventLogsImports |
21 | 28 | ||
22 | from django.urls import reverse, resolve | 29 | from django.urls import reverse, resolve |
30 | from django.contrib import messages | ||
31 | |||
23 | from django.core.exceptions import ObjectDoesNotExist | 32 | from django.core.exceptions import ObjectDoesNotExist |
33 | from django.core.files.storage import FileSystemStorage | ||
34 | from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile | ||
24 | from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger | 35 | from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger |
25 | from django.http import HttpResponseNotFound, JsonResponse | 36 | from django.http import HttpResponseNotFound, JsonResponse |
26 | from django.utils import timezone | 37 | from django.utils import timezone |
38 | from django.views.generic import TemplateView | ||
27 | from datetime import timedelta, datetime | 39 | from datetime import timedelta, datetime |
28 | from toastergui.templatetags.projecttags import json as jsonfilter | 40 | from toastergui.templatetags.projecttags import json as jsonfilter |
29 | from decimal import Decimal | 41 | from decimal import Decimal |
@@ -32,13 +44,20 @@ import os | |||
32 | from os.path import dirname | 44 | from os.path import dirname |
33 | import mimetypes | 45 | import mimetypes |
34 | 46 | ||
47 | from toastergui.forms import LoadFileForm | ||
48 | |||
49 | from collections import namedtuple | ||
50 | |||
35 | import logging | 51 | import logging |
36 | 52 | ||
53 | from toastermain.logs import log_view_mixin | ||
54 | |||
37 | logger = logging.getLogger("toaster") | 55 | logger = logging.getLogger("toaster") |
38 | 56 | ||
39 | # Project creation and managed build enable | 57 | # Project creation and managed build enable |
40 | project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER')) | 58 | project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER')) |
41 | is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC')) | 59 | is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC')) |
60 | import_page = False | ||
42 | 61 | ||
43 | class MimeTypeFinder(object): | 62 | class MimeTypeFinder(object): |
44 | # setting this to False enables additional non-standard mimetypes | 63 | # setting this to False enables additional non-standard mimetypes |
@@ -56,6 +75,7 @@ class MimeTypeFinder(object): | |||
56 | return guessed_type | 75 | return guessed_type |
57 | 76 | ||
58 | # single point to add global values into the context before rendering | 77 | # single point to add global values into the context before rendering |
78 | @log_view_mixin | ||
59 | def toaster_render(request, page, context): | 79 | def toaster_render(request, page, context): |
60 | context['project_enable'] = project_enable | 80 | context['project_enable'] = project_enable |
61 | context['project_specific'] = is_project_specific | 81 | context['project_specific'] = is_project_specific |
@@ -665,16 +685,17 @@ def recipe_packages(request, build_id, recipe_id): | |||
665 | return response | 685 | return response |
666 | 686 | ||
667 | from django.http import HttpResponse | 687 | from django.http import HttpResponse |
688 | @log_view_mixin | ||
668 | def xhr_dirinfo(request, build_id, target_id): | 689 | def xhr_dirinfo(request, build_id, target_id): |
669 | top = request.GET.get('start', '/') | 690 | top = request.GET.get('start', '/') |
670 | return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json") | 691 | return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json") |
671 | 692 | ||
672 | from django.utils.functional import Promise | 693 | from django.utils.functional import Promise |
673 | from django.utils.encoding import force_text | 694 | from django.utils.encoding import force_str |
674 | class LazyEncoder(json.JSONEncoder): | 695 | class LazyEncoder(json.JSONEncoder): |
675 | def default(self, obj): | 696 | def default(self, obj): |
676 | if isinstance(obj, Promise): | 697 | if isinstance(obj, Promise): |
677 | return force_text(obj) | 698 | return force_str(obj) |
678 | return super(LazyEncoder, self).default(obj) | 699 | return super(LazyEncoder, self).default(obj) |
679 | 700 | ||
680 | from toastergui.templatetags.projecttags import filtered_filesizeformat | 701 | from toastergui.templatetags.projecttags import filtered_filesizeformat |
@@ -1404,7 +1425,7 @@ if True: | |||
1404 | if not os.path.isdir('%s/conf' % request.POST['importdir']): | 1425 | if not os.path.isdir('%s/conf' % request.POST['importdir']): |
1405 | raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir']) | 1426 | raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir']) |
1406 | from django.core import management | 1427 | from django.core import management |
1407 | management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'], interactive=False) | 1428 | management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir']) |
1408 | prj = Project.objects.get(name = request.POST['projectname']) | 1429 | prj = Project.objects.get(name = request.POST['projectname']) |
1409 | prj.merged_attr = True | 1430 | prj.merged_attr = True |
1410 | prj.save() | 1431 | prj.save() |
@@ -1606,12 +1627,13 @@ if True: | |||
1606 | # make sure we have a machine set for this project | 1627 | # make sure we have a machine set for this project |
1607 | ProjectVariable.objects.get_or_create(project=new_project, | 1628 | ProjectVariable.objects.get_or_create(project=new_project, |
1608 | name="MACHINE", | 1629 | name="MACHINE", |
1609 | value="qemux86") | 1630 | value="qemux86-64") |
1610 | context = {'project': new_project} | 1631 | context = {'project': new_project} |
1611 | return toaster_render(request, "js-unit-tests.html", context) | 1632 | return toaster_render(request, "js-unit-tests.html", context) |
1612 | 1633 | ||
1613 | from django.views.decorators.csrf import csrf_exempt | 1634 | from django.views.decorators.csrf import csrf_exempt |
1614 | @csrf_exempt | 1635 | @csrf_exempt |
1636 | @log_view_mixin | ||
1615 | def xhr_testreleasechange(request, pid): | 1637 | def xhr_testreleasechange(request, pid): |
1616 | def response(data): | 1638 | def response(data): |
1617 | return HttpResponse(jsonfilter(data), | 1639 | return HttpResponse(jsonfilter(data), |
@@ -1648,6 +1670,7 @@ if True: | |||
1648 | except Exception as e: | 1670 | except Exception as e: |
1649 | return response({"error": str(e) }) | 1671 | return response({"error": str(e) }) |
1650 | 1672 | ||
1673 | @log_view_mixin | ||
1651 | def xhr_configvaredit(request, pid): | 1674 | def xhr_configvaredit(request, pid): |
1652 | try: | 1675 | try: |
1653 | prj = Project.objects.get(id = pid) | 1676 | prj = Project.objects.get(id = pid) |
@@ -1683,12 +1706,12 @@ if True: | |||
1683 | t=request.POST['configvarDel'].strip() | 1706 | t=request.POST['configvarDel'].strip() |
1684 | pt = ProjectVariable.objects.get(pk = int(t)).delete() | 1707 | pt = ProjectVariable.objects.get(pk = int(t)).delete() |
1685 | 1708 | ||
1686 | # return all project settings, filter out blacklist and elsewhere-managed variables | 1709 | # return all project settings, filter out disallowed and elsewhere-managed variables |
1687 | vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context() | 1710 | vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context() |
1688 | configvars_query = ProjectVariable.objects.filter(project_id = pid).all() | 1711 | configvars_query = ProjectVariable.objects.filter(project_id = pid).all() |
1689 | for var in vars_managed: | 1712 | for var in vars_managed: |
1690 | configvars_query = configvars_query.exclude(name = var) | 1713 | configvars_query = configvars_query.exclude(name = var) |
1691 | for var in vars_blacklist: | 1714 | for var in vars_disallowed: |
1692 | configvars_query = configvars_query.exclude(name = var) | 1715 | configvars_query = configvars_query.exclude(name = var) |
1693 | 1716 | ||
1694 | return_data = { | 1717 | return_data = { |
@@ -1708,7 +1731,7 @@ if True: | |||
1708 | except ProjectVariable.DoesNotExist: | 1731 | except ProjectVariable.DoesNotExist: |
1709 | pass | 1732 | pass |
1710 | try: | 1733 | try: |
1711 | return_data['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value, | 1734 | return_data['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value, |
1712 | except ProjectVariable.DoesNotExist: | 1735 | except ProjectVariable.DoesNotExist: |
1713 | pass | 1736 | pass |
1714 | try: | 1737 | try: |
@@ -1726,6 +1749,7 @@ if True: | |||
1726 | return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json") | 1749 | return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json") |
1727 | 1750 | ||
1728 | 1751 | ||
1752 | @log_view_mixin | ||
1729 | def customrecipe_download(request, pid, recipe_id): | 1753 | def customrecipe_download(request, pid, recipe_id): |
1730 | recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id) | 1754 | recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id) |
1731 | 1755 | ||
@@ -1781,7 +1805,7 @@ if True: | |||
1781 | 'MACHINE', 'BBLAYERS' | 1805 | 'MACHINE', 'BBLAYERS' |
1782 | } | 1806 | } |
1783 | 1807 | ||
1784 | vars_blacklist = { | 1808 | vars_disallowed = { |
1785 | 'PARALLEL_MAKE','BB_NUMBER_THREADS', | 1809 | 'PARALLEL_MAKE','BB_NUMBER_THREADS', |
1786 | 'BB_DISKMON_DIRS','BB_NUMBER_THREADS','CVS_PROXY_HOST','CVS_PROXY_PORT', | 1810 | 'BB_DISKMON_DIRS','BB_NUMBER_THREADS','CVS_PROXY_HOST','CVS_PROXY_PORT', |
1787 | 'PARALLEL_MAKE','TMPDIR', | 1811 | 'PARALLEL_MAKE','TMPDIR', |
@@ -1790,7 +1814,7 @@ if True: | |||
1790 | 1814 | ||
1791 | vars_fstypes = Target_Image_File.SUFFIXES | 1815 | vars_fstypes = Target_Image_File.SUFFIXES |
1792 | 1816 | ||
1793 | return(vars_managed,sorted(vars_fstypes),vars_blacklist) | 1817 | return(vars_managed,sorted(vars_fstypes),vars_disallowed) |
1794 | 1818 | ||
1795 | def projectconf(request, pid): | 1819 | def projectconf(request, pid): |
1796 | 1820 | ||
@@ -1799,12 +1823,12 @@ if True: | |||
1799 | except Project.DoesNotExist: | 1823 | except Project.DoesNotExist: |
1800 | return HttpResponseNotFound("<h1>Project id " + pid + " is unavailable</h1>") | 1824 | return HttpResponseNotFound("<h1>Project id " + pid + " is unavailable</h1>") |
1801 | 1825 | ||
1802 | # remove blacklist and externally managed varaibles from this list | 1826 | # remove disallowed and externally managed varaibles from this list |
1803 | vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context() | 1827 | vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context() |
1804 | configvars = ProjectVariable.objects.filter(project_id = pid).all() | 1828 | configvars = ProjectVariable.objects.filter(project_id = pid).all() |
1805 | for var in vars_managed: | 1829 | for var in vars_managed: |
1806 | configvars = configvars.exclude(name = var) | 1830 | configvars = configvars.exclude(name = var) |
1807 | for var in vars_blacklist: | 1831 | for var in vars_disallowed: |
1808 | configvars = configvars.exclude(name = var) | 1832 | configvars = configvars.exclude(name = var) |
1809 | 1833 | ||
1810 | context = { | 1834 | context = { |
@@ -1812,7 +1836,7 @@ if True: | |||
1812 | 'configvars': configvars, | 1836 | 'configvars': configvars, |
1813 | 'vars_managed': vars_managed, | 1837 | 'vars_managed': vars_managed, |
1814 | 'vars_fstypes': vars_fstypes, | 1838 | 'vars_fstypes': vars_fstypes, |
1815 | 'vars_blacklist': vars_blacklist, | 1839 | 'vars_disallowed': vars_disallowed, |
1816 | } | 1840 | } |
1817 | 1841 | ||
1818 | try: | 1842 | try: |
@@ -1839,7 +1863,7 @@ if True: | |||
1839 | except ProjectVariable.DoesNotExist: | 1863 | except ProjectVariable.DoesNotExist: |
1840 | pass | 1864 | pass |
1841 | try: | 1865 | try: |
1842 | context['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value | 1866 | context['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value |
1843 | context['image_install_append_defined'] = "1" | 1867 | context['image_install_append_defined'] = "1" |
1844 | except ProjectVariable.DoesNotExist: | 1868 | except ProjectVariable.DoesNotExist: |
1845 | pass | 1869 | pass |
@@ -1933,3 +1957,163 @@ if True: | |||
1933 | except (ObjectDoesNotExist, IOError): | 1957 | except (ObjectDoesNotExist, IOError): |
1934 | return toaster_render(request, "unavailable_artifact.html") | 1958 | return toaster_render(request, "unavailable_artifact.html") |
1935 | 1959 | ||
1960 | |||
1961 | class CommandLineBuilds(TemplateView): | ||
1962 | model = EventLogsImports | ||
1963 | template_name = 'command_line_builds.html' | ||
1964 | |||
1965 | def get_context_data(self, **kwargs): | ||
1966 | context = super(CommandLineBuilds, self).get_context_data(**kwargs) | ||
1967 | #get value from BB_DEFAULT_EVENTLOG defined in bitbake.conf | ||
1968 | eventlog = subprocess.check_output(['bitbake-getvar', 'BB_DEFAULT_EVENTLOG', '--value']) | ||
1969 | if eventlog: | ||
1970 | logs_dir = os.path.dirname(eventlog.decode().strip('\n')) | ||
1971 | files = os.listdir(logs_dir) | ||
1972 | imported_files = EventLogsImports.objects.all() | ||
1973 | files_list = [] | ||
1974 | |||
1975 | # Filter files that end with ".json" | ||
1976 | event_files = [] | ||
1977 | for file in files: | ||
1978 | if file.endswith(".json"): | ||
1979 | # because BB_DEFAULT_EVENTLOG is a directory, we need to check if the file is a valid eventlog | ||
1980 | with open("{}/{}".format(logs_dir, file)) as efile: | ||
1981 | content = efile.read() | ||
1982 | if 'allvariables' in content: | ||
1983 | event_files.append(file) | ||
1984 | |||
1985 | #build dict for template using db data | ||
1986 | for event_file in event_files: | ||
1987 | if imported_files.filter(name=event_file): | ||
1988 | files_list.append({ | ||
1989 | 'name': event_file, | ||
1990 | 'imported': True, | ||
1991 | 'build_id': imported_files.filter(name=event_file)[0].build_id, | ||
1992 | 'size': os.path.getsize("{}/{}".format(logs_dir, event_file)) | ||
1993 | }) | ||
1994 | else: | ||
1995 | files_list.append({ | ||
1996 | 'name': event_file, | ||
1997 | 'imported': False, | ||
1998 | 'build_id': None, | ||
1999 | 'size': os.path.getsize("{}/{}".format(logs_dir, event_file)) | ||
2000 | }) | ||
2001 | context['import_all'] = True | ||
2002 | |||
2003 | context['files'] = files_list | ||
2004 | context['dir'] = logs_dir | ||
2005 | else: | ||
2006 | context['files'] = [] | ||
2007 | context['dir'] = '' | ||
2008 | |||
2009 | # enable session variable | ||
2010 | if not self.request.session.get('file'): | ||
2011 | self.request.session['file'] = "" | ||
2012 | |||
2013 | context['form'] = LoadFileForm() | ||
2014 | context['project_enable'] = project_enable | ||
2015 | return context | ||
2016 | |||
2017 | def post(self, request, **kwargs): | ||
2018 | logs_dir = request.POST.get('dir') | ||
2019 | all_files = request.POST.get('all') | ||
2020 | |||
2021 | # check if a build is already in progress | ||
2022 | if Build.objects.filter(outcome=Build.IN_PROGRESS): | ||
2023 | messages.add_message( | ||
2024 | self.request, | ||
2025 | messages.ERROR, | ||
2026 | "A build is already in progress. Please wait for it to complete before starting a new build." | ||
2027 | ) | ||
2028 | return JsonResponse({'response': 'building'}) | ||
2029 | imported_files = EventLogsImports.objects.all() | ||
2030 | try: | ||
2031 | if all_files == 'true': | ||
2032 | # use of session variable to deactivate icon for builds in progress | ||
2033 | request.session['all_builds'] = True | ||
2034 | request.session.modified = True | ||
2035 | request.session.save() | ||
2036 | |||
2037 | files = ast.literal_eval(request.POST.get('file')) | ||
2038 | for file in files: | ||
2039 | if imported_files.filter(name=file.get('name')).exists(): | ||
2040 | imported_files.filter(name=file.get('name'))[0].imported = True | ||
2041 | else: | ||
2042 | with open("{}/{}".format(logs_dir, file.get('name'))) as eventfile: | ||
2043 | # load variables from the first line | ||
2044 | variables = None | ||
2045 | while line := eventfile.readline().strip(): | ||
2046 | try: | ||
2047 | variables = json.loads(line)['allvariables'] | ||
2048 | break | ||
2049 | except (KeyError, json.JSONDecodeError): | ||
2050 | continue | ||
2051 | if not variables: | ||
2052 | raise Exception("File content missing build variables") | ||
2053 | eventfile.seek(0) | ||
2054 | params = namedtuple('ConfigParams', ['observe_only'])(True) | ||
2055 | player = eventreplay.EventPlayer(eventfile, variables) | ||
2056 | |||
2057 | toasterui.main(player, player, params) | ||
2058 | event_log_import = EventLogsImports.objects.create(name=file.get('name'), imported=True) | ||
2059 | event_log_import.build_id = Build.objects.last().id | ||
2060 | event_log_import.save() | ||
2061 | else: | ||
2062 | if self.request.FILES.get('eventlog_file'): | ||
2063 | file = self.request.FILES['eventlog_file'] | ||
2064 | else: | ||
2065 | file = request.POST.get('file') | ||
2066 | # use of session variable to deactivate icon for build in progress | ||
2067 | request.session['file'] = file | ||
2068 | request.session['all_builds'] = False | ||
2069 | request.session.modified = True | ||
2070 | request.session.save() | ||
2071 | |||
2072 | if imported_files.filter(name=file).exists(): | ||
2073 | imported_files.filter(name=file)[0].imported = True | ||
2074 | else: | ||
2075 | if isinstance(file, InMemoryUploadedFile) or isinstance(file, TemporaryUploadedFile): | ||
2076 | variables = None | ||
2077 | while line := file.readline().strip(): | ||
2078 | try: | ||
2079 | variables = json.loads(line)['allvariables'] | ||
2080 | break | ||
2081 | except (KeyError, json.JSONDecodeError): | ||
2082 | continue | ||
2083 | if not variables: | ||
2084 | raise Exception("File content missing build variables") | ||
2085 | file.seek(0) | ||
2086 | params = namedtuple('ConfigParams', ['observe_only'])(True) | ||
2087 | player = eventreplay.EventPlayer(file, variables) | ||
2088 | if not os.path.exists('{}/{}'.format(logs_dir, file.name)): | ||
2089 | fs = FileSystemStorage(location=logs_dir) | ||
2090 | fs.save(file.name, file) | ||
2091 | toasterui.main(player, player, params) | ||
2092 | else: | ||
2093 | with open("{}/{}".format(logs_dir, file)) as eventfile: | ||
2094 | # load variables from the first line | ||
2095 | variables = None | ||
2096 | while line := eventfile.readline().strip(): | ||
2097 | try: | ||
2098 | variables = json.loads(line)['allvariables'] | ||
2099 | break | ||
2100 | except (KeyError, json.JSONDecodeError): | ||
2101 | continue | ||
2102 | if not variables: | ||
2103 | raise Exception("File content missing build variables") | ||
2104 | eventfile.seek(0) | ||
2105 | params = namedtuple('ConfigParams', ['observe_only'])(True) | ||
2106 | player = eventreplay.EventPlayer(eventfile, variables) | ||
2107 | toasterui.main(player, player, params) | ||
2108 | event_log_import = EventLogsImports.objects.create(name=file, imported=True) | ||
2109 | event_log_import.build_id = Build.objects.last().id | ||
2110 | event_log_import.save() | ||
2111 | request.session['file'] = "" | ||
2112 | except Exception: | ||
2113 | messages.add_message( | ||
2114 | self.request, | ||
2115 | messages.ERROR, | ||
2116 | "The file content is not in the correct format. Update file content or upload a different file." | ||
2117 | ) | ||
2118 | return HttpResponseRedirect("/toastergui/cmdline/") | ||
2119 | return HttpResponseRedirect('/toastergui/builds/') | ||
diff --git a/bitbake/lib/toaster/toastergui/widgets.py b/bitbake/lib/toaster/toastergui/widgets.py index ceff52942e..b32abf40b3 100644 --- a/bitbake/lib/toaster/toastergui/widgets.py +++ b/bitbake/lib/toaster/toastergui/widgets.py | |||
@@ -7,6 +7,7 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | from django.views.generic import View, TemplateView | 9 | from django.views.generic import View, TemplateView |
10 | from django.utils.decorators import method_decorator | ||
10 | from django.views.decorators.cache import cache_control | 11 | from django.views.decorators.cache import cache_control |
11 | from django.shortcuts import HttpResponse | 12 | from django.shortcuts import HttpResponse |
12 | from django.core.cache import cache | 13 | from django.core.cache import cache |
@@ -31,6 +32,7 @@ import re | |||
31 | import os | 32 | import os |
32 | 33 | ||
33 | from toastergui.tablefilter import TableFilterMap | 34 | from toastergui.tablefilter import TableFilterMap |
35 | from toastermain.logs import log_view_mixin | ||
34 | 36 | ||
35 | try: | 37 | try: |
36 | from urllib import unquote_plus | 38 | from urllib import unquote_plus |
@@ -63,8 +65,8 @@ class ToasterTable(TemplateView): | |||
63 | self.default_orderby = "" | 65 | self.default_orderby = "" |
64 | 66 | ||
65 | # prevent HTTP caching of table data | 67 | # prevent HTTP caching of table data |
66 | @cache_control(must_revalidate=True, | 68 | @method_decorator(cache_control(must_revalidate=True, |
67 | max_age=0, no_store=True, no_cache=True) | 69 | max_age=0, no_store=True, no_cache=True)) |
68 | def dispatch(self, *args, **kwargs): | 70 | def dispatch(self, *args, **kwargs): |
69 | return super(ToasterTable, self).dispatch(*args, **kwargs) | 71 | return super(ToasterTable, self).dispatch(*args, **kwargs) |
70 | 72 | ||
@@ -83,6 +85,7 @@ class ToasterTable(TemplateView): | |||
83 | 85 | ||
84 | return context | 86 | return context |
85 | 87 | ||
88 | @log_view_mixin | ||
86 | def get(self, request, *args, **kwargs): | 89 | def get(self, request, *args, **kwargs): |
87 | if request.GET.get('format', None) == 'json': | 90 | if request.GET.get('format', None) == 'json': |
88 | 91 | ||
@@ -304,6 +307,7 @@ class ToasterTable(TemplateView): | |||
304 | 307 | ||
305 | self.setup_columns(**kwargs) | 308 | self.setup_columns(**kwargs) |
306 | 309 | ||
310 | self.apply_orderby('pk') | ||
307 | if search: | 311 | if search: |
308 | self.apply_search(search) | 312 | self.apply_search(search) |
309 | if filters: | 313 | if filters: |
@@ -413,6 +417,7 @@ class ToasterTypeAhead(View): | |||
413 | def __init__(self, *args, **kwargs): | 417 | def __init__(self, *args, **kwargs): |
414 | super(ToasterTypeAhead, self).__init__() | 418 | super(ToasterTypeAhead, self).__init__() |
415 | 419 | ||
420 | @log_view_mixin | ||
416 | def get(self, request, *args, **kwargs): | 421 | def get(self, request, *args, **kwargs): |
417 | def response(data): | 422 | def response(data): |
418 | return HttpResponse(json.dumps(data, | 423 | return HttpResponse(json.dumps(data, |
@@ -468,6 +473,7 @@ class MostRecentBuildsView(View): | |||
468 | 473 | ||
469 | return False | 474 | return False |
470 | 475 | ||
476 | @log_view_mixin | ||
471 | def get(self, request, *args, **kwargs): | 477 | def get(self, request, *args, **kwargs): |
472 | """ | 478 | """ |
473 | Returns a list of builds in JSON format. | 479 | Returns a list of builds in JSON format. |
diff --git a/bitbake/lib/toaster/toastermain/logs.py b/bitbake/lib/toaster/toastermain/logs.py new file mode 100644 index 0000000000..62d871963a --- /dev/null +++ b/bitbake/lib/toaster/toastermain/logs.py | |||
@@ -0,0 +1,158 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # -*- coding: utf-8 -*- | ||
3 | |||
4 | import os | ||
5 | import logging | ||
6 | import json | ||
7 | from pathlib import Path | ||
8 | from django.http import HttpRequest | ||
9 | |||
10 | BUILDDIR = Path(os.environ.get('BUILDDIR', '/tmp')) | ||
11 | |||
12 | def log_api_request(request, response, view, logger_name='api'): | ||
13 | """Helper function for LogAPIMixin""" | ||
14 | |||
15 | repjson = { | ||
16 | 'view': view, | ||
17 | 'path': request.path, | ||
18 | 'method': request.method, | ||
19 | 'status': response.status_code | ||
20 | } | ||
21 | |||
22 | logger = logging.getLogger(logger_name) | ||
23 | logger.info( | ||
24 | json.dumps(repjson, indent=4, separators=(", ", " : ")) | ||
25 | ) | ||
26 | |||
27 | |||
28 | def log_view_mixin(view): | ||
29 | def log_view_request(*args, **kwargs): | ||
30 | # get request from args else kwargs | ||
31 | request = None | ||
32 | if len(args) > 0: | ||
33 | for req in args: | ||
34 | if isinstance(req, HttpRequest): | ||
35 | request = req | ||
36 | break | ||
37 | elif request is None: | ||
38 | request = kwargs.get('request') | ||
39 | |||
40 | response = view(*args, **kwargs) | ||
41 | view_name = 'unknown' | ||
42 | if hasattr(request, 'resolver_match'): | ||
43 | if hasattr(request.resolver_match, 'view_name'): | ||
44 | view_name = request.resolver_match.view_name | ||
45 | |||
46 | log_api_request( | ||
47 | request, response, view_name, 'toaster') | ||
48 | return response | ||
49 | return log_view_request | ||
50 | |||
51 | |||
52 | |||
53 | class LogAPIMixin: | ||
54 | """Logs API requests | ||
55 | |||
56 | tested with: | ||
57 | - APIView | ||
58 | - ModelViewSet | ||
59 | - ReadOnlyModelViewSet | ||
60 | - GenericAPIView | ||
61 | |||
62 | Note: you can set `view_name` attribute in View to override get_view_name() | ||
63 | """ | ||
64 | |||
65 | def get_view_name(self): | ||
66 | if hasattr(self, 'view_name'): | ||
67 | return self.view_name | ||
68 | return super().get_view_name() | ||
69 | |||
70 | def finalize_response(self, request, response, *args, **kwargs): | ||
71 | log_api_request(request, response, self.get_view_name()) | ||
72 | return super().finalize_response(request, response, *args, **kwargs) | ||
73 | |||
74 | |||
75 | LOGGING_SETTINGS = { | ||
76 | 'version': 1, | ||
77 | 'disable_existing_loggers': False, | ||
78 | 'filters': { | ||
79 | 'require_debug_false': { | ||
80 | '()': 'django.utils.log.RequireDebugFalse' | ||
81 | } | ||
82 | }, | ||
83 | 'formatters': { | ||
84 | 'datetime': { | ||
85 | 'format': '%(asctime)s %(levelname)s %(message)s' | ||
86 | }, | ||
87 | 'verbose': { | ||
88 | 'format': '{levelname} {asctime} {module} {name}.{funcName} {process:d} {thread:d} {message}', | ||
89 | 'datefmt': "%d/%b/%Y %H:%M:%S", | ||
90 | 'style': '{', | ||
91 | }, | ||
92 | 'api': { | ||
93 | 'format': '\n{levelname} {asctime} {name}.{funcName}:\n{message}', | ||
94 | 'style': '{' | ||
95 | } | ||
96 | }, | ||
97 | 'handlers': { | ||
98 | 'mail_admins': { | ||
99 | 'level': 'ERROR', | ||
100 | 'filters': ['require_debug_false'], | ||
101 | 'class': 'django.utils.log.AdminEmailHandler' | ||
102 | }, | ||
103 | 'console': { | ||
104 | 'level': 'DEBUG', | ||
105 | 'class': 'logging.StreamHandler', | ||
106 | 'formatter': 'datetime', | ||
107 | }, | ||
108 | 'file_django': { | ||
109 | 'level': 'INFO', | ||
110 | 'class': 'logging.handlers.TimedRotatingFileHandler', | ||
111 | 'filename': BUILDDIR / 'toaster_logs/django.log', | ||
112 | 'when': 'D', # interval type | ||
113 | 'interval': 1, # defaults to 1 | ||
114 | 'backupCount': 10, # how many files to keep | ||
115 | 'formatter': 'verbose', | ||
116 | }, | ||
117 | 'file_api': { | ||
118 | 'level': 'INFO', | ||
119 | 'class': 'logging.handlers.TimedRotatingFileHandler', | ||
120 | 'filename': BUILDDIR / 'toaster_logs/api.log', | ||
121 | 'when': 'D', | ||
122 | 'interval': 1, | ||
123 | 'backupCount': 10, | ||
124 | 'formatter': 'verbose', | ||
125 | }, | ||
126 | 'file_toaster': { | ||
127 | 'level': 'INFO', | ||
128 | 'class': 'logging.handlers.TimedRotatingFileHandler', | ||
129 | 'filename': BUILDDIR / 'toaster_logs/web.log', | ||
130 | 'when': 'D', | ||
131 | 'interval': 1, | ||
132 | 'backupCount': 10, | ||
133 | 'formatter': 'verbose', | ||
134 | }, | ||
135 | }, | ||
136 | 'loggers': { | ||
137 | 'django.request': { | ||
138 | 'handlers': ['file_django', 'console'], | ||
139 | 'level': 'WARN', | ||
140 | 'propagate': True, | ||
141 | }, | ||
142 | 'django': { | ||
143 | 'handlers': ['file_django', 'console'], | ||
144 | 'level': 'WARNING', | ||
145 | 'propogate': True, | ||
146 | }, | ||
147 | 'toaster': { | ||
148 | 'handlers': ['file_toaster'], | ||
149 | 'level': 'INFO', | ||
150 | 'propagate': False, | ||
151 | }, | ||
152 | 'api': { | ||
153 | 'handlers': ['file_api'], | ||
154 | 'level': 'INFO', | ||
155 | 'propagate': False, | ||
156 | } | ||
157 | } | ||
158 | } | ||
diff --git a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py index 59da6ff7ac..f7139aa041 100644 --- a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py +++ b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py | |||
@@ -451,7 +451,7 @@ class Command(BaseCommand): | |||
451 | # Catch vars relevant to Toaster (in case no Toaster section) | 451 | # Catch vars relevant to Toaster (in case no Toaster section) |
452 | self.update_project_vars(project,'DISTRO') | 452 | self.update_project_vars(project,'DISTRO') |
453 | self.update_project_vars(project,'MACHINE') | 453 | self.update_project_vars(project,'MACHINE') |
454 | self.update_project_vars(project,'IMAGE_INSTALL_append') | 454 | self.update_project_vars(project,'IMAGE_INSTALL:append') |
455 | self.update_project_vars(project,'IMAGE_FSTYPES') | 455 | self.update_project_vars(project,'IMAGE_FSTYPES') |
456 | self.update_project_vars(project,'PACKAGE_CLASSES') | 456 | self.update_project_vars(project,'PACKAGE_CLASSES') |
457 | # These vars are typically only assigned by Toaster | 457 | # These vars are typically only assigned by Toaster |
@@ -545,7 +545,7 @@ class Command(BaseCommand): | |||
545 | # Find the directory's release, and promote to default_release if local paths | 545 | # Find the directory's release, and promote to default_release if local paths |
546 | release = self.find_import_release(layers_list,lv_dict,default_release) | 546 | release = self.find_import_release(layers_list,lv_dict,default_release) |
547 | # create project, SANITY: reuse any project of same name | 547 | # create project, SANITY: reuse any project of same name |
548 | project = Project.objects.create_project(project_name,release,project) | 548 | project = Project.objects.create_project(project_name,release,project, imported=True) |
549 | # Apply any new layers or variables | 549 | # Apply any new layers or variables |
550 | self.apply_conf_variables(project,layers_list,lv_dict,release) | 550 | self.apply_conf_variables(project,layers_list,lv_dict,release) |
551 | # WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific' | 551 | # WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific' |
diff --git a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py index 811fd5d516..b2c002da7a 100644 --- a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py +++ b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py | |||
@@ -13,7 +13,7 @@ import errno | |||
13 | import socket | 13 | import socket |
14 | 14 | ||
15 | from django.core.management.base import BaseCommand, CommandError | 15 | from django.core.management.base import BaseCommand, CommandError |
16 | from django.utils.encoding import force_text | 16 | from django.utils.encoding import force_str |
17 | 17 | ||
18 | DEFAULT_ADDRPORT = "0.0.0.0:8000" | 18 | DEFAULT_ADDRPORT = "0.0.0.0:8000" |
19 | 19 | ||
@@ -51,7 +51,7 @@ class Command(BaseCommand): | |||
51 | if hasattr(err, 'errno') and err.errno in errors: | 51 | if hasattr(err, 'errno') and err.errno in errors: |
52 | errtext = errors[err.errno] | 52 | errtext = errors[err.errno] |
53 | else: | 53 | else: |
54 | errtext = force_text(err) | 54 | errtext = force_str(err) |
55 | raise CommandError(errtext) | 55 | raise CommandError(errtext) |
56 | 56 | ||
57 | self.stdout.write("OK") | 57 | self.stdout.write("OK") |
diff --git a/bitbake/lib/toaster/toastermain/settings.py b/bitbake/lib/toaster/toastermain/settings.py index a4b370c8d4..e06adc5a93 100644 --- a/bitbake/lib/toaster/toastermain/settings.py +++ b/bitbake/lib/toaster/toastermain/settings.py | |||
@@ -9,6 +9,8 @@ | |||
9 | # Django settings for Toaster project. | 9 | # Django settings for Toaster project. |
10 | 10 | ||
11 | import os | 11 | import os |
12 | from pathlib import Path | ||
13 | from toastermain.logs import LOGGING_SETTINGS | ||
12 | 14 | ||
13 | DEBUG = True | 15 | DEBUG = True |
14 | 16 | ||
@@ -39,6 +41,9 @@ DATABASES = { | |||
39 | } | 41 | } |
40 | } | 42 | } |
41 | 43 | ||
44 | # New in Django 3.2 | ||
45 | DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' | ||
46 | |||
42 | # Needed when Using sqlite especially to add a longer timeout for waiting | 47 | # Needed when Using sqlite especially to add a longer timeout for waiting |
43 | # for the database lock to be released | 48 | # for the database lock to be released |
44 | # https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors | 49 | # https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors |
@@ -84,14 +89,17 @@ else: | |||
84 | from pytz.exceptions import UnknownTimeZoneError | 89 | from pytz.exceptions import UnknownTimeZoneError |
85 | try: | 90 | try: |
86 | if pytz.timezone(zonename) is not None: | 91 | if pytz.timezone(zonename) is not None: |
87 | zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename | 92 | with open(filepath, 'rb') as f: |
93 | zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename | ||
88 | except UnknownTimeZoneError as ValueError: | 94 | except UnknownTimeZoneError as ValueError: |
89 | # we expect timezone failures here, just move over | 95 | # we expect timezone failures here, just move over |
90 | pass | 96 | pass |
91 | except ImportError: | 97 | except ImportError: |
92 | zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename | 98 | with open(filepath, 'rb') as f: |
99 | zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename | ||
93 | 100 | ||
94 | TIME_ZONE = zonefilelist[hashlib.md5(open('/etc/localtime', 'rb').read()).hexdigest()] | 101 | with open('/etc/localtime', 'rb') as f: |
102 | TIME_ZONE = zonefilelist[hashlib.md5(f.read()).hexdigest()] | ||
95 | 103 | ||
96 | # Language code for this installation. All choices can be found here: | 104 | # Language code for this installation. All choices can be found here: |
97 | # http://www.i18nguy.com/unicode/language-identifiers.html | 105 | # http://www.i18nguy.com/unicode/language-identifiers.html |
@@ -103,10 +111,6 @@ SITE_ID = 1 | |||
103 | # to load the internationalization machinery. | 111 | # to load the internationalization machinery. |
104 | USE_I18N = True | 112 | USE_I18N = True |
105 | 113 | ||
106 | # If you set this to False, Django will not format dates, numbers and | ||
107 | # calendars according to the current locale. | ||
108 | USE_L10N = True | ||
109 | |||
110 | # If you set this to False, Django will not use timezone-aware datetimes. | 114 | # If you set this to False, Django will not use timezone-aware datetimes. |
111 | USE_TZ = True | 115 | USE_TZ = True |
112 | 116 | ||
@@ -147,6 +151,8 @@ STATICFILES_FINDERS = ( | |||
147 | # Make this unique, and don't share it with anybody. | 151 | # Make this unique, and don't share it with anybody. |
148 | SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT' | 152 | SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT' |
149 | 153 | ||
154 | TMPDIR = os.environ.get('TOASTER_DJANGO_TMPDIR', '/tmp') | ||
155 | |||
150 | class InvalidString(str): | 156 | class InvalidString(str): |
151 | def __mod__(self, other): | 157 | def __mod__(self, other): |
152 | from django.template.base import TemplateSyntaxError | 158 | from django.template.base import TemplateSyntaxError |
@@ -183,7 +189,13 @@ TEMPLATES = [ | |||
183 | 'django.template.loaders.app_directories.Loader', | 189 | 'django.template.loaders.app_directories.Loader', |
184 | #'django.template.loaders.eggs.Loader', | 190 | #'django.template.loaders.eggs.Loader', |
185 | ], | 191 | ], |
186 | 'string_if_invalid': InvalidString("%s"), | 192 | # https://docs.djangoproject.com/en/4.2/ref/templates/api/#how-invalid-variables-are-handled |
193 | # Generally, string_if_invalid should only be enabled in order to debug | ||
194 | # a specific template problem, then cleared once debugging is complete. | ||
195 | # If you assign a value other than '' to string_if_invalid, | ||
196 | # you will experience rendering problems with these templates and sites. | ||
197 | # 'string_if_invalid': InvalidString("%s"), | ||
198 | 'string_if_invalid': "", | ||
187 | 'debug': DEBUG, | 199 | 'debug': DEBUG, |
188 | }, | 200 | }, |
189 | }, | 201 | }, |
@@ -207,7 +219,7 @@ CACHES = { | |||
207 | # }, | 219 | # }, |
208 | 'default': { | 220 | 'default': { |
209 | 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', | 221 | 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', |
210 | 'LOCATION': '/tmp/toaster_cache_%d' % os.getuid(), | 222 | 'LOCATION': '%s/toaster_cache_%d' % (TMPDIR, os.getuid()), |
211 | 'TIMEOUT': 1, | 223 | 'TIMEOUT': 1, |
212 | } | 224 | } |
213 | } | 225 | } |
@@ -239,6 +251,9 @@ INSTALLED_APPS = ( | |||
239 | 'django.contrib.humanize', | 251 | 'django.contrib.humanize', |
240 | 'bldcollector', | 252 | 'bldcollector', |
241 | 'toastermain', | 253 | 'toastermain', |
254 | |||
255 | # 3rd-lib | ||
256 | "log_viewer", | ||
242 | ) | 257 | ) |
243 | 258 | ||
244 | 259 | ||
@@ -299,43 +314,21 @@ for t in os.walk(os.path.dirname(currentdir)): | |||
299 | # the site admins on every HTTP 500 error when DEBUG=False. | 314 | # the site admins on every HTTP 500 error when DEBUG=False. |
300 | # See http://docs.djangoproject.com/en/dev/topics/logging for | 315 | # See http://docs.djangoproject.com/en/dev/topics/logging for |
301 | # more details on how to customize your logging configuration. | 316 | # more details on how to customize your logging configuration. |
302 | LOGGING = { | 317 | LOGGING = LOGGING_SETTINGS |
303 | 'version': 1, | 318 | |
304 | 'disable_existing_loggers': False, | 319 | # Build paths inside the project like this: BASE_DIR / 'subdir'. |
305 | 'filters': { | 320 | BUILDDIR = os.environ.get("BUILDDIR", TMPDIR) |
306 | 'require_debug_false': { | 321 | |
307 | '()': 'django.utils.log.RequireDebugFalse' | 322 | # LOG VIEWER |
308 | } | 323 | # https://pypi.org/project/django-log-viewer/ |
309 | }, | 324 | LOG_VIEWER_FILES_PATTERN = '*.log*' |
310 | 'formatters': { | 325 | LOG_VIEWER_FILES_DIR = os.path.join(BUILDDIR, "toaster_logs/") |
311 | 'datetime': { | 326 | LOG_VIEWER_PAGE_LENGTH = 25 # total log lines per-page |
312 | 'format': '%(asctime)s %(levelname)s %(message)s' | 327 | LOG_VIEWER_MAX_READ_LINES = 100000 # total log lines will be read |
313 | } | 328 | LOG_VIEWER_PATTERNS = ['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'] |
314 | }, | 329 | |
315 | 'handlers': { | 330 | # Optionally you can set the next variables in order to customize the admin: |
316 | 'mail_admins': { | 331 | LOG_VIEWER_FILE_LIST_TITLE = "Logs list" |
317 | 'level': 'ERROR', | ||
318 | 'filters': ['require_debug_false'], | ||
319 | 'class': 'django.utils.log.AdminEmailHandler' | ||
320 | }, | ||
321 | 'console': { | ||
322 | 'level': 'DEBUG', | ||
323 | 'class': 'logging.StreamHandler', | ||
324 | 'formatter': 'datetime', | ||
325 | } | ||
326 | }, | ||
327 | 'loggers': { | ||
328 | 'toaster' : { | ||
329 | 'handlers': ['console'], | ||
330 | 'level': 'DEBUG', | ||
331 | }, | ||
332 | 'django.request': { | ||
333 | 'handlers': ['console'], | ||
334 | 'level': 'WARN', | ||
335 | 'propagate': True, | ||
336 | }, | ||
337 | } | ||
338 | } | ||
339 | 332 | ||
340 | if DEBUG and SQL_DEBUG: | 333 | if DEBUG and SQL_DEBUG: |
341 | LOGGING['loggers']['django.db.backends'] = { | 334 | LOGGING['loggers']['django.db.backends'] = { |
diff --git a/bitbake/lib/toaster/toastermain/settings_test.py b/bitbake/lib/toaster/toastermain/settings_test.py index 6538d9e453..74def2d240 100644 --- a/bitbake/lib/toaster/toastermain/settings_test.py +++ b/bitbake/lib/toaster/toastermain/settings_test.py | |||
@@ -19,10 +19,10 @@ TEMPLATE_DEBUG = DEBUG | |||
19 | DATABASES = { | 19 | DATABASES = { |
20 | 'default': { | 20 | 'default': { |
21 | 'ENGINE': 'django.db.backends.sqlite3', | 21 | 'ENGINE': 'django.db.backends.sqlite3', |
22 | 'NAME': '/tmp/toaster-test-db.sqlite', | 22 | 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR, |
23 | 'TEST': { | 23 | 'TEST': { |
24 | 'ENGINE': 'django.db.backends.sqlite3', | 24 | 'ENGINE': 'django.db.backends.sqlite3', |
25 | 'NAME': '/tmp/toaster-test-db.sqlite', | 25 | 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR, |
26 | } | 26 | } |
27 | } | 27 | } |
28 | } | 28 | } |
diff --git a/bitbake/lib/toaster/toastermain/urls.py b/bitbake/lib/toaster/toastermain/urls.py index 5fb520b384..3be46fcf0c 100644 --- a/bitbake/lib/toaster/toastermain/urls.py +++ b/bitbake/lib/toaster/toastermain/urls.py | |||
@@ -6,7 +6,7 @@ | |||
6 | # SPDX-License-Identifier: GPL-2.0-only | 6 | # SPDX-License-Identifier: GPL-2.0-only |
7 | # | 7 | # |
8 | 8 | ||
9 | from django.conf.urls import include, url | 9 | from django.urls import re_path as url, include |
10 | from django.views.generic import RedirectView, TemplateView | 10 | from django.views.generic import RedirectView, TemplateView |
11 | from django.views.decorators.cache import never_cache | 11 | from django.views.decorators.cache import never_cache |
12 | import bldcollector.views | 12 | import bldcollector.views |
@@ -28,6 +28,8 @@ urlpatterns = [ | |||
28 | # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), | 28 | # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), |
29 | 29 | ||
30 | 30 | ||
31 | url(r'^logs/', include('log_viewer.urls')), | ||
32 | |||
31 | # This is here to maintain backward compatibility and will be deprecated | 33 | # This is here to maintain backward compatibility and will be deprecated |
32 | # in the future. | 34 | # in the future. |
33 | url(r'^orm/eventfile$', bldcollector.views.eventfile), | 35 | url(r'^orm/eventfile$', bldcollector.views.eventfile), |
diff --git a/bitbake/lib/toaster/tox.ini b/bitbake/lib/toaster/tox.ini new file mode 100644 index 0000000000..1516a527ae --- /dev/null +++ b/bitbake/lib/toaster/tox.ini | |||
@@ -0,0 +1,24 @@ | |||
1 | [tox] | ||
2 | envlist = py38, py39, py310, py311, py312 | ||
3 | skipsdist = True | ||
4 | toxworkdir = {env:TOX_WORKDIR:.tox} | ||
5 | passenv = * | ||
6 | |||
7 | [testenv] | ||
8 | passenv = | ||
9 | SSTATE_DIR | ||
10 | DL_DIR | ||
11 | TOASTER_DJANGO_TMPDIR | ||
12 | setenv = | ||
13 | DJANGO_SETTINGS_MODULE=toastermain.settings_test | ||
14 | TOASTER_BUILDSERVER=1 | ||
15 | BUILDDIR = {env:BUILDDIR} | ||
16 | EVENTREPLAY_DIR = {env:EVENTREPLAY_DIR:BUILDDIR} | ||
17 | commands = | ||
18 | python3 {toxinidir}/manage.py test tests.db tests.commands tests.builds tests.browser tests.functional tests.views | ||
19 | deps = | ||
20 | -r {toxinidir}/../../toaster-requirements.txt | ||
21 | -r {toxinidir}/tests/toaster-tests-requirements.txt | ||
22 | |||
23 | [testenv:chrome] | ||
24 | commands={[testenv]commands} --splinter-webdriver=chrome \ No newline at end of file | ||