Diffstat (limited to 'bitbake/lib')
123 files changed, 13537 insertions, 8195 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 76bc08a3ea..4af03c54ad 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -36,8 +36,9 @@ class COWDictMeta(COWMeta):
     __marker__ = tuple()
 
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
@@ -161,8 +162,9 @@ class COWDictMeta(COWMeta):
 
 class COWSetMeta(COWDictMeta):
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
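Note: the removed "magic number" subtracted 3 for the bookkeeping keys every COW class dictionary carried; newer CPython (3.13 adds __firstlineno__ and __static_attributes__ to class dictionaries) skews such a fixed count, hence the explicit ignore list. A standalone sketch of the same idea (the Demo class is illustrative, not part of COW.py):

    class Demo:
        x = 1

    # Keys CPython inserts automatically, including the two added in 3.13.
    ignored = {"__module__", "__qualname__", "__dict__", "__weakref__",
               "__doc__", "__firstlineno__", "__static_attributes__"}
    print(set(vars(Demo)) - ignored)   # {'x'} on CPython 3.8 through 3.13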
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 15013540c2..62ceaaef6e 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,11 +9,11 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.9.0"
+__version__ = "2.15.0"
 
 import sys
-if sys.version_info < (3, 8, 0):
-    raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 9, 0):
+    raise RuntimeError("Sorry, python 3.9.0 or later is required for this version of bitbake")
 
 if sys.version_info < (3, 10, 0):
     # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
@@ -36,6 +36,7 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
 
 
 class NullHandler(logging.Handler):
@@ -103,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)
 
-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-        def __repr__(self):
-            logger = self.logger
-            level = logger.getLevelName(logger.getEffectiveLevel())
-            return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter
 
 logger = logging.getLogger("BitBake")
@@ -148,9 +129,25 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
 
 # Messaging convenience functions
 def plain(*args):
+    """
+    Prints a message at "plain" level (higher level than a ``bb.note()``).
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.plain(''.join(args))
 
 def debug(lvl, *args):
+    """
+    Prints a debug message.
+
+    Arguments:
+
+    - ``lvl``: debug level. Higher value increases the debug level
+      (determined by ``bitbake -D``).
+    - ``args``: one or more strings to print.
+    """
     if isinstance(lvl, str):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
@@ -158,33 +155,81 @@ def debug(lvl, *args):
     mainlogger.bbdebug(lvl, ''.join(args))
 
 def note(*args):
+    """
+    Prints a message at "note" level.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.info(''.join(args))
 
-#
-# A higher prioity note which will show on the console but isn't a warning
-#
-# Something is happening the user should be aware of but they probably did
-# something to make it happen
-#
 def verbnote(*args):
+    """
+    A higher priority note which will show on the console but isn't a warning.
+
+    Use in contexts when something is happening the user should be aware of but
+    they probably did something to make it happen.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.verbnote(''.join(args))
 
 #
 # Warnings - things the user likely needs to pay attention to and fix
 #
 def warn(*args):
+    """
+    Prints a warning message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warning(''.join(args))
 
 def warnonce(*args):
+    """
+    Prints a warning message like ``bb.warn()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warnonce(''.join(args))
 
 def error(*args, **kwargs):
+    """
+    Prints an error message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.error(''.join(args), extra=kwargs)
 
 def erroronce(*args):
+    """
+    Prints an error message like ``bb.error()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.erroronce(''.join(args))
 
 def fatal(*args, **kwargs):
+    """
+    Prints an error message and stops the BitBake execution.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.critical(''.join(args), extra=kwargs)
     raise BBHandledException()
 
@@ -213,7 +258,6 @@ def deprecated(func, name=None, advice=""):
 # For compatibility
 def deprecate_import(current, modulename, fromlist, renames = None):
     """Import objects from one module into another, wrapping them with a DeprecationWarning"""
-    import sys
 
     module = __import__(modulename, fromlist = fromlist)
     for position, objname in enumerate(fromlist):
@@ -227,3 +271,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
     setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
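The helpers documented above are the standard way for metadata code to talk to the BitBake UI; note they join all positional string arguments. A usage sketch (the function and variable values are illustrative, assumed to run inside a task with a datastore d):

    import bb

    def report(d):
        bb.debug(2, "entering report")            # shown with bitbake -DD
        bb.note("building ", d.getVar("PV"))      # args are joined with ''
        bb.warnonce("deprecated option in use")   # printed once per build
        if not d.getVar("PN"):
            bb.fatal("PN must be set")            # critical log + BBHandledException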
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
index 0f41b275cf..e9dbdb617f 100755
--- a/bitbake/lib/bb/acl.py
+++ b/bitbake/lib/bb/acl.py
@@ -195,8 +195,6 @@ class ACL(object):
 
 def main():
     import argparse
-    import pwd
-    import grp
     from pathlib import Path
 
     parser = argparse.ArgumentParser()
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index 639e1607f8..a4371643d7 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
 #
 
 
-from .client import AsyncClient, Client, ClientPool
+from .client import AsyncClient, Client
 from .serv import AsyncServer, AsyncServerConnection
 from .connection import DEFAULT_MAX_CHUNK
 from .exceptions import (
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..17b72033b9 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -24,6 +24,12 @@ ADDR_TYPE_UNIX = 0
 ADDR_TYPE_TCP = 1
 ADDR_TYPE_WS = 2
 
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
 def parse_address(addr):
     if addr.startswith(UNIX_PREFIX):
         return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
@@ -39,6 +45,7 @@ def parse_address(addr):
 
     return (ADDR_TYPE_TCP, (host, int(port)))
 
+
 class AsyncClient(object):
     def __init__(
         self,
@@ -86,8 +93,35 @@ class AsyncClient(object):
     async def connect_websocket(self, uri):
         import websockets
 
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            try:
+                websocket = await websockets.connect(
+                    uri,
+                    ping_interval=None,
+                    open_timeout=self.timeout,
+                )
+            except asyncio.exceptions.TimeoutError:
+                raise ConnectionError("Timeout while connecting to websocket")
+            except (OSError, websockets.InvalidHandshake, websockets.InvalidURI) as exc:
+                raise ConnectionError(f"Could not connect to websocket: {exc}") from exc
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
@@ -225,85 +259,9 @@ class Client(object):
     def close(self):
         if self.loop:
             self.loop.run_until_complete(self.client.close())
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-            self.loop.close()
-            self.loop = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-        return False
-
-
-class ClientPool(object):
-    def __init__(self, max_clients):
-        self.avail_clients = []
-        self.num_clients = 0
-        self.max_clients = max_clients
-        self.loop = None
-        self.client_condition = None
-
-    @abc.abstractmethod
-    async def _new_client(self):
-        raise NotImplementedError("Must be implemented in derived class")
-
-    def close(self):
-        if self.client_condition:
-            self.client_condition = None
-
-        if self.loop:
-            self.loop.run_until_complete(self.__close_clients())
             self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
             self.loop = None
-
-    def run_tasks(self, tasks):
-        if not self.loop:
-            self.loop = asyncio.new_event_loop()
-
-        thread = Thread(target=self.__thread_main, args=(tasks,))
-        thread.start()
-        thread.join()
-
-    @contextlib.asynccontextmanager
-    async def get_client(self):
-        async with self.client_condition:
-            if self.avail_clients:
-                client = self.avail_clients.pop()
-            elif self.num_clients < self.max_clients:
-                self.num_clients += 1
-                client = await self._new_client()
-            else:
-                while not self.avail_clients:
-                    await self.client_condition.wait()
-                client = self.avail_clients.pop()
-
-            try:
-                yield client
-            finally:
-                async with self.client_condition:
-                    self.avail_clients.append(client)
-                    self.client_condition.notify()
-
-    def __thread_main(self, tasks):
-        async def process_task(task):
-            async with self.get_client() as client:
-                await task(client)
-
-        asyncio.set_event_loop(self.loop)
-        if not self.client_condition:
-            self.client_condition = asyncio.Condition()
-        tasks = [process_task(t) for t in tasks]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-
-    async def __close_clients(self):
-        for c in self.avail_clients:
-            await c.close()
-        self.avail_clients = []
-        self.num_clients = 0
 
     def __enter__(self):
         return self
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index a66117acad..667217c5c1 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -138,14 +138,20 @@ class StreamServer(object):
 
 
 class TCPStreamServer(StreamServer):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         super().__init__(handler, logger)
         self.host = host
         self.port = port
+        self.reuseport = reuseport
 
     def start(self, loop):
         self.server = loop.run_until_complete(
-            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
         )
 
         for s in self.server.sockets:
@@ -209,11 +215,12 @@ class UnixStreamServer(StreamServer):
 
 
 class WebsocketsServer(object):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         self.host = host
         self.port = port
         self.handler = handler
         self.logger = logger
+        self.reuseport = reuseport
 
     def start(self, loop):
         import websockets.server
@@ -224,6 +231,7 @@ class WebsocketsServer(object):
                 self.host,
                 self.port,
                 ping_interval=None,
+                reuse_port=self.reuseport,
             )
         )
 
@@ -262,14 +270,26 @@ class AsyncServer(object):
         self.loop = None
         self.run_tasks = []
 
-    def start_tcp_server(self, host, port):
-        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     def start_unix_server(self, path):
         self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-    def start_websocket_server(self, host, port):
-        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     async def _client_handler(self, socket):
         address = socket.address
@@ -368,8 +388,7 @@
 
         self._serve_forever(tasks)
 
-        if sys.version_info >= (3, 6):
-            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
         self.loop.close()
 
         queue = multiprocessing.Queue()
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
@@ -395,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
         # Use specified regex
         return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
     elif progress.startswith("custom:"):
-        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        # Use a custom progress handler that was injected via other means
        import functools
        from types import ModuleType
 
@@ -741,7 +743,7 @@ def _exec_task(fn, task, d, quieterr):
 
         if quieterr:
             if not handled:
-                logger.warning(repr(exc))
+                logger.warning(str(exc))
             event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
         else:
             errprinted = errchk.triggered
@@ -750,7 +752,7 @@
             if verboseStdoutLogging or handled:
                 errprinted = True
             if not handled:
-                logger.error(repr(exc))
+                logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
             return 1
 
@@ -930,9 +932,13 @@ def add_tasks(tasklist, d):
     # don't assume holding a reference
     d.setVar('_task_deps', task_deps)
 
+def ensure_task_prefix(name):
+    if name[:3] != "do_":
+        name = "do_" + name
+    return name
+
 def addtask(task, before, after, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     d.setVarFlag(task, "task", 1)
     bbtasks = d.getVar('__BBTASKS', False) or []
@@ -944,19 +950,20 @@ def addtask(task, before, after, d):
     if after is not None:
         # set up deps for function
         for entry in after.split():
+            entry = ensure_task_prefix(entry)
             if entry not in existing:
                 existing.append(entry)
         d.setVarFlag(task, "deps", existing)
     if before is not None:
         # set up things that depend on this func
         for entry in before.split():
+            entry = ensure_task_prefix(entry)
             existing = d.getVarFlag(entry, "deps", False) or []
             if task not in existing:
                 d.setVarFlag(entry, "deps", [task] + existing)
 
 def deltask(task, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     bbtasks = d.getVar('__BBTASKS', False) or []
     if task in bbtasks:
@@ -1021,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
             chain.pop()
     follow_chain(task_start, task_end)
     return outtasks
+
+def listtasks(d):
+    """
+    Return the list of tasks in the current recipe.
+    """
+    return tuple(d.getVar('__BBTASKS', False) or ())
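With ensure_task_prefix() applied to the before/after entries as well as the task itself, bare and "do_"-prefixed names are now interchangeable everywhere. A sketch using the function's own signature (the task names are illustrative, d is a recipe datastore):

    from bb.build import addtask

    # Both calls now produce the same dependency edges:
    addtask("do_report", "do_build", "do_configure", d)
    addtask("report", "build", "configure", d)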
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 18d5574a31..2361c5684d 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -28,7 +28,7 @@ import shutil
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "155"
+__cache_version__ = "156"
 
 def getCacheFile(path, filename, mc, data_hash):
     mcspec = ''
@@ -395,7 +395,7 @@ class Cache(object):
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.mc = mc
-        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
+        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
         self.caches_array = caches_array
         self.cachedir = self.data.getVar("CACHE")
         self.clean = set()
@@ -441,7 +441,7 @@
         else:
             symlink = os.path.join(self.cachedir, "bb_cache.dat")
 
-        if os.path.exists(symlink):
+        if os.path.exists(symlink) or os.path.islink(symlink):
             bb.utils.remove(symlink)
         try:
             os.symlink(os.path.basename(self.cachefile), symlink)
@@ -779,25 +779,6 @@ class MulticonfigCache(Mapping):
         for k in self.__caches:
             yield k
 
-def init(cooker):
-    """
-    The Objective: Cache the minimum amount of data possible yet get to the
-    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
-    To do this, we intercept getVar calls and only cache the variables we see
-    being accessed. We rely on the cache getVar calls being made for all
-    variables bitbake might need to use to reach this stage. For each cached
-    file we need to track:
-
-    * Its mtime
-    * The mtimes of all its dependencies
-    * Whether it caused a parse.SkipRecipe exception
-
-    Files causing parsing errors are evicted from the cache.
-
-    """
-    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
-
 
 class CacheData(object):
     """
@@ -866,6 +847,16 @@ class MultiProcessCache(object):
             data = [{}]
         return data
 
+    def clear_cache(self):
+        if not self.cachefile:
+            bb.fatal("Can't clear invalid cachefile")
+
+        self.cachedata = self.create_cachedata()
+        self.cachedata_extras = self.create_cachedata()
+        with bb.utils.fileslocked([self.cachefile + ".lock"]):
+            bb.utils.remove(self.cachefile)
+            bb.utils.remove(self.cachefile + "-*")
+
     def save_extras(self):
         if not self.cachefile:
             return
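The new clear_cache() resets both in-memory halves and deletes the cache file plus its per-process "-*" siblings while holding the ".lock" file. A sketch of a subclass that would inherit it (the names are illustrative, not an existing cache):

    from bb.cache import MultiProcessCache

    class HypotheticalCache(MultiProcessCache):
        cache_file_name = "hypothetical.dat"
        CACHE_VERSION = 1

    cache = HypotheticalCache()
    # Once the instance is bound to a cache directory (so cache.cachefile
    # is set), cache.clear_cache() wipes memory and disk state together.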
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 557793d366..3fb39a303e 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -142,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
 
         checksums.sort(key=operator.itemgetter(1))
         return checksums
+
+class RevisionsCache(MultiProcessCache):
+    cache_file_name = "local_srcrevisions.dat"
+    CACHE_VERSION = 1
+
+    def __init__(self):
+        MultiProcessCache.__init__(self)
+
+    def get_revs(self):
+        return self.cachedata[0]
+
+    def get_rev(self, k):
+        if k in self.cachedata_extras[0]:
+            return self.cachedata_extras[0][k]
+        if k in self.cachedata[0]:
+            return self.cachedata[0][k]
+        return None
+
+    def set_rev(self, k, v):
+        self.cachedata[0][k] = v
+        self.cachedata_extras[0][k] = v
+
+    def merge_data(self, source, dest):
+        for h in source[0]:
+            dest[0][h] = source[0][h]
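RevisionsCache reuses the MultiProcessCache plumbing: set_rev() writes to both the shared data and the per-process extras (folded back in via merge_data()), and get_rev() checks the extras first so a process always sees its own writes. A usage sketch (the key and revision values are illustrative):

    from bb.checksum import RevisionsCache

    revs = RevisionsCache()
    revs.set_rev("git://example.com/repo.git;branch=main", "deadbeef")
    assert revs.get_rev("git://example.com/repo.git;branch=main") == "deadbeef"
    assert revs.get_rev("missing-key") is None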
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..4f70cf7fe7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -69,12 +69,25 @@ def add_module_functions(fn, functions, namespace):
         name = "%s.%s" % (namespace, f)
         parser = PythonParser(name, logger)
         try:
-            parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
+            parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f, func=functions[f])
             #bb.warn("Cached %s" % f)
         except KeyError:
-            lines, lineno = inspect.getsourcelines(functions[f])
+            try:
+                targetfn = inspect.getsourcefile(functions[f])
+            except TypeError:
+                # Builtin
+                continue
+            if fn != targetfn:
+                # Skip references to other modules outside this file
+                #bb.warn("Skipping %s" % name)
+                continue
+            try:
+                lines, lineno = inspect.getsourcelines(functions[f])
+            except TypeError:
+                # Builtin
+                continue
             src = "".join(lines)
-            parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
+            parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f, func=functions[f])
             #bb.warn("Not cached %s" % f)
         execs = parser.execs.copy()
         # Expand internal module exec references
@@ -82,14 +95,17 @@ def add_module_functions(fn, functions, namespace):
             if e in functions:
                 execs.remove(e)
                 execs.add(namespace + "." + e)
-        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+        visitorcode = None
+        if hasattr(functions[f], 'visitorcode'):
+            visitorcode = getattr(functions[f], "visitorcode")
+        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
         #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
 
 def update_module_dependencies(d):
     for mod in modulecode_deps:
         excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
         if excludes:
-            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
 
 # A custom getstate/setstate using tuples is actually worth 15% cachesize by
 # avoiding duplication of the attribute names!
@@ -112,21 +128,22 @@ class SetCache(object):
 codecache = SetCache()
 
 class pythonCacheLine(object):
-    def __init__(self, refs, execs, contains):
+    def __init__(self, refs, execs, contains, extra):
         self.refs = codecache.internSet(refs)
         self.execs = codecache.internSet(execs)
         self.contains = {}
         for c in contains:
             self.contains[c] = codecache.internSet(contains[c])
+        self.extra = extra
 
     def __getstate__(self):
-        return (self.refs, self.execs, self.contains)
+        return (self.refs, self.execs, self.contains, self.extra)
 
     def __setstate__(self, state):
-        (refs, execs, contains) = state
-        self.__init__(refs, execs, contains)
+        (refs, execs, contains, extra) = state
+        self.__init__(refs, execs, contains, extra)
     def __hash__(self):
-        l = (hash(self.refs), hash(self.execs))
+        l = (hash(self.refs), hash(self.execs), hash(self.extra))
         for c in sorted(self.contains.keys()):
             l = l + (c, hash(self.contains[c]))
         return hash(l)
@@ -155,7 +172,7 @@ class CodeParserCache(MultiProcessCache):
     # so that an existing cache gets invalidated. Additionally you'll need
     # to increment __cache_version__ in cache.py in order to ensure that old
     # recipe caches don't trigger "Taskhash mismatch" errors.
-    CACHE_VERSION = 11
+    CACHE_VERSION = 14
 
     def __init__(self):
         MultiProcessCache.__init__(self)
@@ -169,8 +186,8 @@ class CodeParserCache(MultiProcessCache):
         self.pythoncachelines = {}
         self.shellcachelines = {}
 
-    def newPythonCacheLine(self, refs, execs, contains):
-        cacheline = pythonCacheLine(refs, execs, contains)
+    def newPythonCacheLine(self, refs, execs, contains, extra):
+        cacheline = pythonCacheLine(refs, execs, contains, extra)
         h = hash(cacheline)
         if h in self.pythoncachelines:
             return self.pythoncachelines[h]
@@ -255,7 +272,15 @@ class PythonParser():
 
     def visit_Call(self, node):
         name = self.called_node_name(node.func)
-        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
+        if name and name in modulecode_deps and modulecode_deps[name][5]:
+            visitorcode = modulecode_deps[name][5]
+            contains, execs, warn = visitorcode(name, node.args)
+            for i in contains:
+                self.contains[i] = contains[i]
+            self.execs |= execs
+            if warn:
+                self.warn(node.func, warn)
+        elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
             if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
                 varname = node.args[0].value
                 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
@@ -323,7 +348,7 @@ class PythonParser():
     # For the python module code it is expensive to have the function text so it is
     # uses a different fixedhash to cache against. We can take the hit on obtaining the
     # text if it isn't in the cache.
-    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
+    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None, func=None):
         if not fixedhash and (not node or not node.strip()):
             return
 
@@ -338,6 +363,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncache[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+            self.extra = codeparsercache.pythoncache[h].extra
             return
 
        if h in codeparsercache.pythoncacheextras:
@@ -346,6 +372,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncacheextras[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+            self.extra = codeparsercache.pythoncacheextras[h].extra
             return
 
         if fixedhash and not node:
@@ -363,9 +390,16 @@ class PythonParser():
             if n.__class__.__name__ == "Call":
                 self.visit_Call(n)
 
+        if func is not None:
+            self.references |= getattr(func, "bb_vardeps", set())
+            self.references -= getattr(func, "bb_vardepsexclude", set())
+
         self.execs.update(self.var_execs)
+        self.extra = None
+        if fixedhash:
+            self.extra = bbhash(str(node))
 
         codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
 
 class ShellParser():
     def __init__(self, name, log):
@@ -484,19 +518,34 @@ class ShellParser():
         """
 
         words = list(words)
-        for word in list(words):
+        for word in words:
             wtree = pyshlex.make_wordtree(word[1])
             for part in wtree:
                 if not isinstance(part, list):
                     continue
 
-                if part[0] in ('`', '$('):
-                    command = pyshlex.wordtree_as_string(part[1:-1])
-                    self._parse_shell(command)
-
-                if word[0] in ("cmd_name", "cmd_word"):
-                    if word in words:
-                        words.remove(word)
+                candidates = [part]
+
+                # If command is of type:
+                #
+                #   var="... $(cmd [...]) ..."
+                #
+                # Then iterate on what's between the quotes and if we find a
+                # list, make that what we check for below.
+                if len(part) >= 3 and part[0] == '"':
+                    for p in part[1:-1]:
+                        if isinstance(p, list):
+                            candidates.append(p)
+
+                for candidate in candidates:
+                    if len(candidate) >= 2:
+                        if candidate[0] in ('`', '$('):
+                            command = pyshlex.wordtree_as_string(candidate[1:-1])
+                            self._parse_shell(command)
+
+                if word[0] in ("cmd_name", "cmd_word"):
+                    if word in words:
+                        words.remove(word)
 
         usetoken = False
         for word in words:
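PythonParser now honours optional attributes on python library functions: bb_vardeps and bb_vardepsexclude adjust the variable references recorded for a function, and visitorcode, when set, replaces the generic visit_Call handling with a callback returning (contains, execs, warn). A hedged sketch of what annotating a function could look like (all names here are illustrative, not an existing API surface beyond the attributes shown in the diff):

    def my_helper(d):
        return d.getVar("SOME_VAR")

    # Extra/suppressed variable dependencies picked up by PythonParser:
    my_helper.bb_vardeps = {"EXTRA_VAR"}
    my_helper.bb_vardepsexclude = {"NOISY_VAR"}

    def my_visitor(name, args):
        # Must return (contains, execs, warn) as consumed in visit_Call.
        return {}, set(), None

    my_helper.visitorcode = my_visitor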
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 1fcb9bf14c..59a979ee90 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -24,6 +24,7 @@ import io
 import bb.event
 import bb.cooker
 import bb.remotedata
+import bb.parse
 
 class DataStoreConnectionHandle(object):
     def __init__(self, dsindex=0):
@@ -108,7 +109,7 @@ class Command:
 
     def runAsyncCommand(self, _, process_server, halt):
         try:
-            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
+            if self.cooker.state in (bb.cooker.State.ERROR, bb.cooker.State.SHUTDOWN, bb.cooker.State.FORCE_SHUTDOWN):
                 # updateCache will trigger a shutdown of the parser
                 # and then raise BBHandledException triggering an exit
                 self.cooker.updateCache()
@@ -118,7 +119,7 @@ class Command:
                 (command, options) = cmd
                 commandmethod = getattr(CommandsAsync, command)
                 needcache = getattr( commandmethod, "needcache" )
-                if needcache and self.cooker.state != bb.cooker.state.running:
+                if needcache and self.cooker.state != bb.cooker.State.RUNNING:
                     self.cooker.updateCache()
                     return True
                 else:
@@ -142,14 +143,14 @@ class Command:
             return bb.server.process.idleFinish(traceback.format_exc())
 
     def finishAsyncCommand(self, msg=None, code=None):
+        self.cooker.finishcommand()
+        self.process_server.clear_async_cmd()
         if msg or msg == "":
             bb.event.fire(CommandFailed(msg), self.cooker.data)
         elif code:
             bb.event.fire(CommandExit(code), self.cooker.data)
         else:
             bb.event.fire(CommandCompleted(), self.cooker.data)
-        self.cooker.finishcommand()
-        self.process_server.clear_async_cmd()
 
     def reset(self):
         if self.remotedatastores:
@@ -310,7 +311,7 @@ class CommandsSync:
     def revalidateCaches(self, command, params):
         """Called by UI clients when metadata may have changed"""
         command.cooker.revalidateCaches()
-    parseConfiguration.needconfig = False
+    revalidateCaches.needconfig = False
 
     def getRecipes(self, command, params):
         try:
@@ -420,15 +421,30 @@ class CommandsSync:
             return command.cooker.recipecaches[mc].pkg_dp
     getDefaultPreference.readonly = True
 
+
     def getSkippedRecipes(self, command, params):
+        """
+        Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
+
+        Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
+
+        :param command: Internally used parameter.
+        :param params: Parameter array. params[0] is multiconfig/mc name. If not given, then default mc '' is assumed.
+        :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
+        """
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+
         # Return list sorted by reverse priority order
         import bb.cache
         def sortkey(x):
             vfn, _ = x
-            realfn, _, mc = bb.cache.virtualfn2realfn(vfn)
-            return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn)
+            realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
+            return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
 
-        skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey))
+        skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
         return list(skipdict.items())
     getSkippedRecipes.readonly = True
 
@@ -582,6 +598,13 @@ class CommandsSync:
         return DataStoreConnectionHandle(idx)
     parseRecipeFile.readonly = True
 
+    def finalizeData(self, command, params):
+        newdata = command.cooker.data.createCopy()
+        bb.data.expandKeys(newdata)
+        bb.parse.ast.runAnonFuncs(newdata)
+        idx = command.remotedatastores.store(newdata)
+        return DataStoreConnectionHandle(idx)
+
 class CommandsAsync:
     """
     A class of asynchronous commands
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
index 88b0989322..2a64681c86 100644
--- a/bitbake/lib/bb/compress/lz4.py
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -13,7 +13,7 @@ def open(*args, **kwargs):
 
 class LZ4File(bb.compress._pipecompress.PipeFile):
     def get_compress(self):
-        return ["lz4c", "-z", "-c"]
+        return ["lz4", "-z", "-c"]
 
     def get_decompress(self):
-        return ["lz4c", "-d", "-c"]
+        return ["lz4", "-d", "-c"]
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..1810bcc604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,7 +8,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
-
+import enum
 import sys, os, glob, os.path, re, time
 import itertools
 import logging
@@ -17,7 +17,7 @@ import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
@@ -48,16 +48,15 @@ class CollectionError(bb.BBHandledException):
     Exception raised when layer configuration is incorrect
     """
 
-class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
-    @classmethod
-    def get_name(cls, code):
-        for name in dir(cls):
-            value = getattr(cls, name)
-            if type(value) == type(cls.initial) and value == code:
-                return name
-        raise ValueError("Invalid status code: %s" % code)
+class State(enum.Enum):
+    INITIAL = 0,
+    PARSING = 1,
+    RUNNING = 2,
+    SHUTDOWN = 3,
+    FORCE_SHUTDOWN = 4,
+    STOPPED = 5,
+    ERROR = 6
 
 
 class SkippedPackage:
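Replacing the hand-rolled integer class with enum.Enum makes the reverse lookup that get_name() implemented a built-in: every member carries .name and .value and compares by identity. A standalone sketch (values simplified relative to the diff):

    import enum

    class State(enum.Enum):
        INITIAL = 0
        RUNNING = 2

    s = State.RUNNING
    print(s.name)              # "RUNNING", what get_name() used to compute
    print(s is State.RUNNING)  # True; identity instead of magic-int equality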
@@ -134,7 +133,8 @@ class BBCooker: | |||
134 | self.baseconfig_valid = False | 133 | self.baseconfig_valid = False |
135 | self.parsecache_valid = False | 134 | self.parsecache_valid = False |
136 | self.eventlog = None | 135 | self.eventlog = None |
137 | self.skiplist = {} | 136 | # The skiplists, one per multiconfig |
137 | self.skiplist_by_mc = defaultdict(dict) | ||
138 | self.featureset = CookerFeatures() | 138 | self.featureset = CookerFeatures() |
139 | if featureSet: | 139 | if featureSet: |
140 | for f in featureSet: | 140 | for f in featureSet: |
@@ -180,7 +180,7 @@ class BBCooker: | |||
180 | pass | 180 | pass |
181 | 181 | ||
182 | self.command = bb.command.Command(self, self.process_server) | 182 | self.command = bb.command.Command(self, self.process_server) |
183 | self.state = state.initial | 183 | self.state = State.INITIAL |
184 | 184 | ||
185 | self.parser = None | 185 | self.parser = None |
186 | 186 | ||
@@ -226,23 +226,22 @@ class BBCooker: | |||
226 | bb.warn("Cooker received SIGTERM, shutting down...") | 226 | bb.warn("Cooker received SIGTERM, shutting down...") |
227 | elif signum == signal.SIGHUP: | 227 | elif signum == signal.SIGHUP: |
228 | bb.warn("Cooker received SIGHUP, shutting down...") | 228 | bb.warn("Cooker received SIGHUP, shutting down...") |
229 | self.state = state.forceshutdown | 229 | self.state = State.FORCE_SHUTDOWN |
230 | bb.event._should_exit.set() | 230 | bb.event._should_exit.set() |
231 | 231 | ||
232 | def setFeatures(self, features): | 232 | def setFeatures(self, features): |
233 | # we only accept a new feature set if we're in state initial, so we can reset without problems | 233 | # we only accept a new feature set if we're in state initial, so we can reset without problems |
234 | if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]: | 234 | if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]: |
235 | raise Exception("Illegal state for feature set change") | 235 | raise Exception("Illegal state for feature set change") |
236 | original_featureset = list(self.featureset) | 236 | original_featureset = list(self.featureset) |
237 | for feature in features: | 237 | for feature in features: |
238 | self.featureset.setFeature(feature) | 238 | self.featureset.setFeature(feature) |
239 | bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) | 239 | bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) |
240 | if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"): | 240 | if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"): |
241 | self.reset() | 241 | self.reset() |
242 | 242 | ||
243 | def initConfigurationData(self): | 243 | def initConfigurationData(self): |
244 | 244 | self.state = State.INITIAL | |
245 | self.state = state.initial | ||
246 | self.caches_array = [] | 245 | self.caches_array = [] |
247 | 246 | ||
248 | sys.path = self.orig_syspath.copy() | 247 | sys.path = self.orig_syspath.copy() |
@@ -281,7 +280,6 @@ class BBCooker: | |||
281 | self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) | 280 | self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) |
282 | self.databuilder.parseBaseConfiguration() | 281 | self.databuilder.parseBaseConfiguration() |
283 | self.data = self.databuilder.data | 282 | self.data = self.databuilder.data |
284 | self.data_hash = self.databuilder.data_hash | ||
285 | self.extraconfigdata = {} | 283 | self.extraconfigdata = {} |
286 | 284 | ||
287 | eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG") | 285 | eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG") |
@@ -315,13 +313,19 @@ class BBCooker: | |||
315 | dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" | 313 | dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" |
316 | upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None | 314 | upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None |
317 | if upstream: | 315 | if upstream: |
318 | import socket | ||
319 | try: | 316 | try: |
320 | sock = socket.create_connection(upstream.split(":"), 5) | 317 | with hashserv.create_client(upstream) as client: |
321 | sock.close() | 318 | client.ping() |
322 | except socket.error as e: | 319 | except ImportError as e: |
323 | bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s" | 320 | bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module: |
321 | %s | ||
322 | Please install the needed module on the build host, or use an environment containing it (e.g. a pip venv or OpenEmbedded's buildtools tarball). ||
323 | You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts.""" | ||
324 | % (upstream, repr(e))) | ||
325 | except ConnectionError as e: | ||
326 | bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s" | ||
324 | % (upstream, repr(e))) | 327 | % (upstream, repr(e))) |
328 | upstream = None | ||
325 | 329 | ||
326 | self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") | 330 | self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") |
327 | self.hashserv = hashserv.create_server( | 331 | self.hashserv = hashserv.create_server( |
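The probe for BB_HASHSERVE_UPSTREAM moves from a bare socket connect to the hashserv client API. A minimal sketch of the new pattern, assuming hashserv (BitBake's bundled module) is importable and that create_client() returns a context-managed client with a ping() method, as the hunk uses it:

def probe_hash_equivalence(upstream):
    # Ping the upstream hash equivalence server; degrade gracefully if
    # it is unreachable, and fail hard only when the module is missing.
    try:
        import hashserv  # BitBake's own module; assumed on sys.path
        with hashserv.create_client(upstream) as client:
            client.ping()
        return upstream      # reachable: keep using the upstream
    except ImportError as e:
        raise RuntimeError("hashserv python module missing: %r" % e)
    except ConnectionError:
        return None          # unreachable: drop the upstream setting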
@@ -370,6 +374,11 @@ class BBCooker: | |||
370 | if not clean: | 374 | if not clean: |
371 | bb.parse.BBHandler.cached_statements = {} | 375 | bb.parse.BBHandler.cached_statements = {} |
372 | 376 | ||
377 | # If writes were made to any of the data stores, we need to recalculate the data | ||
378 | # store cache | ||
379 | if hasattr(self, "databuilder"): | ||
380 | self.databuilder.calc_datastore_hashes() | ||
381 | |||
373 | def parseConfiguration(self): | 382 | def parseConfiguration(self): |
374 | self.updateCacheSync() | 383 | self.updateCacheSync() |
375 | 384 | ||
@@ -612,8 +621,8 @@ class BBCooker: | |||
612 | localdata = {} | 621 | localdata = {} |
613 | 622 | ||
614 | for mc in self.multiconfigs: | 623 | for mc in self.multiconfigs: |
615 | taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete) | 624 | taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete) |
616 | localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) | 625 | localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc]) |
617 | bb.data.expandKeys(localdata[mc]) | 626 | bb.data.expandKeys(localdata[mc]) |
618 | 627 | ||
619 | current = 0 | 628 | current = 0 |
@@ -680,14 +689,14 @@ class BBCooker: | |||
680 | bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) | 689 | bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) |
681 | return taskdata, runlist | 690 | return taskdata, runlist |
682 | 691 | ||
683 | def prepareTreeData(self, pkgs_to_build, task): | 692 | def prepareTreeData(self, pkgs_to_build, task, halt=False): |
684 | """ | 693 | """ |
685 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | 694 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build |
686 | """ | 695 | """ |
687 | 696 | ||
688 | # We set halt to False here to prevent unbuildable targets raising | 697 | # We set halt to False here to prevent unbuildable targets raising |
689 | # an exception when we're just generating data | 698 | # an exception when we're just generating data |
690 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) | 699 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True) |
691 | 700 | ||
692 | return runlist, taskdata | 701 | return runlist, taskdata |
693 | 702 | ||
@@ -701,7 +710,7 @@ class BBCooker: | |||
701 | if not task.startswith("do_"): | 710 | if not task.startswith("do_"): |
702 | task = "do_%s" % task | 711 | task = "do_%s" % task |
703 | 712 | ||
704 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) | 713 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True) |
705 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) | 714 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) |
706 | rq.rqdata.prepare() | 715 | rq.rqdata.prepare() |
707 | return self.buildDependTree(rq, taskdata) | 716 | return self.buildDependTree(rq, taskdata) |
@@ -896,10 +905,11 @@ class BBCooker: | |||
896 | 905 | ||
897 | depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) | 906 | depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) |
898 | 907 | ||
899 | with open('pn-buildlist', 'w') as f: | 908 | pns = depgraph["pn"].keys() |
900 | for pn in depgraph["pn"]: | 909 | if pns: |
901 | f.write(pn + "\n") | 910 | with open('pn-buildlist', 'w') as f: |
902 | logger.info("PN build list saved to 'pn-buildlist'") | 911 | f.write("%s\n" % "\n".join(sorted(pns))) |
912 | logger.info("PN build list saved to 'pn-buildlist'") | ||
903 | 913 | ||
904 | # Remove old format output files to ensure no confusion with stale data | 914 | # Remove old format output files to ensure no confusion with stale data |
905 | try: | 915 | try: |
@@ -933,7 +943,7 @@ class BBCooker: | |||
933 | for mc in self.multiconfigs: | 943 | for mc in self.multiconfigs: |
934 | # First get list of recipes, including skipped | 944 | # First get list of recipes, including skipped |
935 | recipefns = list(self.recipecaches[mc].pkg_fn.keys()) | 945 | recipefns = list(self.recipecaches[mc].pkg_fn.keys()) |
936 | recipefns.extend(self.skiplist.keys()) | 946 | recipefns.extend(self.skiplist_by_mc[mc].keys()) |
937 | 947 | ||
938 | # Work out list of bbappends that have been applied | 948 | # Work out list of bbappends that have been applied |
939 | applied_appends = [] | 949 | applied_appends = [] |
@@ -952,13 +962,7 @@ class BBCooker: | |||
952 | '\n '.join(appends_without_recipes[mc]))) | 962 | '\n '.join(appends_without_recipes[mc]))) |
953 | 963 | ||
954 | if msgs: | 964 | if msgs: |
955 | msg = "\n".join(msgs) | 965 | bb.fatal("\n".join(msgs)) |
956 | warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \ | ||
957 | False) or "no" | ||
958 | if warn_only.lower() in ("1", "yes", "true"): | ||
959 | bb.warn(msg) | ||
960 | else: | ||
961 | bb.fatal(msg) | ||
962 | 966 | ||
963 | def handlePrefProviders(self): | 967 | def handlePrefProviders(self): |
964 | 968 | ||
@@ -1338,7 +1342,7 @@ class BBCooker: | |||
1338 | self.buildSetVars() | 1342 | self.buildSetVars() |
1339 | self.reset_mtime_caches() | 1343 | self.reset_mtime_caches() |
1340 | 1344 | ||
1341 | bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) | 1345 | bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array) |
1342 | 1346 | ||
1343 | layername = self.collections[mc].calc_bbfile_priority(fn)[2] | 1347 | layername = self.collections[mc].calc_bbfile_priority(fn)[2] |
1344 | infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) | 1348 | infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) |
@@ -1399,11 +1403,11 @@ class BBCooker: | |||
1399 | 1403 | ||
1400 | msg = None | 1404 | msg = None |
1401 | interrupted = 0 | 1405 | interrupted = 0 |
1402 | if halt or self.state == state.forceshutdown: | 1406 | if halt or self.state == State.FORCE_SHUTDOWN: |
1403 | rq.finish_runqueue(True) | 1407 | rq.finish_runqueue(True) |
1404 | msg = "Forced shutdown" | 1408 | msg = "Forced shutdown" |
1405 | interrupted = 2 | 1409 | interrupted = 2 |
1406 | elif self.state == state.shutdown: | 1410 | elif self.state == State.SHUTDOWN: |
1407 | rq.finish_runqueue(False) | 1411 | rq.finish_runqueue(False) |
1408 | msg = "Stopped build" | 1412 | msg = "Stopped build" |
1409 | interrupted = 1 | 1413 | interrupted = 1 |
@@ -1459,7 +1463,6 @@ class BBCooker: | |||
1459 | 1463 | ||
1460 | if t in task or getAllTaskSignatures: | 1464 | if t in task or getAllTaskSignatures: |
1461 | try: | 1465 | try: |
1462 | rq.rqdata.prepare_task_hash(tid) | ||
1463 | sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) | 1466 | sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) |
1464 | except KeyError: | 1467 | except KeyError: |
1465 | sig.append(self.getTaskSignatures(target, [t])[0]) | 1468 | sig.append(self.getTaskSignatures(target, [t])[0]) |
@@ -1474,12 +1477,12 @@ class BBCooker: | |||
1474 | def buildTargetsIdle(server, rq, halt): | 1477 | def buildTargetsIdle(server, rq, halt): |
1475 | msg = None | 1478 | msg = None |
1476 | interrupted = 0 | 1479 | interrupted = 0 |
1477 | if halt or self.state == state.forceshutdown: | 1480 | if halt or self.state == State.FORCE_SHUTDOWN: |
1478 | bb.event._should_exit.set() | 1481 | bb.event._should_exit.set() |
1479 | rq.finish_runqueue(True) | 1482 | rq.finish_runqueue(True) |
1480 | msg = "Forced shutdown" | 1483 | msg = "Forced shutdown" |
1481 | interrupted = 2 | 1484 | interrupted = 2 |
1482 | elif self.state == state.shutdown: | 1485 | elif self.state == State.SHUTDOWN: |
1483 | rq.finish_runqueue(False) | 1486 | rq.finish_runqueue(False) |
1484 | msg = "Stopped build" | 1487 | msg = "Stopped build" |
1485 | interrupted = 1 | 1488 | interrupted = 1 |
@@ -1574,7 +1577,7 @@ class BBCooker: | |||
1574 | 1577 | ||
1575 | 1578 | ||
1576 | def updateCacheSync(self): | 1579 | def updateCacheSync(self): |
1577 | if self.state == state.running: | 1580 | if self.state == State.RUNNING: |
1578 | return | 1581 | return |
1579 | 1582 | ||
1580 | if not self.baseconfig_valid: | 1583 | if not self.baseconfig_valid: |
@@ -1584,19 +1587,19 @@ class BBCooker: | |||
1584 | 1587 | ||
1585 | # This is called for all async commands when self.state != running | 1588 | # This is called for all async commands when self.state != running |
1586 | def updateCache(self): | 1589 | def updateCache(self): |
1587 | if self.state == state.running: | 1590 | if self.state == State.RUNNING: |
1588 | return | 1591 | return |
1589 | 1592 | ||
1590 | if self.state in (state.shutdown, state.forceshutdown, state.error): | 1593 | if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR): |
1591 | if hasattr(self.parser, 'shutdown'): | 1594 | if hasattr(self.parser, 'shutdown'): |
1592 | self.parser.shutdown(clean=False) | 1595 | self.parser.shutdown(clean=False) |
1593 | self.parser.final_cleanup() | 1596 | self.parser.final_cleanup() |
1594 | raise bb.BBHandledException() | 1597 | raise bb.BBHandledException() |
1595 | 1598 | ||
1596 | if self.state != state.parsing: | 1599 | if self.state != State.PARSING: |
1597 | self.updateCacheSync() | 1600 | self.updateCacheSync() |
1598 | 1601 | ||
1599 | if self.state != state.parsing and not self.parsecache_valid: | 1602 | if self.state != State.PARSING and not self.parsecache_valid: |
1600 | bb.server.process.serverlog("Parsing started") | 1603 | bb.server.process.serverlog("Parsing started") |
1601 | self.parsewatched = {} | 1604 | self.parsewatched = {} |
1602 | 1605 | ||
@@ -1630,9 +1633,10 @@ class BBCooker: | |||
1630 | self.parser = CookerParser(self, mcfilelist, total_masked) | 1633 | self.parser = CookerParser(self, mcfilelist, total_masked) |
1631 | self._parsecache_set(True) | 1634 | self._parsecache_set(True) |
1632 | 1635 | ||
1633 | self.state = state.parsing | 1636 | self.state = State.PARSING |
1634 | 1637 | ||
1635 | if not self.parser.parse_next(): | 1638 | if not self.parser.parse_next(): |
1639 | bb.server.process.serverlog("Parsing completed") | ||
1636 | collectlog.debug("parsing complete") | 1640 | collectlog.debug("parsing complete") |
1637 | if self.parser.error: | 1641 | if self.parser.error: |
1638 | raise bb.BBHandledException() | 1642 | raise bb.BBHandledException() |
@@ -1640,7 +1644,7 @@ class BBCooker: | |||
1640 | self.handlePrefProviders() | 1644 | self.handlePrefProviders() |
1641 | for mc in self.multiconfigs: | 1645 | for mc in self.multiconfigs: |
1642 | self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) | 1646 | self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) |
1643 | self.state = state.running | 1647 | self.state = State.RUNNING |
1644 | 1648 | ||
1645 | # Send an event listing all stamps reachable after parsing | 1649 | # Send an event listing all stamps reachable after parsing |
1646 | # which the metadata may use to clean up stale data | 1650 | # which the metadata may use to clean up stale data |
@@ -1713,10 +1717,10 @@ class BBCooker: | |||
1713 | 1717 | ||
1714 | def shutdown(self, force=False): | 1718 | def shutdown(self, force=False): |
1715 | if force: | 1719 | if force: |
1716 | self.state = state.forceshutdown | 1720 | self.state = State.FORCE_SHUTDOWN |
1717 | bb.event._should_exit.set() | 1721 | bb.event._should_exit.set() |
1718 | else: | 1722 | else: |
1719 | self.state = state.shutdown | 1723 | self.state = State.SHUTDOWN |
1720 | 1724 | ||
1721 | if self.parser: | 1725 | if self.parser: |
1722 | self.parser.shutdown(clean=False) | 1726 | self.parser.shutdown(clean=False) |
@@ -1726,7 +1730,7 @@ class BBCooker: | |||
1726 | if hasattr(self.parser, 'shutdown'): | 1730 | if hasattr(self.parser, 'shutdown'): |
1727 | self.parser.shutdown(clean=False) | 1731 | self.parser.shutdown(clean=False) |
1728 | self.parser.final_cleanup() | 1732 | self.parser.final_cleanup() |
1729 | self.state = state.initial | 1733 | self.state = State.INITIAL |
1730 | bb.event._should_exit.clear() | 1734 | bb.event._should_exit.clear() |
1731 | 1735 | ||
1732 | def reset(self): | 1736 | def reset(self): |
@@ -1813,8 +1817,8 @@ class CookerCollectFiles(object): | |||
1813 | bb.event.fire(CookerExit(), eventdata) | 1817 | bb.event.fire(CookerExit(), eventdata) |
1814 | 1818 | ||
1815 | # We need to track where we look so that we can know when the cache is invalid. There | 1819 | # We need to track where we look so that we can know when the cache is invalid. There |
1816 | # is no nice way to do this, this is horrid. We intercept the os.listdir() | 1820 | # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir() |
1817 | # (or os.scandir() for python 3.6+) calls while we run glob(). | 1821 | # calls while we run glob(). |
1818 | origlistdir = os.listdir | 1822 | origlistdir = os.listdir |
1819 | if hasattr(os, 'scandir'): | 1823 | if hasattr(os, 'scandir'): |
1820 | origscandir = os.scandir | 1824 | origscandir = os.scandir |
@@ -2098,7 +2102,6 @@ class Parser(multiprocessing.Process): | |||
2098 | except Exception as exc: | 2102 | except Exception as exc: |
2099 | tb = sys.exc_info()[2] | 2103 | tb = sys.exc_info()[2] |
2100 | exc.recipe = filename | 2104 | exc.recipe = filename |
2101 | exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) | ||
2102 | return True, None, exc | 2105 | return True, None, exc |
2103 | # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown | 2106 | # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown |
2104 | # and for example a worker thread doesn't just exit on its own in response to | 2107 | # and for example a worker thread doesn't just exit on its own in response to |
@@ -2113,7 +2116,7 @@ class CookerParser(object): | |||
2113 | self.mcfilelist = mcfilelist | 2116 | self.mcfilelist = mcfilelist |
2114 | self.cooker = cooker | 2117 | self.cooker = cooker |
2115 | self.cfgdata = cooker.data | 2118 | self.cfgdata = cooker.data |
2116 | self.cfghash = cooker.data_hash | 2119 | self.cfghash = cooker.databuilder.data_hash |
2117 | self.cfgbuilder = cooker.databuilder | 2120 | self.cfgbuilder = cooker.databuilder |
2118 | 2121 | ||
2119 | # Accounting statistics | 2122 | # Accounting statistics |
@@ -2225,9 +2228,8 @@ class CookerParser(object): | |||
2225 | 2228 | ||
2226 | for process in self.processes: | 2229 | for process in self.processes: |
2227 | process.join() | 2230 | process.join() |
2228 | # Added in 3.7, cleans up zombies | 2231 | # clean up zombies |
2229 | if hasattr(process, "close"): | 2232 | process.close() |
2230 | process.close() | ||
2231 | 2233 | ||
2232 | bb.codeparser.parser_cache_save() | 2234 | bb.codeparser.parser_cache_save() |
2233 | bb.codeparser.parser_cache_savemerge() | 2235 | bb.codeparser.parser_cache_savemerge() |
@@ -2237,12 +2239,13 @@ class CookerParser(object): | |||
2237 | profiles = [] | 2239 | profiles = [] |
2238 | for i in self.process_names: | 2240 | for i in self.process_names: |
2239 | logfile = "profile-parse-%s.log" % i | 2241 | logfile = "profile-parse-%s.log" % i |
2240 | if os.path.exists(logfile): | 2242 | if os.path.exists(logfile) and os.path.getsize(logfile): |
2241 | profiles.append(logfile) | 2243 | profiles.append(logfile) |
2242 | 2244 | ||
2243 | pout = "profile-parse.log.processed" | 2245 | if profiles: |
2244 | bb.utils.process_profilelog(profiles, pout = pout) | 2246 | pout = "profile-parse.log.processed" |
2245 | print("Processed parsing statistics saved to %s" % (pout)) | 2247 | bb.utils.process_profilelog(profiles, pout = pout) |
2248 | print("Processed parsing statistics saved to %s" % (pout)) | ||
2246 | 2249 | ||
2247 | def final_cleanup(self): | 2250 | def final_cleanup(self): |
2248 | if self.syncthread: | 2251 | if self.syncthread: |
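The shutdown path now skips missing or empty per-worker profile logs before merging them. A self-contained sketch of the same guard; process_profilelog is the bb.utils helper already used in this hunk:

import os
import bb.utils  # assumed importable, as in the surrounding code

def process_parse_profiles(process_names):
    # Collect only profile logs that exist and are non-empty, then merge
    # them; an empty parse run no longer produces a bogus report file.
    profiles = []
    for name in process_names:
        logfile = "profile-parse-%s.log" % name
        if os.path.exists(logfile) and os.path.getsize(logfile):
            profiles.append(logfile)
    if profiles:
        pout = "profile-parse.log.processed"
        bb.utils.process_profilelog(profiles, pout=pout)
        print("Processed parsing statistics saved to %s" % pout)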
@@ -2299,8 +2302,12 @@ class CookerParser(object): | |||
2299 | return False | 2302 | return False |
2300 | except ParsingFailure as exc: | 2303 | except ParsingFailure as exc: |
2301 | self.error += 1 | 2304 | self.error += 1 |
2302 | logger.error('Unable to parse %s: %s' % | 2305 | |
2303 | (exc.recipe, bb.exceptions.to_string(exc.realexception))) | 2306 | exc_desc = str(exc) |
2307 | if isinstance(exc, SystemExit) and not isinstance(exc.code, str): | ||
2308 | exc_desc = 'Exited with "%d"' % exc.code | ||
2309 | |||
2310 | logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc)) | ||
2304 | self.shutdown(clean=False) | 2311 | self.shutdown(clean=False) |
2305 | return False | 2312 | return False |
2306 | except bb.parse.ParseError as exc: | 2313 | except bb.parse.ParseError as exc: |
@@ -2309,20 +2316,33 @@ class CookerParser(object): | |||
2309 | self.shutdown(clean=False, eventmsg=str(exc)) | 2316 | self.shutdown(clean=False, eventmsg=str(exc)) |
2310 | return False | 2317 | return False |
2311 | except bb.data_smart.ExpansionError as exc: | 2318 | except bb.data_smart.ExpansionError as exc: |
2319 | def skip_frames(f, fn_prefix): | ||
2320 | while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix): | ||
2321 | f = f.tb_next | ||
2322 | return f | ||
2323 | |||
2312 | self.error += 1 | 2324 | self.error += 1 |
2313 | bbdir = os.path.dirname(__file__) + os.sep | 2325 | bbdir = os.path.dirname(__file__) + os.sep |
2314 | etype, value, _ = sys.exc_info() | 2326 | etype, value, tb = sys.exc_info() |
2315 | tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) | 2327 | |
2328 | # Remove any frames where the code comes from bitbake. This | ||
2329 | # prevents deep (and pretty useless) backtraces for expansion error | ||
2330 | tb = skip_frames(tb, bbdir) | ||
2331 | cur = tb | ||
2332 | while cur: | ||
2333 | cur.tb_next = skip_frames(cur.tb_next, bbdir) | ||
2334 | cur = cur.tb_next | ||
2335 | |||
2316 | logger.error('ExpansionError during parsing %s', value.recipe, | 2336 | logger.error('ExpansionError during parsing %s', value.recipe, |
2317 | exc_info=(etype, value, tb)) | 2337 | exc_info=(etype, value, tb)) |
2318 | self.shutdown(clean=False) | 2338 | self.shutdown(clean=False) |
2319 | return False | 2339 | return False |
2320 | except Exception as exc: | 2340 | except Exception as exc: |
2321 | self.error += 1 | 2341 | self.error += 1 |
2322 | etype, value, tb = sys.exc_info() | 2342 | _, value, _ = sys.exc_info() |
2323 | if hasattr(value, "recipe"): | 2343 | if hasattr(value, "recipe"): |
2324 | logger.error('Unable to parse %s' % value.recipe, | 2344 | logger.error('Unable to parse %s' % value.recipe, |
2325 | exc_info=(etype, value, exc.traceback)) | 2345 | exc_info=sys.exc_info()) |
2326 | else: | 2346 | else: |
2327 | # Most likely, an exception occurred during raising an exception | 2347 | # Most likely, an exception occurred during raising an exception |
2328 | import traceback | 2348 | import traceback |
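With bb.exceptions gone, ExpansionError backtraces are trimmed by walking the real traceback chain and dropping frames whose source lives under the bitbake tree. A standalone sketch of that trimming; tb_next is writable from Python 3.7 onwards, which the new code relies on:

def trim_traceback(tb, fn_prefix):
    # Drop every frame whose file starts with fn_prefix, both at the
    # head of the chain and between remaining frames, exactly as
    # skip_frames() plus the rewrite loop above do.
    def skip_frames(f):
        while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
            f = f.tb_next
        return f

    tb = skip_frames(tb)
    cur = tb
    while cur:
        cur.tb_next = skip_frames(cur.tb_next)
        cur = cur.tb_next
    return tb

# Typical use inside an except block:
#   etype, value, tb = sys.exc_info()
#   logger.error(..., exc_info=(etype, value, trim_traceback(tb, bbdir)))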
@@ -2343,7 +2363,7 @@ class CookerParser(object): | |||
2343 | for virtualfn, info_array in result: | 2363 | for virtualfn, info_array in result: |
2344 | if info_array[0].skipped: | 2364 | if info_array[0].skipped: |
2345 | self.skipped += 1 | 2365 | self.skipped += 1 |
2346 | self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) | 2366 | self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0]) |
2347 | self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], | 2367 | self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], |
2348 | parsed=parsed, watcher = self.cooker.add_filewatch) | 2368 | parsed=parsed, watcher = self.cooker.add_filewatch) |
2349 | return True | 2369 | return True |
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py index 0649e40995..65c153a5bb 100644 --- a/bitbake/lib/bb/cookerdata.py +++ b/bitbake/lib/bb/cookerdata.py | |||
@@ -1,3 +1,4 @@ | |||
1 | |||
1 | # | 2 | # |
2 | # Copyright (C) 2003, 2004 Chris Larson | 3 | # Copyright (C) 2003, 2004 Chris Larson |
3 | # Copyright (C) 2003, 2004 Phil Blundell | 4 | # Copyright (C) 2003, 2004 Phil Blundell |
@@ -254,14 +255,21 @@ class CookerDataBuilder(object): | |||
254 | self.data = self.basedata | 255 | self.data = self.basedata |
255 | self.mcdata = {} | 256 | self.mcdata = {} |
256 | 257 | ||
258 | def calc_datastore_hashes(self): | ||
259 | data_hash = hashlib.sha256() | ||
260 | data_hash.update(self.data.get_hash().encode('utf-8')) | ||
261 | multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() | ||
262 | for config in multiconfig: | ||
263 | data_hash.update(self.mcdata[config].get_hash().encode('utf-8')) | ||
264 | self.data_hash = data_hash.hexdigest() | ||
265 | |||
257 | def parseBaseConfiguration(self, worker=False): | 266 | def parseBaseConfiguration(self, worker=False): |
258 | mcdata = {} | 267 | mcdata = {} |
259 | data_hash = hashlib.sha256() | ||
260 | try: | 268 | try: |
261 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) | 269 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) |
262 | 270 | ||
263 | if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker: | 271 | servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker |
264 | bb.fetch.fetcher_init(self.data) | 272 | bb.fetch.fetcher_init(self.data, servercontext) |
265 | bb.parse.init_parser(self.data) | 273 | bb.parse.init_parser(self.data) |
266 | 274 | ||
267 | bb.event.fire(bb.event.ConfigParsed(), self.data) | 275 | bb.event.fire(bb.event.ConfigParsed(), self.data) |
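calc_datastore_hashes(), added in this hunk, folds the hash of every configured datastore into one fingerprint so the parse cache can be invalidated when any of them changes. A minimal standalone sketch of the same computation; get_hash() is the DataSmart method shown later in this diff:

import hashlib

def calc_datastore_hashes(data, mcdata):
    # Chain the default datastore hash with one hash per multiconfig,
    # in BBMULTICONFIG order, into a single sha256 fingerprint.
    data_hash = hashlib.sha256()
    data_hash.update(data.get_hash().encode('utf-8'))
    for config in (data.getVar("BBMULTICONFIG") or "").split():
        data_hash.update(mcdata[config].get_hash().encode('utf-8'))
    return data_hash.hexdigest()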
@@ -279,7 +287,6 @@ class CookerDataBuilder(object): | |||
279 | bb.event.fire(bb.event.ConfigParsed(), self.data) | 287 | bb.event.fire(bb.event.ConfigParsed(), self.data) |
280 | 288 | ||
281 | bb.parse.init_parser(self.data) | 289 | bb.parse.init_parser(self.data) |
282 | data_hash.update(self.data.get_hash().encode('utf-8')) | ||
283 | mcdata[''] = self.data | 290 | mcdata[''] = self.data |
284 | 291 | ||
285 | multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() | 292 | multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() |
@@ -289,11 +296,9 @@ class CookerDataBuilder(object): | |||
289 | parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) | 296 | parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) |
290 | bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata) | 297 | bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata) |
291 | mcdata[config] = parsed_mcdata | 298 | mcdata[config] = parsed_mcdata |
292 | data_hash.update(parsed_mcdata.get_hash().encode('utf-8')) | ||
293 | if multiconfig: | 299 | if multiconfig: |
294 | bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data) | 300 | bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data) |
295 | 301 | ||
296 | self.data_hash = data_hash.hexdigest() | ||
297 | except bb.data_smart.ExpansionError as e: | 302 | except bb.data_smart.ExpansionError as e: |
298 | logger.error(str(e)) | 303 | logger.error(str(e)) |
299 | raise bb.BBHandledException() | 304 | raise bb.BBHandledException() |
@@ -328,6 +333,7 @@ class CookerDataBuilder(object): | |||
328 | for mc in mcdata: | 333 | for mc in mcdata: |
329 | self.mcdata[mc] = bb.data.createCopy(mcdata[mc]) | 334 | self.mcdata[mc] = bb.data.createCopy(mcdata[mc]) |
330 | self.data = self.mcdata[''] | 335 | self.data = self.mcdata[''] |
336 | self.calc_datastore_hashes() | ||
331 | 337 | ||
332 | def reset(self): | 338 | def reset(self): |
333 | # We may not have run parseBaseConfiguration() yet | 339 | # We may not have run parseBaseConfiguration() yet |
@@ -340,7 +346,7 @@ class CookerDataBuilder(object): | |||
340 | def _findLayerConf(self, data): | 346 | def _findLayerConf(self, data): |
341 | return findConfigFile("bblayers.conf", data) | 347 | return findConfigFile("bblayers.conf", data) |
342 | 348 | ||
343 | def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"): | 349 | def parseConfigurationFiles(self, prefiles, postfiles, mc = ""): |
344 | data = bb.data.createCopy(self.basedata) | 350 | data = bb.data.createCopy(self.basedata) |
345 | data.setVar("BB_CURRENT_MC", mc) | 351 | data.setVar("BB_CURRENT_MC", mc) |
346 | 352 | ||
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py index 505f42950f..f672a84451 100644 --- a/bitbake/lib/bb/data.py +++ b/bitbake/lib/bb/data.py | |||
@@ -293,7 +293,7 @@ def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_va | |||
293 | if key in mod_funcs: | 293 | if key in mod_funcs: |
294 | exclusions = set() | 294 | exclusions = set() |
295 | moddep = bb.codeparser.modulecode_deps[key] | 295 | moddep = bb.codeparser.modulecode_deps[key] |
296 | value = handle_contains("", moddep[3], exclusions, d) | 296 | value = handle_contains(moddep[4], moddep[3], exclusions, d) |
297 | return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value | 297 | return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value |
298 | 298 | ||
299 | if key[-1] == ']': | 299 | if key[-1] == ']': |
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py index 0128a5bb17..8e7dd98384 100644 --- a/bitbake/lib/bb/data_smart.py +++ b/bitbake/lib/bb/data_smart.py | |||
@@ -31,7 +31,7 @@ logger = logging.getLogger("BitBake.Data") | |||
31 | 31 | ||
32 | __setvar_keyword__ = [":append", ":prepend", ":remove"] | 32 | __setvar_keyword__ = [":append", ":prepend", ":remove"] |
33 | __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$') | 33 | __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$') |
34 | __expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}") | 34 | __expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}") |
35 | __expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}") | 35 | __expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}") |
36 | __whitespace_split__ = re.compile(r'(\s)') | 36 | __whitespace_split__ = re.compile(r'(\s)') |
37 | __override_regexp__ = re.compile(r'[a-z0-9]+') | 37 | __override_regexp__ = re.compile(r'[a-z0-9]+') |
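The variable-expansion regexp drops its non-greedy quantifier. Because '}' cannot occur inside the bracketed character class, the greedy form matches exactly the same spans while avoiding needless backtracking. A quick self-contained demonstration:

import re

# Greedy form from the new code; behaviour is unchanged for valid input.
expand_var = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
print(expand_var.findall("CC = ${TARGET_PREFIX}gcc ${CFLAGS}"))
# -> ['${TARGET_PREFIX}', '${CFLAGS}']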
@@ -106,52 +106,52 @@ class VariableParse: | |||
106 | self.contains = {} | 106 | self.contains = {} |
107 | 107 | ||
108 | def var_sub(self, match): | 108 | def var_sub(self, match): |
109 | key = match.group()[2:-1] | 109 | key = match.group()[2:-1] |
110 | if self.varname and key: | 110 | if self.varname and key: |
111 | if self.varname == key: | 111 | if self.varname == key: |
112 | raise Exception("variable %s references itself!" % self.varname) | 112 | raise Exception("variable %s references itself!" % self.varname) |
113 | var = self.d.getVarFlag(key, "_content") | 113 | var = self.d.getVarFlag(key, "_content") |
114 | self.references.add(key) | 114 | self.references.add(key) |
115 | if var is not None: | 115 | if var is not None: |
116 | return var | 116 | return var |
117 | else: | 117 | else: |
118 | return match.group() | 118 | return match.group() |
119 | 119 | ||
120 | def python_sub(self, match): | 120 | def python_sub(self, match): |
121 | if isinstance(match, str): | 121 | if isinstance(match, str): |
122 | code = match | 122 | code = match |
123 | else: | 123 | else: |
124 | code = match.group()[3:-1] | 124 | code = match.group()[3:-1] |
125 | 125 | ||
126 | # Do not run code that contains one or more unexpanded variables | 126 | # Do not run code that contains one or more unexpanded variables |
127 | # instead return the code with the characters we removed put back | 127 | # instead return the code with the characters we removed put back |
128 | if __expand_var_regexp__.findall(code): | 128 | if __expand_var_regexp__.findall(code): |
129 | return "${@" + code + "}" | 129 | return "${@" + code + "}" |
130 | 130 | ||
131 | if self.varname: | 131 | if self.varname: |
132 | varname = 'Var <%s>' % self.varname | 132 | varname = 'Var <%s>' % self.varname |
133 | else: | 133 | else: |
134 | varname = '<expansion>' | 134 | varname = '<expansion>' |
135 | codeobj = compile(code.strip(), varname, "eval") | 135 | codeobj = compile(code.strip(), varname, "eval") |
136 | 136 | ||
137 | parser = bb.codeparser.PythonParser(self.varname, logger) | 137 | parser = bb.codeparser.PythonParser(self.varname, logger) |
138 | parser.parse_python(code) | 138 | parser.parse_python(code) |
139 | if self.varname: | 139 | if self.varname: |
140 | vardeps = self.d.getVarFlag(self.varname, "vardeps") | 140 | vardeps = self.d.getVarFlag(self.varname, "vardeps") |
141 | if vardeps is None: | 141 | if vardeps is None: |
142 | parser.log.flush() | ||
143 | else: | ||
144 | parser.log.flush() | 142 | parser.log.flush() |
145 | self.references |= parser.references | 143 | else: |
146 | self.execs |= parser.execs | 144 | parser.log.flush() |
145 | self.references |= parser.references | ||
146 | self.execs |= parser.execs | ||
147 | 147 | ||
148 | for k in parser.contains: | 148 | for k in parser.contains: |
149 | if k not in self.contains: | 149 | if k not in self.contains: |
150 | self.contains[k] = parser.contains[k].copy() | 150 | self.contains[k] = parser.contains[k].copy() |
151 | else: | 151 | else: |
152 | self.contains[k].update(parser.contains[k]) | 152 | self.contains[k].update(parser.contains[k]) |
153 | value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) | 153 | value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) |
154 | return str(value) | 154 | return str(value) |
155 | 155 | ||
156 | class DataContext(dict): | 156 | class DataContext(dict): |
157 | excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe']) | 157 | excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe']) |
@@ -272,12 +272,9 @@ class VariableHistory(object): | |||
272 | return | 272 | return |
273 | if 'op' not in loginfo or not loginfo['op']: | 273 | if 'op' not in loginfo or not loginfo['op']: |
274 | loginfo['op'] = 'set' | 274 | loginfo['op'] = 'set' |
275 | if 'detail' in loginfo: | ||
276 | loginfo['detail'] = str(loginfo['detail']) | ||
277 | if 'variable' not in loginfo or 'file' not in loginfo: | 275 | if 'variable' not in loginfo or 'file' not in loginfo: |
278 | raise ValueError("record() missing variable or file.") | 276 | raise ValueError("record() missing variable or file.") |
279 | var = loginfo['variable'] | 277 | var = loginfo['variable'] |
280 | |||
281 | if var not in self.variables: | 278 | if var not in self.variables: |
282 | self.variables[var] = [] | 279 | self.variables[var] = [] |
283 | if not isinstance(self.variables[var], list): | 280 | if not isinstance(self.variables[var], list): |
@@ -336,7 +333,8 @@ class VariableHistory(object): | |||
336 | flag = '[%s] ' % (event['flag']) | 333 | flag = '[%s] ' % (event['flag']) |
337 | else: | 334 | else: |
338 | flag = '' | 335 | flag = '' |
339 | o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) | 336 | o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \ |
337 | (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail'])))) | ||
340 | if len(history) > 1: | 338 | if len(history) > 1: |
341 | o.write("# pre-expansion value:\n") | 339 | o.write("# pre-expansion value:\n") |
342 | o.write('# "%s"\n' % (commentVal)) | 340 | o.write('# "%s"\n' % (commentVal)) |
@@ -390,7 +388,7 @@ class VariableHistory(object): | |||
390 | if isset and event['op'] == 'set?': | 388 | if isset and event['op'] == 'set?': |
391 | continue | 389 | continue |
392 | isset = True | 390 | isset = True |
393 | items = d.expand(event['detail']).split() | 391 | items = d.expand(str(event['detail'])).split() |
394 | for item in items: | 392 | for item in items: |
395 | # This is a little crude but is belt-and-braces to avoid us | 393 | # This is a little crude but is belt-and-braces to avoid us |
396 | # having to handle every possible operation type specifically | 394 | # having to handle every possible operation type specifically |
@@ -582,12 +580,10 @@ class DataSmart(MutableMapping): | |||
582 | else: | 580 | else: |
583 | loginfo['op'] = keyword | 581 | loginfo['op'] = keyword |
584 | self.varhistory.record(**loginfo) | 582 | self.varhistory.record(**loginfo) |
585 | # todo make sure keyword is not __doc__ or __module__ | ||
586 | # pay the cookie monster | 583 | # pay the cookie monster |
587 | 584 | ||
588 | # more cookies for the cookie monster | 585 | # more cookies for the cookie monster |
589 | if ':' in var: | 586 | self._setvar_update_overrides(base, **loginfo) |
590 | self._setvar_update_overrides(base, **loginfo) | ||
591 | 587 | ||
592 | if base in self.overridevars: | 588 | if base in self.overridevars: |
593 | self._setvar_update_overridevars(var, value) | 589 | self._setvar_update_overridevars(var, value) |
@@ -640,6 +636,7 @@ class DataSmart(MutableMapping): | |||
640 | nextnew.update(vardata.contains.keys()) | 636 | nextnew.update(vardata.contains.keys()) |
641 | new = nextnew | 637 | new = nextnew |
642 | self.overrides = None | 638 | self.overrides = None |
639 | self.expand_cache = {} | ||
643 | 640 | ||
644 | def _setvar_update_overrides(self, var, **loginfo): | 641 | def _setvar_update_overrides(self, var, **loginfo): |
645 | # aka pay the cookie monster | 642 | # aka pay the cookie monster |
@@ -829,6 +826,8 @@ class DataSmart(MutableMapping): | |||
829 | value = copy.copy(local_var[flag]) | 826 | value = copy.copy(local_var[flag]) |
830 | elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: | 827 | elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: |
831 | value = copy.copy(local_var["_defaultval"]) | 828 | value = copy.copy(local_var["_defaultval"]) |
829 | elif "_defaultval_flag_"+flag in local_var and not noweakdefault: | ||
830 | value = copy.copy(local_var["_defaultval_flag_"+flag]) | ||
832 | 831 | ||
833 | 832 | ||
834 | if flag == "_content" and local_var is not None and ":append" in local_var and not parsing: | 833 | if flag == "_content" and local_var is not None and ":append" in local_var and not parsing: |
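The two added lines give flags the same weak-default mechanism that _content already had via _defaultval: a value stored under "_defaultval_flag_<flag>" is returned only when no explicit flag is set. A small sketch of the resulting lookup order; lookup_flag and its argument shapes are illustrative, not BitBake API:

def lookup_flag(local_var, flag, noweakdefault=False):
    # Explicit flag wins; otherwise fall back to the weak default
    # recorded under "_defaultval_flag_<flag>" unless weak defaults
    # are suppressed.
    if flag in local_var:
        return local_var[flag]
    if not noweakdefault:
        if flag == "_content" and "_defaultval" in local_var:
            return local_var["_defaultval"]
        key = "_defaultval_flag_" + flag
        if key in local_var:
            return local_var[key]
    return None

print(lookup_flag({"_defaultval_flag_doc": "weak doc"}, "doc"))  # -> weak doc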
@@ -920,6 +919,8 @@ class DataSmart(MutableMapping): | |||
920 | self.varhistory.record(**loginfo) | 919 | self.varhistory.record(**loginfo) |
921 | 920 | ||
922 | del self.dict[var][flag] | 921 | del self.dict[var][flag] |
922 | if ("_defaultval_flag_" + flag) in self.dict[var]: | ||
923 | del self.dict[var]["_defaultval_flag_" + flag] | ||
923 | 924 | ||
924 | def appendVarFlag(self, var, flag, value, **loginfo): | 925 | def appendVarFlag(self, var, flag, value, **loginfo): |
925 | loginfo['op'] = 'append' | 926 | loginfo['op'] = 'append' |
@@ -954,17 +955,22 @@ class DataSmart(MutableMapping): | |||
954 | flags = {} | 955 | flags = {} |
955 | 956 | ||
956 | if local_var: | 957 | if local_var: |
957 | for i in local_var: | 958 | for i, val in local_var.items(): |
958 | if i.startswith(("_", ":")) and not internalflags: | 959 | if i.startswith("_defaultval_flag_") and not internalflags: |
960 | i = i[len("_defaultval_flag_"):] | ||
961 | if i not in local_var: | ||
962 | flags[i] = val | ||
963 | elif i.startswith(("_", ":")) and not internalflags: | ||
959 | continue | 964 | continue |
960 | flags[i] = local_var[i] | 965 | else: |
966 | flags[i] = val | ||
967 | |||
961 | if expand and i in expand: | 968 | if expand and i in expand: |
962 | flags[i] = self.expand(flags[i], var + "[" + i + "]") | 969 | flags[i] = self.expand(flags[i], var + "[" + i + "]") |
963 | if len(flags) == 0: | 970 | if len(flags) == 0: |
964 | return None | 971 | return None |
965 | return flags | 972 | return flags |
966 | 973 | ||
967 | |||
968 | def delVarFlags(self, var, **loginfo): | 974 | def delVarFlags(self, var, **loginfo): |
969 | self.expand_cache = {} | 975 | self.expand_cache = {} |
970 | if not var in self.dict: | 976 | if not var in self.dict: |
@@ -1114,5 +1120,10 @@ class DataSmart(MutableMapping): | |||
1114 | value = d.getVar(i, False) or "" | 1120 | value = d.getVar(i, False) or "" |
1115 | data.update({i:value}) | 1121 | data.update({i:value}) |
1116 | 1122 | ||
1123 | moddeps = bb.codeparser.modulecode_deps | ||
1124 | for dep in sorted(moddeps): | ||
1125 | # Ignore visitor code, sort sets | ||
1126 | data.update({'moddep[%s]' % dep : [sorted(moddeps[dep][0]), sorted(moddeps[dep][1]), sorted(moddeps[dep][2]), sorted(moddeps[dep][3]), moddeps[dep][4]]}) | ||
1127 | |||
1117 | data_str = str([(k, data[k]) for k in sorted(data.keys())]) | 1128 | data_str = str([(k, data[k]) for k in sorted(data.keys())]) |
1118 | return hashlib.sha256(data_str.encode("utf-8")).hexdigest() | 1129 | return hashlib.sha256(data_str.encode("utf-8")).hexdigest() |
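get_hash() now mixes the module dependency data into the configuration hash, with every set sorted first so the result is deterministic across runs. A reduced sketch of that sort-then-stringify-then-digest pattern:

import hashlib

def stable_hash(data):
    # Sorting keys (and any set-valued entries before insertion) keeps
    # the string representation, and hence the digest, reproducible.
    data_str = str([(k, data[k]) for k in sorted(data.keys())])
    return hashlib.sha256(data_str.encode("utf-8")).hexdigest()

print(stable_hash({"moddep[foo]": [sorted({"b", "a"}), sorted({"c"})]}))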
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py index 4761c86880..b29f0a5568 100644 --- a/bitbake/lib/bb/event.py +++ b/bitbake/lib/bb/event.py | |||
@@ -19,7 +19,6 @@ import sys | |||
19 | import threading | 19 | import threading |
20 | import traceback | 20 | import traceback |
21 | 21 | ||
22 | import bb.exceptions | ||
23 | import bb.utils | 22 | import bb.utils |
24 | 23 | ||
25 | # This is the pid for which we should generate the event. This is set when | 24 | # This is the pid for which we should generate the event. This is set when |
@@ -195,7 +194,12 @@ def fire_ui_handlers(event, d): | |||
195 | ui_queue.append(event) | 194 | ui_queue.append(event) |
196 | return | 195 | return |
197 | 196 | ||
198 | with bb.utils.lock_timeout(_thread_lock): | 197 | with bb.utils.lock_timeout_nocheck(_thread_lock) as lock: |
198 | if not lock: | ||
199 | # If we can't get the lock, we may be recursively called, queue and return | ||
200 | ui_queue.append(event) | ||
201 | return | ||
202 | |||
199 | errors = [] | 203 | errors = [] |
200 | for h in _ui_handlers: | 204 | for h in _ui_handlers: |
201 | #print "Sending event %s" % event | 205 | #print "Sending event %s" % event |
@@ -214,6 +218,9 @@ def fire_ui_handlers(event, d): | |||
214 | for h in errors: | 218 | for h in errors: |
215 | del _ui_handlers[h] | 219 | del _ui_handlers[h] |
216 | 220 | ||
221 | while ui_queue: | ||
222 | fire_ui_handlers(ui_queue.pop(), d) | ||
223 | |||
217 | def fire(event, d): | 224 | def fire(event, d): |
218 | """Fire off an Event""" | 225 | """Fire off an Event""" |
219 | 226 | ||
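fire_ui_handlers() now takes the thread lock without blocking: a recursive call (an event fired from inside a UI handler) queues the event instead of deadlocking, and the queue is drained once the outer call releases the lock. A minimal standalone sketch of that pattern; deliver() is a hypothetical stand-in for the handler loop, and bb.utils.lock_timeout_nocheck is assumed to behave like a context manager yielding whether the lock was taken:

import threading

_lock = threading.Lock()
_queue = []

def deliver(event):
    print("delivering", event)  # stand-in for the real UI handler loop

def fire(event):
    # If the lock is already held we may have been called recursively:
    # queue the event and return rather than blocking forever.
    if not _lock.acquire(blocking=False):
        _queue.append(event)
        return
    try:
        deliver(event)
    finally:
        _lock.release()
    # Drain anything queued by recursive calls made while we held the lock.
    while _queue:
        fire(_queue.pop())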
@@ -424,6 +431,16 @@ class RecipeEvent(Event): | |||
424 | self.fn = fn | 431 | self.fn = fn |
425 | Event.__init__(self) | 432 | Event.__init__(self) |
426 | 433 | ||
434 | class RecipePreDeferredInherits(RecipeEvent): | ||
435 | """ | ||
436 | Called before deferred inherits are processed so code can snoop on class extensions for example | ||
437 | Limitations: It won't see inherits of inherited classes and the data is unexpanded | ||
438 | """ | ||
439 | def __init__(self, fn, inherits): | ||
440 | self.fn = fn | ||
441 | self.inherits = inherits | ||
442 | Event.__init__(self) | ||
443 | |||
427 | class RecipePreFinalise(RecipeEvent): | 444 | class RecipePreFinalise(RecipeEvent): |
428 | """ Recipe Parsing Complete but not yet finalised""" | 445 | """ Recipe Parsing Complete but not yet finalised""" |
429 | 446 | ||
@@ -759,13 +776,7 @@ class LogHandler(logging.Handler): | |||
759 | 776 | ||
760 | def emit(self, record): | 777 | def emit(self, record): |
761 | if record.exc_info: | 778 | if record.exc_info: |
762 | etype, value, tb = record.exc_info | 779 | record.bb_exc_formatted = traceback.format_exception(*record.exc_info) |
763 | if hasattr(tb, 'tb_next'): | ||
764 | tb = list(bb.exceptions.extract_traceback(tb, context=3)) | ||
765 | # Need to turn the value into something the logging system can pickle | ||
766 | record.bb_exc_info = (etype, value, tb) | ||
767 | record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5) | ||
768 | value = str(value) | ||
769 | record.exc_info = None | 780 | record.exc_info = None |
770 | fire(record, None) | 781 | fire(record, None) |
771 | 782 | ||
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py deleted file mode 100644 index 801db9c82f..0000000000 --- a/bitbake/lib/bb/exceptions.py +++ /dev/null | |||
@@ -1,96 +0,0 @@ | |||
1 | # | ||
2 | # Copyright BitBake Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import inspect | ||
8 | import traceback | ||
9 | import bb.namedtuple_with_abc | ||
10 | from collections import namedtuple | ||
11 | |||
12 | |||
13 | class TracebackEntry(namedtuple.abc): | ||
14 | """Pickleable representation of a traceback entry""" | ||
15 | _fields = 'filename lineno function args code_context index' | ||
16 | _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}' | ||
17 | |||
18 | def format(self, formatter=None): | ||
19 | if not self.code_context: | ||
20 | return self._header.format(self) + '\n' | ||
21 | |||
22 | formatted = [self._header.format(self) + ':\n'] | ||
23 | |||
24 | for lineindex, line in enumerate(self.code_context): | ||
25 | if formatter: | ||
26 | line = formatter(line) | ||
27 | |||
28 | if lineindex == self.index: | ||
29 | formatted.append(' >%s' % line) | ||
30 | else: | ||
31 | formatted.append(' %s' % line) | ||
32 | return formatted | ||
33 | |||
34 | def __str__(self): | ||
35 | return ''.join(self.format()) | ||
36 | |||
37 | def _get_frame_args(frame): | ||
38 | """Get the formatted arguments and class (if available) for a frame""" | ||
39 | arginfo = inspect.getargvalues(frame) | ||
40 | |||
41 | try: | ||
42 | if not arginfo.args: | ||
43 | return '', None | ||
44 | # There have been reports from the field of python 2.6 which doesn't | ||
45 | # return a namedtuple here but simply a tuple so fallback gracefully if | ||
46 | # args isn't present. | ||
47 | except AttributeError: | ||
48 | return '', None | ||
49 | |||
50 | firstarg = arginfo.args[0] | ||
51 | if firstarg == 'self': | ||
52 | self = arginfo.locals['self'] | ||
53 | cls = self.__class__.__name__ | ||
54 | |||
55 | arginfo.args.pop(0) | ||
56 | del arginfo.locals['self'] | ||
57 | else: | ||
58 | cls = None | ||
59 | |||
60 | formatted = inspect.formatargvalues(*arginfo) | ||
61 | return formatted, cls | ||
62 | |||
63 | def extract_traceback(tb, context=1): | ||
64 | frames = inspect.getinnerframes(tb, context) | ||
65 | for frame, filename, lineno, function, code_context, index in frames: | ||
66 | formatted_args, cls = _get_frame_args(frame) | ||
67 | if cls: | ||
68 | function = '%s.%s' % (cls, function) | ||
69 | yield TracebackEntry(filename, lineno, function, formatted_args, | ||
70 | code_context, index) | ||
71 | |||
72 | def format_extracted(extracted, formatter=None, limit=None): | ||
73 | if limit: | ||
74 | extracted = extracted[-limit:] | ||
75 | |||
76 | formatted = [] | ||
77 | for tracebackinfo in extracted: | ||
78 | formatted.extend(tracebackinfo.format(formatter)) | ||
79 | return formatted | ||
80 | |||
81 | |||
82 | def format_exception(etype, value, tb, context=1, limit=None, formatter=None): | ||
83 | formatted = ['Traceback (most recent call last):\n'] | ||
84 | |||
85 | if hasattr(tb, 'tb_next'): | ||
86 | tb = extract_traceback(tb, context) | ||
87 | |||
88 | formatted.extend(format_extracted(tb, formatter, limit)) | ||
89 | formatted.extend(traceback.format_exception_only(etype, value)) | ||
90 | return formatted | ||
91 | |||
92 | def to_string(exc): | ||
93 | if isinstance(exc, SystemExit): | ||
94 | if not isinstance(exc.code, str): | ||
95 | return 'Exited with "%d"' % exc.code | ||
96 | return str(exc) | ||
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index 5bf2c4b8cf..0ad987c596 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
@@ -23,17 +23,18 @@ import collections | |||
23 | import subprocess | 23 | import subprocess |
24 | import pickle | 24 | import pickle |
25 | import errno | 25 | import errno |
26 | import bb.persist_data, bb.utils | 26 | import bb.utils |
27 | import bb.checksum | 27 | import bb.checksum |
28 | import bb.process | 28 | import bb.process |
29 | import bb.event | 29 | import bb.event |
30 | 30 | ||
31 | __version__ = "2" | 31 | __version__ = "2" |
32 | _checksum_cache = bb.checksum.FileChecksumCache() | 32 | _checksum_cache = bb.checksum.FileChecksumCache() |
33 | _revisions_cache = bb.checksum.RevisionsCache() | ||
33 | 34 | ||
34 | logger = logging.getLogger("BitBake.Fetcher") | 35 | logger = logging.getLogger("BitBake.Fetcher") |
35 | 36 | ||
36 | CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] | 37 | CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ] |
37 | SHOWN_CHECKSUM_LIST = ["sha256"] | 38 | SHOWN_CHECKSUM_LIST = ["sha256"] |
38 | 39 | ||
39 | class BBFetchException(Exception): | 40 | class BBFetchException(Exception): |
@@ -237,7 +238,7 @@ class URI(object): | |||
237 | # to RFC compliant URL format. E.g.: | 238 | # to RFC compliant URL format. E.g.: |
238 | # file://foo.diff -> file:foo.diff | 239 | # file://foo.diff -> file:foo.diff |
239 | if urlp.scheme in self._netloc_forbidden: | 240 | if urlp.scheme in self._netloc_forbidden: |
240 | uri = re.sub("(?<=:)//(?!/)", "", uri, 1) | 241 | uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1) |
241 | reparse = 1 | 242 | reparse = 1 |
242 | 243 | ||
243 | if reparse: | 244 | if reparse: |
@@ -352,6 +353,14 @@ def decodeurl(url): | |||
352 | user, password, parameters). | 353 | user, password, parameters). |
353 | """ | 354 | """ |
354 | 355 | ||
356 | uri = URI(url) | ||
357 | path = uri.path if uri.path else "/" | ||
358 | return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params | ||
359 | |||
360 | def decodemirrorurl(url): | ||
361 | """Decodes a mirror URL into the tokens (scheme, network location, path, | ||
362 | user, password, parameters). | ||
363 | """ | ||
355 | m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) | 364 | m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) |
356 | if not m: | 365 | if not m: |
357 | raise MalformedUrl(url) | 366 | raise MalformedUrl(url) |
@@ -370,6 +379,9 @@ def decodeurl(url): | |||
370 | elif type.lower() == 'file': | 379 | elif type.lower() == 'file': |
371 | host = "" | 380 | host = "" |
372 | path = location | 381 | path = location |
382 | if user: | ||
383 | path = user + '@' + path | ||
384 | user = "" | ||
373 | else: | 385 | else: |
374 | host = location | 386 | host = location |
375 | path = "/" | 387 | path = "/" |
@@ -402,32 +414,34 @@ def encodeurl(decoded): | |||
402 | 414 | ||
403 | if not type: | 415 | if not type: |
404 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) | 416 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) |
405 | url = ['%s://' % type] | 417 | uri = URI() |
418 | uri.scheme = type | ||
406 | if user and type != "file": | 419 | if user and type != "file": |
407 | url.append("%s" % user) | 420 | uri.username = user |
408 | if pswd: | 421 | if pswd: |
409 | url.append(":%s" % pswd) | 422 | uri.password = pswd |
410 | url.append("@") | ||
411 | if host and type != "file": | 423 | if host and type != "file": |
412 | url.append("%s" % host) | 424 | uri.hostname = host |
413 | if path: | 425 | if path: |
414 | # Standardise path to ensure comparisons work | 426 | # Standardise path to ensure comparisons work |
415 | while '//' in path: | 427 | while '//' in path: |
416 | path = path.replace("//", "/") | 428 | path = path.replace("//", "/") |
417 | url.append("%s" % urllib.parse.quote(path)) | 429 | uri.path = path |
430 | if type == "file": | ||
431 | # Use old not IETF compliant style | ||
432 | uri.relative = False | ||
418 | if p: | 433 | if p: |
419 | for parm in p: | 434 | uri.params = p |
420 | url.append(";%s=%s" % (parm, p[parm])) | ||
421 | 435 | ||
422 | return "".join(url) | 436 | return str(uri) |
423 | 437 | ||
424 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | 438 | def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): |
425 | if not ud.url or not uri_find or not uri_replace: | 439 | if not ud.url or not uri_find or not uri_replace: |
426 | logger.error("uri_replace: passed an undefined value, not replacing") | 440 | logger.error("uri_replace: passed an undefined value, not replacing") |
427 | return None | 441 | return None |
428 | uri_decoded = list(decodeurl(ud.url)) | 442 | uri_decoded = list(decodemirrorurl(ud.url)) |
429 | uri_find_decoded = list(decodeurl(uri_find)) | 443 | uri_find_decoded = list(decodemirrorurl(uri_find)) |
430 | uri_replace_decoded = list(decodeurl(uri_replace)) | 444 | uri_replace_decoded = list(decodemirrorurl(uri_replace)) |
431 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) | 445 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) |
432 | result_decoded = ['', '', '', '', '', {}] | 446 | result_decoded = ['', '', '', '', '', {}] |
433 | # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params | 447 | # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params |
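encodeurl() is rebuilt on the URI class as well, so quoting and the legacy non-IETF file:// form (uri.relative = False) live in one place instead of ad-hoc string concatenation. A hedged round-trip sketch; the exact output formatting depends on URI.__str__:

from bb.fetch2 import decodeurl, encodeurl

decoded = decodeurl("https://example.com/downloads/foo-1.0.tar.gz;striplevel=1")
print(encodeurl(decoded))
# Expected to round-trip to an equivalent URL, e.g.
# https://example.com/downloads/foo-1.0.tar.gz;striplevel=1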
@@ -460,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
460 | for k in replacements: | 474 | for k in replacements: |
461 | uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) | 475 | uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) |
462 | #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) | 476 | #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) |
463 | result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) | 477 | result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1) |
464 | if loc == 2: | 478 | if loc == 2: |
465 | # Handle path manipulations | 479 | # Handle path manipulations |
466 | basename = None | 480 | basename = None |
@@ -493,18 +507,23 @@ methods = [] | |||
493 | urldata_cache = {} | 507 | urldata_cache = {} |
494 | saved_headrevs = {} | 508 | saved_headrevs = {} |
495 | 509 | ||
496 | def fetcher_init(d): | 510 | def fetcher_init(d, servercontext=True): |
497 | """ | 511 | """ |
498 | Called to initialize the fetchers once the configuration data is known. | 512 | Called to initialize the fetchers once the configuration data is known. |
499 | Calls before this must not hit the cache. | 513 | Calls before this must not hit the cache. |
500 | """ | 514 | """ |
501 | 515 | ||
502 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | 516 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) |
517 | _revisions_cache.init_cache(d.getVar("BB_CACHEDIR")) | ||
518 | |||
519 | if not servercontext: | ||
520 | return | ||
521 | |||
503 | try: | 522 | try: |
504 | # fetcher_init is called multiple times, so make sure we only save the | 523 | # fetcher_init is called multiple times, so make sure we only save the |
505 | # revs the first time it is called. | 524 | # revs the first time it is called. |
506 | if not bb.fetch2.saved_headrevs: | 525 | if not bb.fetch2.saved_headrevs: |
507 | bb.fetch2.saved_headrevs = dict(revs) | 526 | bb.fetch2.saved_headrevs = _revisions_cache.get_revs() |
508 | except: | 527 | except: |
509 | pass | 528 | pass |
510 | 529 | ||
@@ -514,11 +533,10 @@ def fetcher_init(d): | |||
514 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | 533 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
515 | elif srcrev_policy == "clear": | 534 | elif srcrev_policy == "clear": |
516 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | 535 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) |
517 | revs.clear() | 536 | _revisions_cache.clear_cache() |
518 | else: | 537 | else: |
519 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | 538 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
520 | 539 | ||
521 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) | ||
522 | 540 | ||
523 | for m in methods: | 541 | for m in methods: |
524 | if hasattr(m, "init"): | 542 | if hasattr(m, "init"): |
@@ -526,9 +544,11 @@ def fetcher_init(d): | |||
526 | 544 | ||
527 | def fetcher_parse_save(): | 545 | def fetcher_parse_save(): |
528 | _checksum_cache.save_extras() | 546 | _checksum_cache.save_extras() |
547 | _revisions_cache.save_extras() | ||
529 | 548 | ||
530 | def fetcher_parse_done(): | 549 | def fetcher_parse_done(): |
531 | _checksum_cache.save_merge() | 550 | _checksum_cache.save_merge() |
551 | _revisions_cache.save_merge() | ||
532 | 552 | ||
533 | def fetcher_compare_revisions(d): | 553 | def fetcher_compare_revisions(d): |
534 | """ | 554 | """ |
@@ -536,7 +556,7 @@ def fetcher_compare_revisions(d): | |||
536 | when bitbake was started and return true if they have changed. | 556 | when bitbake was started and return true if they have changed. |
537 | """ | 557 | """ |
538 | 558 | ||
539 | headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) | 559 | headrevs = _revisions_cache.get_revs() |
540 | return headrevs != bb.fetch2.saved_headrevs | 560 | return headrevs != bb.fetch2.saved_headrevs |
541 | 561 | ||
542 | def mirror_from_string(data): | 562 | def mirror_from_string(data): |
@@ -786,8 +806,8 @@ def _get_srcrev(d, method_name='sortable_revision'): | |||
786 | return "", revs | 806 | return "", revs |
787 | 807 | ||
788 | 808 | ||
789 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: | 809 | if len(scms) == 1: |
790 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) | 810 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name) |
791 | revs.append(rev) | 811 | revs.append(rev) |
792 | if len(rev) > 10: | 812 | if len(rev) > 10: |
793 | rev = rev[:10] | 813 | rev = rev[:10] |
@@ -808,13 +828,12 @@ def _get_srcrev(d, method_name='sortable_revision'): | |||
808 | seenautoinc = False | 828 | seenautoinc = False |
809 | for scm in scms: | 829 | for scm in scms: |
810 | ud = urldata[scm] | 830 | ud = urldata[scm] |
811 | for name in ud.names: | 831 | autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name) |
812 | autoinc, rev = getattr(ud.method, method_name)(ud, d, name) | 832 | revs.append(rev) |
813 | revs.append(rev) | 833 | seenautoinc = seenautoinc or autoinc |
814 | seenautoinc = seenautoinc or autoinc | 834 | if len(rev) > 10: |
815 | if len(rev) > 10: | 835 | rev = rev[:10] |
816 | rev = rev[:10] | 836 | name_to_rev[ud.name] = rev |
817 | name_to_rev[name] = rev | ||
818 | # Replace names by revisions in the SRCREV_FORMAT string. The approach used | 837 | # Replace names by revisions in the SRCREV_FORMAT string. The approach used |
819 | # here can handle names being prefixes of other names and names appearing | 838 | # here can handle names being prefixes of other names and names appearing |
820 | # as substrings in revisions (in which case the name should not be | 839 | # as substrings in revisions (in which case the name should not be |
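The comment above (truncated at the hunk boundary) lists two properties the SRCREV_FORMAT substitution must preserve: names may be prefixes of other names, and a name may appear as a substring of an already-substituted revision. A minimal sketch of one substitution strategy with both properties, as an illustration only, not the exact bitbake implementation:

    import re

    def expand_srcrev_format(fmt, name_to_rev):
        # Single left-to-right pass: re.sub never rescans text it has already
        # emitted, so a revision containing another name is left untouched.
        # Longest names first, so a prefix cannot shadow the longer match.
        names = sorted(name_to_rev, key=len, reverse=True)
        pattern = "|".join(re.escape(n) for n in names)
        return re.sub(pattern, lambda m: name_to_rev[m.group(0)], fmt)

    # expand_srcrev_format("rev_revA", {"rev": "aaaa", "revA": "bbbb"}) == "aaaa_bbbb"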
@@ -878,6 +897,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH', | |||
878 | 'AWS_SESSION_TOKEN', | 897 | 'AWS_SESSION_TOKEN', |
879 | 'GIT_CACHE_PATH', | 898 | 'GIT_CACHE_PATH', |
880 | 'REMOTE_CONTAINERS_IPC', | 899 | 'REMOTE_CONTAINERS_IPC', |
900 | 'GITHUB_TOKEN', | ||
881 | 'SSL_CERT_DIR'] | 901 | 'SSL_CERT_DIR'] |
882 | 902 | ||
883 | def get_fetcher_environment(d): | 903 | def get_fetcher_environment(d): |
@@ -1072,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1072 | # If that tarball is a local file:// we need to provide a symlink to it | 1092 | # If that tarball is a local file:// we need to provide a symlink to it |
1073 | dldir = ld.getVar("DL_DIR") | 1093 | dldir = ld.getVar("DL_DIR") |
1074 | 1094 | ||
1095 | if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")): | ||
1096 | ld = ld.createCopy() | ||
1097 | ld.setVar("BB_NO_NETWORK", "1") | ||
1098 | |||
1075 | if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): | 1099 | if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): |
1076 | # Create donestamp in old format to avoid triggering a re-download | 1100 | # Create donestamp in old format to avoid triggering a re-download |
1077 | if ud.donestamp: | 1101 | if ud.donestamp: |
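The hunk above, like the matching change to Fetch.download() later in this file, disables the network only on a copy of the datastore so BB_NO_NETWORK cannot leak back to the caller. A minimal sketch of that pattern, assuming only the createCopy()/setVar() datastore API visible in the diff:

    def network_restricted(d):
        # Restrict networking on a copy; the caller's datastore is untouched.
        ld = d.createCopy()
        ld.setVar("BB_NO_NETWORK", "1")
        return ld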
@@ -1093,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1093 | origud.method.build_mirror_data(origud, ld) | 1117 | origud.method.build_mirror_data(origud, ld) |
1094 | return origud.localpath | 1118 | return origud.localpath |
1095 | # Otherwise the result is a local file:// and we symlink to it | 1119 | # Otherwise the result is a local file:// and we symlink to it |
1096 | ensure_symlink(ud.localpath, origud.localpath) | 1120 | # This may also be a link to a shallow archive |
1121 | # When using shallow mode, add a symlink to the original fullshallow | ||
1122 | # path to ensure a valid symlink even in the `PREMIRRORS` case | ||
1123 | origud.method.update_mirror_links(ud, origud) | ||
1097 | update_stamp(origud, ld) | 1124 | update_stamp(origud, ld) |
1098 | return ud.localpath | 1125 | return ud.localpath |
1099 | 1126 | ||
@@ -1127,25 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1127 | if ud.lockfile and ud.lockfile != origud.lockfile: | 1154 | if ud.lockfile and ud.lockfile != origud.lockfile: |
1128 | bb.utils.unlockfile(lf) | 1155 | bb.utils.unlockfile(lf) |
1129 | 1156 | ||
1130 | |||
1131 | def ensure_symlink(target, link_name): | ||
1132 | if not os.path.exists(link_name): | ||
1133 | dirname = os.path.dirname(link_name) | ||
1134 | bb.utils.mkdirhier(dirname) | ||
1135 | if os.path.islink(link_name): | ||
1136 | # Broken symbolic link | ||
1137 | os.unlink(link_name) | ||
1138 | |||
1139 | # In case this is executing without any file locks held (as is | ||
1140 | # the case for file:// URLs), two tasks may end up here at the | ||
1141 | # same time, in which case we do not want the second task to | ||
1142 | # fail when the link has already been created by the first task. | ||
1143 | try: | ||
1144 | os.symlink(target, link_name) | ||
1145 | except FileExistsError: | ||
1146 | pass | ||
1147 | |||
1148 | |||
1149 | def try_mirrors(fetch, d, origud, mirrors, check = False): | 1157 | def try_mirrors(fetch, d, origud, mirrors, check = False): |
1150 | """ | 1158 | """ |
1151 | Try to use a mirrored version of the sources. | 1159 | Try to use a mirrored version of the sources. |
@@ -1174,7 +1182,7 @@ def trusted_network(d, url): | |||
1174 | if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): | 1182 | if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): |
1175 | return True | 1183 | return True |
1176 | 1184 | ||
1177 | pkgname = d.expand(d.getVar('PN', False)) | 1185 | pkgname = d.getVar('PN') |
1178 | trusted_hosts = None | 1186 | trusted_hosts = None |
1179 | if pkgname: | 1187 | if pkgname: |
1180 | trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) | 1188 | trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) |
@@ -1227,20 +1235,17 @@ def srcrev_internal_helper(ud, d, name): | |||
1227 | if srcrev and srcrev != "INVALID": | 1235 | if srcrev and srcrev != "INVALID": |
1228 | break | 1236 | break |
1229 | 1237 | ||
1230 | if 'rev' in ud.parm and 'tag' in ud.parm: | 1238 | if 'rev' in ud.parm: |
1231 | raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) | 1239 | parmrev = ud.parm['rev'] |
1232 | |||
1233 | if 'rev' in ud.parm or 'tag' in ud.parm: | ||
1234 | if 'rev' in ud.parm: | ||
1235 | parmrev = ud.parm['rev'] | ||
1236 | else: | ||
1237 | parmrev = ud.parm['tag'] | ||
1238 | if srcrev == "INVALID" or not srcrev: | 1240 | if srcrev == "INVALID" or not srcrev: |
1239 | return parmrev | 1241 | return parmrev |
1240 | if srcrev != parmrev: | 1242 | if srcrev != parmrev: |
1241 | raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) | 1243 | raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) |
1242 | return parmrev | 1244 | return parmrev |
1243 | 1245 | ||
1246 | if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev): | ||
1247 | return ud.parm['tag'] | ||
1248 | |||
1244 | if srcrev == "INVALID" or not srcrev: | 1249 | if srcrev == "INVALID" or not srcrev: |
1245 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) | 1250 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) |
1246 | if srcrev == "AUTOINC": | 1251 | if srcrev == "AUTOINC": |
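Distilling the reworked precedence above into a standalone sketch (hypothetical helper; AUTOINC handling elided): ;rev= must agree with any explicit SRCREV, while ;tag= only fills in when no usable SRCREV exists:

    def pick_revision(parm, srcrev):
        unset = srcrev in (None, "", "INVALID")
        if "rev" in parm:
            if not unset and srcrev != parm["rev"]:
                raise ValueError("Conflicting revisions from SRCREV and ;rev=")
            return parm["rev"]
        if "tag" in parm and unset:
            return parm["tag"]
        if unset:
            raise ValueError("No valid SRCREV set")
        return srcrev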
@@ -1263,7 +1268,7 @@ def get_checksum_file_list(d): | |||
1263 | found = False | 1268 | found = False |
1264 | paths = ud.method.localfile_searchpaths(ud, d) | 1269 | paths = ud.method.localfile_searchpaths(ud, d) |
1265 | for f in paths: | 1270 | for f in paths: |
1266 | pth = ud.decodedurl | 1271 | pth = ud.path |
1267 | if os.path.exists(f): | 1272 | if os.path.exists(f): |
1268 | found = True | 1273 | found = True |
1269 | filelist.append(f + ":" + str(os.path.exists(f))) | 1274 | filelist.append(f + ":" + str(os.path.exists(f))) |
@@ -1308,23 +1313,28 @@ class FetchData(object): | |||
1308 | self.setup = False | 1313 | self.setup = False |
1309 | 1314 | ||
1310 | def configure_checksum(checksum_id): | 1315 | def configure_checksum(checksum_id): |
1316 | checksum_plain_name = "%ssum" % checksum_id | ||
1311 | if "name" in self.parm: | 1317 | if "name" in self.parm: |
1312 | checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) | 1318 | checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) |
1313 | else: | 1319 | else: |
1314 | checksum_name = "%ssum" % checksum_id | 1320 | checksum_name = checksum_plain_name |
1315 | |||
1316 | setattr(self, "%s_name" % checksum_id, checksum_name) | ||
1317 | 1321 | ||
1318 | if checksum_name in self.parm: | 1322 | if checksum_name in self.parm: |
1319 | checksum_expected = self.parm[checksum_name] | 1323 | checksum_expected = self.parm[checksum_name] |
1320 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]: | 1324 | elif checksum_plain_name in self.parm: |
1325 | checksum_expected = self.parm[checksum_plain_name] | ||
1326 | checksum_name = checksum_plain_name | ||
1327 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]: | ||
1321 | checksum_expected = None | 1328 | checksum_expected = None |
1322 | else: | 1329 | else: |
1323 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) | 1330 | checksum_expected = d.getVarFlag("SRC_URI", checksum_name) |
1324 | 1331 | ||
1332 | setattr(self, "%s_name" % checksum_id, checksum_name) | ||
1325 | setattr(self, "%s_expected" % checksum_id, checksum_expected) | 1333 | setattr(self, "%s_expected" % checksum_id, checksum_expected) |
1326 | 1334 | ||
1327 | self.names = self.parm.get("name",'default').split(',') | 1335 | self.name = self.parm.get("name",'default') |
1336 | if "," in self.name: | ||
1337 | raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url) | ||
1328 | 1338 | ||
1329 | self.method = None | 1339 | self.method = None |
1330 | for m in methods: | 1340 | for m in methods: |
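With the checksum_plain_name fallback above, a named URL now accepts three spellings, tried in this order; the URL name and digest below are hypothetical:

    # 1. name-prefixed parameter: ...;name=tool;tool.sha256sum=<digest>
    # 2. plain parameter (new):   ...;name=tool;sha256sum=<digest>
    # 3. varflag fallback:        SRC_URI[tool.sha256sum] = "<digest>"
    parm = {"name": "tool", "sha256sum": "0123abcd" * 8}
    checksum_name = "%s.%ssum" % (parm["name"], "sha256")      # "tool.sha256sum"
    expected = parm.get(checksum_name, parm.get("sha256sum"))  # plain key now accepted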
@@ -1376,13 +1386,7 @@ class FetchData(object): | |||
1376 | self.lockfile = basepath + '.lock' | 1386 | self.lockfile = basepath + '.lock' |
1377 | 1387 | ||
1378 | def setup_revisions(self, d): | 1388 | def setup_revisions(self, d): |
1379 | self.revisions = {} | 1389 | self.revision = srcrev_internal_helper(self, d, self.name) |
1380 | for name in self.names: | ||
1381 | self.revisions[name] = srcrev_internal_helper(self, d, name) | ||
1382 | |||
1383 | # add compatibility code for non name specified case | ||
1384 | if len(self.names) == 1: | ||
1385 | self.revision = self.revisions[self.names[0]] | ||
1386 | 1390 | ||
1387 | def setup_localpath(self, d): | 1391 | def setup_localpath(self, d): |
1388 | if not self.localpath: | 1392 | if not self.localpath: |
@@ -1510,7 +1514,7 @@ class FetchMethod(object): | |||
1510 | (file, urldata.parm.get('unpack'))) | 1514 | (file, urldata.parm.get('unpack'))) |
1511 | 1515 | ||
1512 | base, ext = os.path.splitext(file) | 1516 | base, ext = os.path.splitext(file) |
1513 | if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: | 1517 | if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']: |
1514 | efile = os.path.join(rootdir, os.path.basename(base)) | 1518 | efile = os.path.join(rootdir, os.path.basename(base)) |
1515 | else: | 1519 | else: |
1516 | efile = file | 1520 | efile = file |
@@ -1569,11 +1573,11 @@ class FetchMethod(object): | |||
1569 | datafile = None | 1573 | datafile = None |
1570 | if output: | 1574 | if output: |
1571 | for line in output.decode().splitlines(): | 1575 | for line in output.decode().splitlines(): |
1572 | if line.startswith('data.tar.'): | 1576 | if line.startswith('data.tar.') or line == 'data.tar': |
1573 | datafile = line | 1577 | datafile = line |
1574 | break | 1578 | break |
1575 | else: | 1579 | else: |
1576 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) | 1580 | raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url) |
1577 | else: | 1581 | else: |
1578 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) | 1582 | raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) |
1579 | cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) | 1583 | cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) |
@@ -1606,7 +1610,7 @@ class FetchMethod(object): | |||
1606 | if urlpath.find("/") != -1: | 1610 | if urlpath.find("/") != -1: |
1607 | destdir = urlpath.rsplit("/", 1)[0] + '/' | 1611 | destdir = urlpath.rsplit("/", 1)[0] + '/' |
1608 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) | 1612 | bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) |
1609 | cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) | 1613 | cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir) |
1610 | else: | 1614 | else: |
1611 | urldata.unpack_tracer.unpack("archive-extract", unpackdir) | 1615 | urldata.unpack_tracer.unpack("archive-extract", unpackdir) |
1612 | 1616 | ||
@@ -1635,6 +1639,28 @@ class FetchMethod(object): | |||
1635 | """ | 1639 | """ |
1636 | bb.utils.remove(urldata.localpath) | 1640 | bb.utils.remove(urldata.localpath) |
1637 | 1641 | ||
1642 | def ensure_symlink(self, target, link_name): | ||
1643 | if not os.path.exists(link_name): | ||
1644 | dirname = os.path.dirname(link_name) | ||
1645 | bb.utils.mkdirhier(dirname) | ||
1646 | if os.path.islink(link_name): | ||
1647 | # Broken symbolic link | ||
1648 | os.unlink(link_name) | ||
1649 | |||
1650 | # In case this is executing without any file locks held (as is | ||
1651 | # the case for file:// URLs), two tasks may end up here at the | ||
1652 | # same time, in which case we do not want the second task to | ||
1653 | # fail when the link has already been created by the first task. | ||
1654 | try: | ||
1655 | os.symlink(target, link_name) | ||
1656 | except FileExistsError: | ||
1657 | pass | ||
1658 | |||
1659 | def update_mirror_links(self, ud, origud): | ||
1660 | # For local file:// results, create a symlink to them | ||
1661 | # This may also be a link to a shallow archive | ||
1662 | self.ensure_symlink(ud.localpath, origud.localpath) | ||
1663 | |||
1638 | def try_premirror(self, urldata, d): | 1664 | def try_premirror(self, urldata, d): |
1639 | """ | 1665 | """ |
1640 | Should premirrors be used? | 1666 | Should premirrors be used? |
@@ -1662,13 +1688,13 @@ class FetchMethod(object): | |||
1662 | if not hasattr(self, "_latest_revision"): | 1688 | if not hasattr(self, "_latest_revision"): |
1663 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) | 1689 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) |
1664 | 1690 | ||
1665 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | ||
1666 | key = self.generate_revision_key(ud, d, name) | 1691 | key = self.generate_revision_key(ud, d, name) |
1667 | try: | 1692 | |
1668 | return revs[key] | 1693 | rev = _revisions_cache.get_rev(key) |
1669 | except KeyError: | 1694 | if rev is None: |
1670 | revs[key] = rev = self._latest_revision(ud, d, name) | 1695 | rev = self._latest_revision(ud, d, name) |
1671 | return rev | 1696 | _revisions_cache.set_rev(key, rev) |
1697 | return rev | ||
1672 | 1698 | ||
1673 | def sortable_revision(self, ud, d, name): | 1699 | def sortable_revision(self, ud, d, name): |
1674 | latest_rev = self._build_revision(ud, d, name) | 1700 | latest_rev = self._build_revision(ud, d, name) |
@@ -1806,7 +1832,7 @@ class Fetch(object): | |||
1806 | self.ud[url] = FetchData(url, self.d) | 1832 | self.ud[url] = FetchData(url, self.d) |
1807 | 1833 | ||
1808 | self.ud[url].setup_localpath(self.d) | 1834 | self.ud[url].setup_localpath(self.d) |
1809 | return self.d.expand(self.ud[url].localpath) | 1835 | return self.ud[url].localpath |
1810 | 1836 | ||
1811 | def localpaths(self): | 1837 | def localpaths(self): |
1812 | """ | 1838 | """ |
@@ -1859,25 +1885,28 @@ class Fetch(object): | |||
1859 | logger.debug(str(e)) | 1885 | logger.debug(str(e)) |
1860 | done = False | 1886 | done = False |
1861 | 1887 | ||
1888 | d = self.d | ||
1862 | if premirroronly: | 1889 | if premirroronly: |
1863 | self.d.setVar("BB_NO_NETWORK", "1") | 1890 | # Only disable the network in a copy |
1891 | d = bb.data.createCopy(self.d) | ||
1892 | d.setVar("BB_NO_NETWORK", "1") | ||
1864 | 1893 | ||
1865 | firsterr = None | 1894 | firsterr = None |
1866 | verified_stamp = False | 1895 | verified_stamp = False |
1867 | if done: | 1896 | if done: |
1868 | verified_stamp = m.verify_donestamp(ud, self.d) | 1897 | verified_stamp = m.verify_donestamp(ud, d) |
1869 | if not done and (not verified_stamp or m.need_update(ud, self.d)): | 1898 | if not done and (not verified_stamp or m.need_update(ud, d)): |
1870 | try: | 1899 | try: |
1871 | if not trusted_network(self.d, ud.url): | 1900 | if not trusted_network(d, ud.url): |
1872 | raise UntrustedUrl(ud.url) | 1901 | raise UntrustedUrl(ud.url) |
1873 | logger.debug("Trying Upstream") | 1902 | logger.debug("Trying Upstream") |
1874 | m.download(ud, self.d) | 1903 | m.download(ud, d) |
1875 | if hasattr(m, "build_mirror_data"): | 1904 | if hasattr(m, "build_mirror_data"): |
1876 | m.build_mirror_data(ud, self.d) | 1905 | m.build_mirror_data(ud, d) |
1877 | done = True | 1906 | done = True |
1878 | # early checksum verify, so that if the checksum mismatched, | 1907 | # early checksum verify, so that if the checksum mismatched, |
1879 | # the fetcher still has a chance to fetch from a mirror | 1908 | # the fetcher still has a chance to fetch from a mirror |
1880 | m.update_donestamp(ud, self.d) | 1909 | m.update_donestamp(ud, d) |
1881 | 1910 | ||
1882 | except bb.fetch2.NetworkAccess: | 1911 | except bb.fetch2.NetworkAccess: |
1883 | raise | 1912 | raise |
@@ -1896,17 +1925,17 @@ class Fetch(object): | |||
1896 | firsterr = e | 1925 | firsterr = e |
1897 | # Remove any incomplete fetch | 1926 | # Remove any incomplete fetch |
1898 | if not verified_stamp and m.cleanup_upon_failure(): | 1927 | if not verified_stamp and m.cleanup_upon_failure(): |
1899 | m.clean(ud, self.d) | 1928 | m.clean(ud, d) |
1900 | logger.debug("Trying MIRRORS") | 1929 | logger.debug("Trying MIRRORS") |
1901 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) | 1930 | mirrors = mirror_from_string(d.getVar('MIRRORS')) |
1902 | done = m.try_mirrors(self, ud, self.d, mirrors) | 1931 | done = m.try_mirrors(self, ud, d, mirrors) |
1903 | 1932 | ||
1904 | if not done or not m.done(ud, self.d): | 1933 | if not done or not m.done(ud, d): |
1905 | if firsterr: | 1934 | if firsterr: |
1906 | logger.error(str(firsterr)) | 1935 | logger.error(str(firsterr)) |
1907 | raise FetchError("Unable to fetch URL from any source.", u) | 1936 | raise FetchError("Unable to fetch URL from any source.", u) |
1908 | 1937 | ||
1909 | m.update_donestamp(ud, self.d) | 1938 | m.update_donestamp(ud, d) |
1910 | 1939 | ||
1911 | except IOError as e: | 1940 | except IOError as e: |
1912 | if e.errno in [errno.ESTALE]: | 1941 | if e.errno in [errno.ESTALE]: |
@@ -2088,6 +2117,7 @@ from . import npmsw | |||
2088 | from . import az | 2117 | from . import az |
2089 | from . import crate | 2118 | from . import crate |
2090 | from . import gcp | 2119 | from . import gcp |
2120 | from . import gomod | ||
2091 | 2121 | ||
2092 | methods.append(local.Local()) | 2122 | methods.append(local.Local()) |
2093 | methods.append(wget.Wget()) | 2123 | methods.append(wget.Wget()) |
@@ -2110,3 +2140,5 @@ methods.append(npmsw.NpmShrinkWrap()) | |||
2110 | methods.append(az.Az()) | 2140 | methods.append(az.Az()) |
2111 | methods.append(crate.Crate()) | 2141 | methods.append(crate.Crate()) |
2112 | methods.append(gcp.GCP()) | 2142 | methods.append(gcp.GCP()) |
2143 | methods.append(gomod.GoMod()) | ||
2144 | methods.append(gomod.GoModGit()) | ||
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py index 3ccc594c22..1d3664f213 100644 --- a/bitbake/lib/bb/fetch2/az.py +++ b/bitbake/lib/bb/fetch2/az.py | |||
@@ -36,6 +36,8 @@ class Az(Wget): | |||
36 | 36 | ||
37 | az_sas = d.getVar('AZ_SAS') | 37 | az_sas = d.getVar('AZ_SAS') |
38 | if az_sas and az_sas not in ud.url: | 38 | if az_sas and az_sas not in ud.url: |
39 | if not az_sas.startswith('?'): | ||
40 | raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.") | ||
39 | ud.url += az_sas | 41 | ud.url += az_sas |
40 | 42 | ||
41 | return Wget.checkstatus(self, fetch, ud, d, try_again) | 43 | return Wget.checkstatus(self, fetch, ud, d, try_again) |
@@ -62,15 +64,18 @@ class Az(Wget): | |||
62 | az_sas = d.getVar('AZ_SAS') | 64 | az_sas = d.getVar('AZ_SAS') |
63 | 65 | ||
64 | if az_sas: | 66 | if az_sas: |
67 | if not az_sas.startswith('?'): | ||
68 | raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.") | ||
65 | azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas) | 69 | azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas) |
66 | else: | 70 | else: |
67 | azuri = '%s%s%s' % ('https://', ud.host, ud.path) | 71 | azuri = '%s%s%s' % ('https://', ud.host, ud.path) |
68 | 72 | ||
73 | dldir = d.getVar("DL_DIR") | ||
69 | if os.path.exists(ud.localpath): | 74 | if os.path.exists(ud.localpath): |
70 | # file exists, but we didn't complete it... trying again. | 75 | # file exists, but we didn't complete it... trying again. |
71 | fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri) | 76 | fetchcmd += " -c -P %s '%s'" % (dldir, azuri) |
72 | else: | 77 | else: |
73 | fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri) | 78 | fetchcmd += " -P %s '%s'" % (dldir, azuri) |
74 | 79 | ||
75 | try: | 80 | try: |
76 | self._runwget(ud, d, fetchcmd, False) | 81 | self._runwget(ud, d, fetchcmd, False) |
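A hypothetical token showing the validation the two az.py hunks above add: AZ_SAS must begin with '?' so that appending it to the blob URL yields a well-formed query string:

    AZ_SAS = "?sv=2022-11-02&ss=b&sig=EXAMPLE"  # made-up SAS token
    url = "https://example.blob.core.windows.net/container/file.tar.gz"
    if not AZ_SAS.startswith("?"):
        raise ValueError("AZ_SAS must start with '?'")
    azuri = url + AZ_SAS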
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py index 1a9c863769..17500daf95 100644 --- a/bitbake/lib/bb/fetch2/clearcase.py +++ b/bitbake/lib/bb/fetch2/clearcase.py | |||
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod): | |||
108 | ud.module.replace("/", "."), | 108 | ud.module.replace("/", "."), |
109 | ud.label.replace("/", ".")) | 109 | ud.label.replace("/", ".")) |
110 | 110 | ||
111 | ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) | 111 | ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME")) |
112 | ud.csname = "%s-config-spec" % (ud.identifier) | 112 | ud.csname = "%s-config-spec" % (ud.identifier) |
113 | ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) | 113 | ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) |
114 | ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) | 114 | ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) |
@@ -130,8 +130,6 @@ class ClearCase(FetchMethod): | |||
130 | self.debug("configspecfile = %s" % ud.configspecfile) | 130 | self.debug("configspecfile = %s" % ud.configspecfile) |
131 | self.debug("localfile = %s" % ud.localfile) | 131 | self.debug("localfile = %s" % ud.localfile) |
132 | 132 | ||
133 | ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile) | ||
134 | |||
135 | def _build_ccase_command(self, ud, command): | 133 | def _build_ccase_command(self, ud, command): |
136 | """ | 134 | """ |
137 | Build up a commandline based on ud | 135 | Build up a commandline based on ud |
@@ -196,7 +194,7 @@ class ClearCase(FetchMethod): | |||
196 | 194 | ||
197 | def need_update(self, ud, d): | 195 | def need_update(self, ud, d): |
198 | if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec): | 196 | if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec): |
199 | ud.identifier += "-%s" % d.getVar("DATETIME",d, True) | 197 | ud.identifier += "-%s" % d.getVar("DATETIME") |
200 | return True | 198 | return True |
201 | if os.path.exists(ud.localpath): | 199 | if os.path.exists(ud.localpath): |
202 | return False | 200 | return False |
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py index 01d49435c3..e611736f06 100644 --- a/bitbake/lib/bb/fetch2/crate.py +++ b/bitbake/lib/bb/fetch2/crate.py | |||
@@ -70,6 +70,7 @@ class Crate(Wget): | |||
70 | host = 'crates.io/api/v1/crates' | 70 | host = 'crates.io/api/v1/crates' |
71 | 71 | ||
72 | ud.url = "https://%s/%s/%s/download" % (host, name, version) | 72 | ud.url = "https://%s/%s/%s/download" % (host, name, version) |
73 | ud.versionsurl = "https://%s/%s/versions" % (host, name) | ||
73 | ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) | 74 | ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) |
74 | if 'name' not in ud.parm: | 75 | if 'name' not in ud.parm: |
75 | ud.parm['name'] = '%s-%s' % (name, version) | 76 | ud.parm['name'] = '%s-%s' % (name, version) |
@@ -139,3 +140,11 @@ class Crate(Wget): | |||
139 | mdpath = os.path.join(bbpath, cratepath, mdfile) | 140 | mdpath = os.path.join(bbpath, cratepath, mdfile) |
140 | with open(mdpath, "w") as f: | 141 | with open(mdpath, "w") as f: |
141 | json.dump(metadata, f) | 142 | json.dump(metadata, f) |
143 | |||
144 | def latest_versionstring(self, ud, d): | ||
145 | from functools import cmp_to_key | ||
146 | json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d)) | ||
147 | versions = [(0, i["num"], "") for i in json_data["versions"]] | ||
148 | versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp)) | ||
149 | |||
150 | return (versions[-1][1], "") | ||
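The new latest_versionstring() wraps each crates.io version in an (epoch, version, revision) tuple so bb.utils.vercmp can order them. A usage sketch with made-up version data, assuming bitbake's lib/ is on sys.path:

    from functools import cmp_to_key
    import bb.utils

    versions = [(0, v, "") for v in ["0.9.1", "1.10.0", "1.2.3"]]
    versions.sort(key=cmp_to_key(bb.utils.vercmp))
    latest = versions[-1][1]  # "1.10.0": vercmp compares numerically, not lexically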
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py index eb3e0c6a6b..86546d40bf 100644 --- a/bitbake/lib/bb/fetch2/gcp.py +++ b/bitbake/lib/bb/fetch2/gcp.py | |||
@@ -23,7 +23,6 @@ import urllib.parse, urllib.error | |||
23 | from bb.fetch2 import FetchMethod | 23 | from bb.fetch2 import FetchMethod |
24 | from bb.fetch2 import FetchError | 24 | from bb.fetch2 import FetchError |
25 | from bb.fetch2 import logger | 25 | from bb.fetch2 import logger |
26 | from bb.fetch2 import runfetchcmd | ||
27 | 26 | ||
28 | class GCP(FetchMethod): | 27 | class GCP(FetchMethod): |
29 | """ | 28 | """ |
@@ -47,8 +46,7 @@ class GCP(FetchMethod): | |||
47 | else: | 46 | else: |
48 | ud.basename = os.path.basename(ud.path) | 47 | ud.basename = os.path.basename(ud.path) |
49 | 48 | ||
50 | ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) | 49 | ud.localfile = ud.basename |
51 | ud.basecmd = "gsutil stat" | ||
52 | 50 | ||
53 | def get_gcp_client(self): | 51 | def get_gcp_client(self): |
54 | from google.cloud import storage | 52 | from google.cloud import storage |
@@ -59,17 +57,20 @@ class GCP(FetchMethod): | |||
59 | Fetch urls using the GCP API. | 57 | Fetch urls using the GCP API. |
60 | Assumes localpath was called first. | 58 | Assumes localpath was called first. |
61 | """ | 59 | """ |
60 | from google.api_core.exceptions import NotFound | ||
62 | logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}") | 61 | logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}") |
63 | if self.gcp_client is None: | 62 | if self.gcp_client is None: |
64 | self.get_gcp_client() | 63 | self.get_gcp_client() |
65 | 64 | ||
66 | bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") | 65 | bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}") |
67 | runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d) | ||
68 | 66 | ||
69 | # Path sometimes has leading slash, so strip it | 67 | # Path sometimes has leading slash, so strip it |
70 | path = ud.path.lstrip("/") | 68 | path = ud.path.lstrip("/") |
71 | blob = self.gcp_client.bucket(ud.host).blob(path) | 69 | blob = self.gcp_client.bucket(ud.host).blob(path) |
72 | blob.download_to_filename(ud.localpath) | 70 | try: |
71 | blob.download_to_filename(ud.localpath) | ||
72 | except NotFound: | ||
73 | raise FetchError("The GCP API threw a NotFound exception") | ||
73 | 74 | ||
74 | # Additional sanity checks copied from the wget class (although there | 75 | # Additional sanity checks copied from the wget class (although there |
75 | # are no known issues which mean these are required, treat the GCP API | 76 | # are no known issues which mean these are required, treat the GCP API |
@@ -91,8 +92,7 @@ class GCP(FetchMethod): | |||
91 | if self.gcp_client is None: | 92 | if self.gcp_client is None: |
92 | self.get_gcp_client() | 93 | self.get_gcp_client() |
93 | 94 | ||
94 | bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") | 95 | bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}") |
95 | runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d) | ||
96 | 96 | ||
97 | # Path sometimes has leading slash, so strip it | 97 | # Path sometimes has leading slash, so strip it |
98 | path = ud.path.lstrip("/") | 98 | path = ud.path.lstrip("/") |
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py index c7ff769fdf..14ec45a3f6 100644 --- a/bitbake/lib/bb/fetch2/git.py +++ b/bitbake/lib/bb/fetch2/git.py | |||
@@ -9,15 +9,6 @@ Supported SRC_URI options are: | |||
9 | - branch | 9 | - branch |
10 | The git branch to retrieve from. The default is "master" | 10 | The git branch to retrieve from. The default is "master" |
11 | 11 | ||
12 | This option also supports multiple branch fetching, with branches | ||
13 | separated by commas. In multiple branches case, the name option | ||
14 | must have the same number of names to match the branches, which is | ||
15 | used to specify the SRC_REV for the branch | ||
16 | e.g: | ||
17 | SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY" | ||
18 | SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx" | ||
19 | SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY" | ||
20 | |||
21 | - tag | 12 | - tag |
22 | The git tag to retrieve. The default is "master" | 13 | The git tag to retrieve. The default is "master" |
23 | 14 | ||
@@ -81,6 +72,7 @@ import shlex | |||
81 | import shutil | 72 | import shutil |
82 | import subprocess | 73 | import subprocess |
83 | import tempfile | 74 | import tempfile |
75 | import urllib | ||
84 | import bb | 76 | import bb |
85 | import bb.progress | 77 | import bb.progress |
86 | from contextlib import contextmanager | 78 | from contextlib import contextmanager |
@@ -190,14 +182,11 @@ class Git(FetchMethod): | |||
190 | ud.bareclone = ud.parm.get("bareclone","0") == "1" | 182 | ud.bareclone = ud.parm.get("bareclone","0") == "1" |
191 | if ud.bareclone: | 183 | if ud.bareclone: |
192 | ud.nocheckout = 1 | 184 | ud.nocheckout = 1 |
193 | 185 | ||
194 | ud.unresolvedrev = {} | 186 | ud.unresolvedrev = "" |
195 | branches = ud.parm.get("branch", "").split(',') | 187 | ud.branch = ud.parm.get("branch", "") |
196 | if branches == [""] and not ud.nobranch: | 188 | if not ud.branch and not ud.nobranch: |
197 | bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url) | 189 | raise bb.fetch2.ParameterError("The url does not set any branch parameter or set nobranch=1.", ud.url) |
198 | branches = ["master"] | ||
199 | if len(branches) != len(ud.names): | ||
200 | raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) | ||
201 | 190 | ||
202 | ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" | 191 | ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" |
203 | 192 | ||
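With the change above, every git URL must either name its branch or opt out explicitly; the old behaviour of warning and assuming master is gone. Hypothetical URLs for illustration:

    ok      = "git://git.example.com/repo.git;protocol=https;branch=main"
    also_ok = "git://git.example.com/repo.git;protocol=https;nobranch=1"
    # Neither parameter present -> bb.fetch2.ParameterError at parse time.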
@@ -207,8 +196,11 @@ class Git(FetchMethod): | |||
207 | if ud.bareclone: | 196 | if ud.bareclone: |
208 | ud.cloneflags += " --mirror" | 197 | ud.cloneflags += " --mirror" |
209 | 198 | ||
199 | ud.shallow_skip_fast = False | ||
210 | ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" | 200 | ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" |
211 | ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() | 201 | ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() |
202 | if 'tag' in ud.parm: | ||
203 | ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag']) | ||
212 | 204 | ||
213 | depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") | 205 | depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") |
214 | if depth_default is not None: | 206 | if depth_default is not None: |
@@ -225,32 +217,27 @@ class Git(FetchMethod): | |||
225 | 217 | ||
226 | revs_default = d.getVar("BB_GIT_SHALLOW_REVS") | 218 | revs_default = d.getVar("BB_GIT_SHALLOW_REVS") |
227 | ud.shallow_revs = [] | 219 | ud.shallow_revs = [] |
228 | ud.branches = {} | 220 | |
229 | for pos, name in enumerate(ud.names): | 221 | ud.unresolvedrev = ud.branch |
230 | branch = branches[pos] | 222 | |
231 | ud.branches[name] = branch | 223 | shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name) |
232 | ud.unresolvedrev[name] = branch | 224 | if shallow_depth is not None: |
233 | 225 | try: | |
234 | shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) | 226 | shallow_depth = int(shallow_depth or 0) |
235 | if shallow_depth is not None: | 227 | except ValueError: |
236 | try: | 228 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth)) |
237 | shallow_depth = int(shallow_depth or 0) | 229 | else: |
238 | except ValueError: | 230 | if shallow_depth < 0: |
239 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) | 231 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth)) |
240 | else: | 232 | ud.shallow_depths[ud.name] = shallow_depth |
241 | if shallow_depth < 0: | 233 | |
242 | raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) | 234 | revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name) |
243 | ud.shallow_depths[name] = shallow_depth | 235 | if revs is not None: |
244 | 236 | ud.shallow_revs.extend(revs.split()) | |
245 | revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) | 237 | elif revs_default is not None: |
246 | if revs is not None: | 238 | ud.shallow_revs.extend(revs_default.split()) |
247 | ud.shallow_revs.extend(revs.split()) | 239 | |
248 | elif revs_default is not None: | 240 | if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0: |
249 | ud.shallow_revs.extend(revs_default.split()) | ||
250 | |||
251 | if (ud.shallow and | ||
252 | not ud.shallow_revs and | ||
253 | all(ud.shallow_depths[n] == 0 for n in ud.names)): | ||
254 | # Shallow disabled for this URL | 241 | # Shallow disabled for this URL |
255 | ud.shallow = False | 242 | ud.shallow = False |
256 | 243 | ||
@@ -259,10 +246,9 @@ class Git(FetchMethod): | |||
259 | # rev of this repository. This will get resolved into a revision | 246 | # rev of this repository. This will get resolved into a revision |
260 | # later. If an actual revision happens to have also been provided | 247 | # later. If an actual revision happens to have also been provided |
261 | # then this setting will be overridden. | 248 | # then this setting will be overridden. |
262 | for name in ud.names: | 249 | ud.unresolvedrev = 'HEAD' |
263 | ud.unresolvedrev[name] = 'HEAD' | ||
264 | 250 | ||
265 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all" | 251 | ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin" |
266 | 252 | ||
267 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" | 253 | write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" |
268 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable | 254 | ud.write_tarballs = write_tarballs != "0" or ud.rebaseable |
@@ -270,12 +256,11 @@ class Git(FetchMethod): | |||
270 | 256 | ||
271 | ud.setup_revisions(d) | 257 | ud.setup_revisions(d) |
272 | 258 | ||
273 | for name in ud.names: | 259 | # Ensure any revision that doesn't look like a SHA-1 is translated into one |
274 | # Ensure any revision that doesn't look like a SHA-1 is translated into one | 260 | if not sha1_re.match(ud.revision or ''): |
275 | if not sha1_re.match(ud.revisions[name] or ''): | 261 | if ud.revision: |
276 | if ud.revisions[name]: | 262 | ud.unresolvedrev = ud.revision |
277 | ud.unresolvedrev[name] = ud.revisions[name] | 263 | ud.revision = self.latest_revision(ud, d, ud.name) |
278 | ud.revisions[name] = self.latest_revision(ud, d, name) | ||
279 | 264 | ||
280 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) | 265 | gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) |
281 | if gitsrcname.startswith('.'): | 266 | if gitsrcname.startswith('.'): |
@@ -286,8 +271,7 @@ class Git(FetchMethod): | |||
286 | # upstream repo in the future, the mirror will remain intact and still | 271 | # upstream repo in the future, the mirror will remain intact and still |
287 | # contain the revision | 272 | # contain the revision |
288 | if ud.rebaseable: | 273 | if ud.rebaseable: |
289 | for name in ud.names: | 274 | gitsrcname = gitsrcname + '_' + ud.revision |
290 | gitsrcname = gitsrcname + '_' + ud.revisions[name] | ||
291 | 275 | ||
292 | dl_dir = d.getVar("DL_DIR") | 276 | dl_dir = d.getVar("DL_DIR") |
293 | gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") | 277 | gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") |
@@ -305,15 +289,14 @@ class Git(FetchMethod): | |||
305 | if ud.shallow_revs: | 289 | if ud.shallow_revs: |
306 | tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) | 290 | tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) |
307 | 291 | ||
308 | for name, revision in sorted(ud.revisions.items()): | 292 | tarballname = "%s_%s" % (tarballname, ud.revision[:7]) |
309 | tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) | 293 | depth = ud.shallow_depths[ud.name] |
310 | depth = ud.shallow_depths[name] | 294 | if depth: |
311 | if depth: | 295 | tarballname = "%s-%s" % (tarballname, depth) |
312 | tarballname = "%s-%s" % (tarballname, depth) | ||
313 | 296 | ||
314 | shallow_refs = [] | 297 | shallow_refs = [] |
315 | if not ud.nobranch: | 298 | if not ud.nobranch: |
316 | shallow_refs.extend(ud.branches.values()) | 299 | shallow_refs.append(ud.branch) |
317 | if ud.shallow_extra_refs: | 300 | if ud.shallow_extra_refs: |
318 | shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) | 301 | shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) |
319 | if shallow_refs: | 302 | if shallow_refs: |
@@ -338,18 +321,19 @@ class Git(FetchMethod): | |||
338 | return True | 321 | return True |
339 | if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): | 322 | if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): |
340 | return True | 323 | return True |
341 | for name in ud.names: | 324 | if not self._contains_ref(ud, d, ud.name, ud.clonedir): |
342 | if not self._contains_ref(ud, d, name, ud.clonedir): | 325 | return True |
343 | return True | ||
344 | return False | 326 | return False |
345 | 327 | ||
346 | def lfs_need_update(self, ud, d): | 328 | def lfs_need_update(self, ud, d): |
329 | if not self._need_lfs(ud): | ||
330 | return False | ||
331 | |||
347 | if self.clonedir_need_update(ud, d): | 332 | if self.clonedir_need_update(ud, d): |
348 | return True | 333 | return True |
349 | 334 | ||
350 | for name in ud.names: | 335 | if not self._lfs_objects_downloaded(ud, d, ud.clonedir): |
351 | if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir): | 336 | return True |
352 | return True | ||
353 | return False | 337 | return False |
354 | 338 | ||
355 | def clonedir_need_shallow_revs(self, ud, d): | 339 | def clonedir_need_shallow_revs(self, ud, d): |
@@ -366,6 +350,13 @@ class Git(FetchMethod): | |||
366 | def tarball_need_update(self, ud): | 350 | def tarball_need_update(self, ud): |
367 | return ud.write_tarballs and not os.path.exists(ud.fullmirror) | 351 | return ud.write_tarballs and not os.path.exists(ud.fullmirror) |
368 | 352 | ||
353 | def update_mirror_links(self, ud, origud): | ||
354 | super().update_mirror_links(ud, origud) | ||
355 | # When using shallow mode, add a symlink to the original fullshallow | ||
356 | # path to ensure a valid symlink even in the `PREMIRRORS` case | ||
357 | if ud.shallow and not os.path.exists(origud.fullshallow): | ||
358 | self.ensure_symlink(ud.localpath, origud.fullshallow) | ||
359 | |||
369 | def try_premirror(self, ud, d): | 360 | def try_premirror(self, ud, d): |
370 | # If we don't do this, updating an existing checkout with only premirrors | 361 | # If we don't do this, updating an existing checkout with only premirrors |
371 | # is not possible | 362 | # is not possible |
@@ -446,6 +437,24 @@ class Git(FetchMethod): | |||
446 | if ud.proto.lower() != 'file': | 437 | if ud.proto.lower() != 'file': |
447 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) | 438 | bb.fetch2.check_network_access(d, clone_cmd, ud.url) |
448 | progresshandler = GitProgressHandler(d) | 439 | progresshandler = GitProgressHandler(d) |
440 | |||
441 | # Try creating a fast initial shallow clone | ||
442 | # Enabling ud.shallow_skip_fast will skip this | ||
443 | # If the Git error "Server does not allow request for unadvertised object" | ||
444 | # occurs, shallow_skip_fast is enabled automatically. | ||
445 | # This may happen if the Git server does not allow the request | ||
446 | # or if the Git client has issues with this functionality. | ||
447 | if ud.shallow and not ud.shallow_skip_fast: | ||
448 | try: | ||
449 | self.clone_shallow_with_tarball(ud, d) | ||
450 | # When the shallow clone has succeeded, use the shallow tarball | ||
451 | ud.localpath = ud.fullshallow | ||
452 | return | ||
453 | except: | ||
454 | logger.warning("Creating fast initial shallow clone failed, trying an initial regular clone now.") | ||
455 | |||
456 | # When skipping fast initial shallow or the fast initial shallow clone failed: | ||
457 | # Try again with an initial regular clone | ||
449 | runfetchcmd(clone_cmd, d, log=progresshandler) | 458 | runfetchcmd(clone_cmd, d, log=progresshandler) |
450 | 459 | ||
451 | # Update the checkout if needed | 460 | # Update the checkout if needed |
@@ -473,9 +482,8 @@ class Git(FetchMethod): | |||
473 | if exc.errno != errno.ENOENT: | 482 | if exc.errno != errno.ENOENT: |
474 | raise | 483 | raise |
475 | 484 | ||
476 | for name in ud.names: | 485 | if not self._contains_ref(ud, d, ud.name, ud.clonedir): |
477 | if not self._contains_ref(ud, d, name, ud.clonedir): | 486 | raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch)) |
478 | raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name])) | ||
479 | 487 | ||
480 | if ud.shallow and ud.write_shallow_tarballs: | 488 | if ud.shallow and ud.write_shallow_tarballs: |
481 | missing_rev = self.clonedir_need_shallow_revs(ud, d) | 489 | missing_rev = self.clonedir_need_shallow_revs(ud, d) |
@@ -483,128 +491,168 @@ class Git(FetchMethod): | |||
483 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) | 491 | raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) |
484 | 492 | ||
485 | if self.lfs_need_update(ud, d): | 493 | if self.lfs_need_update(ud, d): |
486 | # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching | 494 | self.lfs_fetch(ud, d, ud.clonedir, ud.revision) |
487 | # of all LFS blobs needed at the srcrev. | ||
488 | # | ||
489 | # It would be nice to just do this inline here by running 'git-lfs fetch' | ||
490 | # on the bare clonedir, but that operation requires a working copy on some | ||
491 | # releases of Git LFS. | ||
492 | with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir: | ||
493 | # Do the checkout. This implicitly involves a Git LFS fetch. | ||
494 | Git.unpack(self, ud, tmpdir, d) | ||
495 | |||
496 | # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into | ||
497 | # the bare clonedir. | ||
498 | # | ||
499 | # As this procedure is invoked repeatedly on incremental fetches as | ||
500 | # a recipe's SRCREV is bumped throughout its lifetime, this will | ||
501 | # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs | ||
502 | # corresponding to all the blobs reachable from the different revs | ||
503 | # fetched across time. | ||
504 | # | ||
505 | # Only do this if the unpack resulted in a .git/lfs directory being | ||
506 | # created; this only happens if at least one blob needed to be | ||
507 | # downloaded. | ||
508 | if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")): | ||
509 | runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir) | ||
510 | |||
511 | def build_mirror_data(self, ud, d): | ||
512 | 495 | ||
513 | # Create as a temp file and move atomically into position to avoid races | 496 | def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None): |
514 | @contextmanager | 497 | """Helper method for fetching Git LFS data""" |
515 | def create_atomic(filename): | 498 | try: |
516 | fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) | 499 | if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision): |
517 | try: | 500 | self._ensure_git_lfs(d, ud) |
518 | yield tfile | 501 | |
519 | umask = os.umask(0o666) | 502 | # Using worktree with the revision because .lfsconfig may exist |
520 | os.umask(umask) | 503 | worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision) |
521 | os.chmod(tfile, (0o666 & ~umask)) | 504 | runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir) |
522 | os.rename(tfile, filename) | 505 | lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "") |
523 | finally: | 506 | runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt")) |
524 | os.close(fd) | 507 | worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd |
508 | runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir) | ||
509 | except: | ||
510 | logger.warning("Fetching LFS did not succeed.") | ||
511 | |||
512 | @contextmanager | ||
513 | def create_atomic(self, filename): | ||
514 | """Create as a temp file and move atomically into position to avoid races""" | ||
515 | fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) | ||
516 | try: | ||
517 | yield tfile | ||
518 | umask = os.umask(0o666) | ||
519 | os.umask(umask) | ||
520 | os.chmod(tfile, (0o666 & ~umask)) | ||
521 | os.rename(tfile, filename) | ||
522 | finally: | ||
523 | os.close(fd) | ||
525 | 524 | ||
525 | def build_mirror_data(self, ud, d): | ||
526 | if ud.shallow and ud.write_shallow_tarballs: | 526 | if ud.shallow and ud.write_shallow_tarballs: |
527 | if not os.path.exists(ud.fullshallow): | 527 | if not os.path.exists(ud.fullshallow): |
528 | if os.path.islink(ud.fullshallow): | 528 | if os.path.islink(ud.fullshallow): |
529 | os.unlink(ud.fullshallow) | 529 | os.unlink(ud.fullshallow) |
530 | tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | 530 | self.clone_shallow_with_tarball(ud, d) |
531 | shallowclone = os.path.join(tempdir, 'git') | ||
532 | try: | ||
533 | self.clone_shallow_local(ud, shallowclone, d) | ||
534 | |||
535 | logger.info("Creating tarball of git repository") | ||
536 | with create_atomic(ud.fullshallow) as tfile: | ||
537 | runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone) | ||
538 | runfetchcmd("touch %s.done" % ud.fullshallow, d) | ||
539 | finally: | ||
540 | bb.utils.remove(tempdir, recurse=True) | ||
541 | elif ud.write_tarballs and not os.path.exists(ud.fullmirror): | 531 | elif ud.write_tarballs and not os.path.exists(ud.fullmirror): |
542 | if os.path.islink(ud.fullmirror): | 532 | if os.path.islink(ud.fullmirror): |
543 | os.unlink(ud.fullmirror) | 533 | os.unlink(ud.fullmirror) |
544 | 534 | ||
545 | logger.info("Creating tarball of git repository") | 535 | logger.info("Creating tarball of git repository") |
546 | with create_atomic(ud.fullmirror) as tfile: | 536 | with self.create_atomic(ud.fullmirror) as tfile: |
547 | mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, | 537 | mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, |
548 | quiet=True, workdir=ud.clonedir) | 538 | quiet=True, workdir=ud.clonedir) |
549 | runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." | 539 | runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." |
550 | % (tfile, mtime), d, workdir=ud.clonedir) | 540 | % (tfile, mtime), d, workdir=ud.clonedir) |
551 | runfetchcmd("touch %s.done" % ud.fullmirror, d) | 541 | runfetchcmd("touch %s.done" % ud.fullmirror, d) |
552 | 542 | ||
543 | def clone_shallow_with_tarball(self, ud, d): | ||
544 | ret = False | ||
545 | tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) | ||
546 | shallowclone = os.path.join(tempdir, 'git') | ||
547 | try: | ||
548 | try: | ||
549 | self.clone_shallow_local(ud, shallowclone, d) | ||
550 | except: | ||
551 | logger.warning("Fast shallow clone failed, retrying with fast mode skipped.") | ||
552 | bb.utils.remove(tempdir, recurse=True) | ||
553 | os.mkdir(tempdir) | ||
554 | ud.shallow_skip_fast = True | ||
555 | self.clone_shallow_local(ud, shallowclone, d) | ||
556 | logger.info("Creating tarball of git repository") | ||
557 | with self.create_atomic(ud.fullshallow) as tfile: | ||
558 | runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone) | ||
559 | runfetchcmd("touch %s.done" % ud.fullshallow, d) | ||
560 | ret = True | ||
561 | finally: | ||
562 | bb.utils.remove(tempdir, recurse=True) | ||
563 | |||
564 | return ret | ||
565 | |||
553 | def clone_shallow_local(self, ud, dest, d): | 566 | def clone_shallow_local(self, ud, dest, d): |
554 | """Clone the repo and make it shallow. | 567 | """ |
568 | Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default): | ||
569 | - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev | ||
570 | - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev | ||
571 | """ | ||
555 | 572 | ||
556 | The upstream url of the new clone isn't set at this time, as it'll be | 573 | progresshandler = GitProgressHandler(d) |
557 | set correctly when unpacked.""" | 574 | repourl = self._get_repo_url(ud) |
558 | runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) | 575 | bb.utils.mkdirhier(dest) |
576 | init_cmd = "%s init -q" % ud.basecmd | ||
577 | if ud.bareclone: | ||
578 | init_cmd += " --bare" | ||
579 | runfetchcmd(init_cmd, d, workdir=dest) | ||
580 | # Use repourl when creating a fast initial shallow clone | ||
581 | # Prefer already existing full bare clones if available | ||
582 | if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir): | ||
583 | remote = shlex.quote(repourl) | ||
584 | else: | ||
585 | remote = ud.clonedir | ||
586 | runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest) | ||
559 | 587 | ||
560 | to_parse, shallow_branches = [], [] | 588 | # Check the histories which should be excluded |
561 | for name in ud.names: | 589 | shallow_exclude = '' |
562 | revision = ud.revisions[name] | 590 | for revision in ud.shallow_revs: |
563 | depth = ud.shallow_depths[name] | 591 | shallow_exclude += " --shallow-exclude=%s" % revision |
564 | if depth: | ||
565 | to_parse.append('%s~%d^{}' % (revision, depth - 1)) | ||
566 | 592 | ||
567 | # For nobranch, we need a ref, otherwise the commits will be | 593 | revision = ud.revision |
568 | # removed, and for non-nobranch, we truncate the branch to our | 594 | depth = ud.shallow_depths[ud.name] |
569 | # srcrev, to avoid keeping unnecessary history beyond that. | ||
570 | branch = ud.branches[name] | ||
571 | if ud.nobranch: | ||
572 | ref = "refs/shallow/%s" % name | ||
573 | elif ud.bareclone: | ||
574 | ref = "refs/heads/%s" % branch | ||
575 | else: | ||
576 | ref = "refs/remotes/origin/%s" % branch | ||
577 | 595 | ||
578 | shallow_branches.append(ref) | 596 | # The --depth and --shallow-exclude can't be used together |
579 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) | 597 | if depth and shallow_exclude: |
598 | raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.") | ||
599 | |||
600 | # For nobranch, we need a ref, otherwise the commits will be | ||
601 | # removed, and for non-nobranch, we truncate the branch to our | ||
602 | # srcrev, to avoid keeping unnecessary history beyond that. | ||
603 | branch = ud.branch | ||
604 | if ud.nobranch: | ||
605 | ref = "refs/shallow/%s" % ud.name | ||
606 | elif ud.bareclone: | ||
607 | ref = "refs/heads/%s" % branch | ||
608 | else: | ||
609 | ref = "refs/remotes/origin/%s" % branch | ||
610 | |||
611 | fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision) | ||
612 | if depth: | ||
613 | fetch_cmd += " --depth %s" % depth | ||
614 | |||
615 | if shallow_exclude: | ||
616 | fetch_cmd += shallow_exclude | ||
580 | 617 | ||
581 | # Map srcrev+depths to revisions | 618 | # Advertise the revision for older git versions such as 2.25.1: |
582 | parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) | 619 | # error: Server does not allow request for unadvertised object. |
620 | # The ud.clonedir is a local temporary dir; it will be removed when | ||
621 | # the fetch is done, so we can do anything on it. | ||
622 | adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision) | ||
623 | if ud.shallow_skip_fast: | ||
624 | runfetchcmd(adv_cmd, d, workdir=ud.clonedir) | ||
583 | 625 | ||
584 | # Resolve specified revisions | 626 | runfetchcmd(fetch_cmd, d, workdir=dest) |
585 | parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) | 627 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) |
586 | shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() | 628 | # Fetch Git LFS data |
629 | self.lfs_fetch(ud, d, dest, ud.revision) | ||
587 | 630 | ||
588 | # Apply extra ref wildcards | 631 | # Apply extra ref wildcards |
589 | all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, | 632 | all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \ |
590 | d, workdir=dest).splitlines() | 633 | d, workdir=dest).splitlines() |
634 | all_refs = [] | ||
635 | for line in all_refs_remote: | ||
636 | all_refs.append(line.split()[-1]) | ||
637 | extra_refs = [] | ||
591 | for r in ud.shallow_extra_refs: | 638 | for r in ud.shallow_extra_refs: |
592 | if not ud.bareclone: | 639 | if not ud.bareclone: |
593 | r = r.replace('refs/heads/', 'refs/remotes/origin/') | 640 | r = r.replace('refs/heads/', 'refs/remotes/origin/') |
594 | 641 | ||
595 | if '*' in r: | 642 | if '*' in r: |
596 | matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) | 643 | matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) |
597 | shallow_branches.extend(matches) | 644 | extra_refs.extend(matches) |
598 | else: | 645 | else: |
599 | shallow_branches.append(r) | 646 | extra_refs.append(r) |
600 | 647 | ||
601 | # Make the repository shallow | 648 | for ref in extra_refs: |
602 | shallow_cmd = [self.make_shallow_path, '-s'] | 649 | ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '') |
603 | for b in shallow_branches: | 650 | runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest) |
604 | shallow_cmd.append('-r') | 651 | revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest) |
605 | shallow_cmd.append(b) | 652 | runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) |
606 | shallow_cmd.extend(shallow_revisions) | 653 | |
607 | runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest) | 654 | # The url is local ud.clonedir, set it to upstream one |
655 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest) | ||
608 | 656 | ||
609 | def unpack(self, ud, destdir, d): | 657 | def unpack(self, ud, destdir, d): |
610 | """ unpack the downloaded src to destdir""" | 658 | """ unpack the downloaded src to destdir""" |
@@ -612,7 +660,7 @@ class Git(FetchMethod): | |||
612 | subdir = ud.parm.get("subdir") | 660 | subdir = ud.parm.get("subdir") |
613 | subpath = ud.parm.get("subpath") | 661 | subpath = ud.parm.get("subpath") |
614 | readpathspec = "" | 662 | readpathspec = "" |
615 | def_destsuffix = "git/" | 663 | def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/" |
616 | 664 | ||
617 | if subpath: | 665 | if subpath: |
618 | readpathspec = ":%s" % subpath | 666 | readpathspec = ":%s" % subpath |
@@ -664,30 +712,43 @@ class Git(FetchMethod): | |||
664 | if not source_found: | 712 | if not source_found: |
665 | raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) | 713 | raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) |
666 | 714 | ||
715 | # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag | ||
716 | # matches the revision | ||
717 | if 'tag' in ud.parm and sha1_re.match(ud.revision): | ||
718 | output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir) | ||
719 | output = output.strip() | ||
720 | if output != ud.revision: | ||
721 | # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list | ||
722 | # If it resolves to the same thing there isn't a problem. | ||
723 | output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir) | ||
724 | output2 = output2.strip() | ||
725 | if output != output2: | ||
726 | raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url) | ||
727 | |||
667 | repourl = self._get_repo_url(ud) | 728 | repourl = self._get_repo_url(ud) |
668 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) | 729 | runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) |
669 | 730 | ||
670 | if self._contains_lfs(ud, d, destdir): | 731 | if self._contains_lfs(ud, d, destdir): |
671 | if need_lfs and not self._find_git_lfs(d): | 732 | if not need_lfs: |
672 | raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl)) | ||
673 | elif not need_lfs: | ||
674 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) | 733 | bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) |
675 | else: | 734 | else: |
735 | self._ensure_git_lfs(d, ud) | ||
736 | |||
676 | runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) | 737 | runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) |
677 | 738 | ||
678 | if not ud.nocheckout: | 739 | if not ud.nocheckout: |
679 | if subpath: | 740 | if subpath: |
680 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, | 741 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d, |
681 | workdir=destdir) | 742 | workdir=destdir) |
682 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) | 743 | runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) |
683 | elif not ud.nobranch: | 744 | elif not ud.nobranch: |
684 | branchname = ud.branches[ud.names[0]] | 745 | branchname = ud.branch |
685 | runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ | 746 | runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ |
686 | ud.revisions[ud.names[0]]), d, workdir=destdir) | 747 | ud.revision), d, workdir=destdir) |
687 | runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ | 748 | runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ |
688 | branchname), d, workdir=destdir) | 749 | branchname), d, workdir=destdir) |
689 | else: | 750 | else: |
690 | runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir) | 751 | runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir) |
691 | 752 | ||
692 | return True | 753 | return True |
693 | 754 | ||
@@ -701,8 +762,13 @@ class Git(FetchMethod): | |||
701 | clonedir = os.path.realpath(ud.localpath) | 762 | clonedir = os.path.realpath(ud.localpath) |
702 | to_remove.append(clonedir) | 763 | to_remove.append(clonedir) |
703 | 764 | ||
765 | # Remove shallow mirror tarball | ||
766 | if ud.shallow: | ||
767 | to_remove.append(ud.fullshallow) | ||
768 | to_remove.append(ud.fullshallow + ".done") | ||
769 | |||
704 | for r in to_remove: | 770 | for r in to_remove: |
705 | if os.path.exists(r): | 771 | if os.path.exists(r) or os.path.islink(r): |
706 | bb.note('Removing %s' % r) | 772 | bb.note('Removing %s' % r) |
707 | bb.utils.remove(r, True) | 773 | bb.utils.remove(r, True) |
708 | 774 | ||
@@ -713,10 +779,10 @@ class Git(FetchMethod): | |||
713 | cmd = "" | 779 | cmd = "" |
714 | if ud.nobranch: | 780 | if ud.nobranch: |
715 | cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( | 781 | cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( |
716 | ud.basecmd, ud.revisions[name]) | 782 | ud.basecmd, ud.revision) |
717 | else: | 783 | else: |
718 | cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( | 784 | cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( |
719 | ud.basecmd, ud.revisions[name], ud.branches[name]) | 785 | ud.basecmd, ud.revision, ud.branch) |
720 | try: | 786 | try: |
721 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) | 787 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) |
722 | except bb.fetch2.FetchError: | 788 | except bb.fetch2.FetchError: |
@@ -725,19 +791,21 @@ class Git(FetchMethod): | |||
725 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) | 791 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) |
726 | return output.split()[0] != "0" | 792 | return output.split()[0] != "0" |
727 | 793 | ||
728 | def _lfs_objects_downloaded(self, ud, d, name, wd): | 794 | def _lfs_objects_downloaded(self, ud, d, wd): |
729 | """ | 795 | """ |
730 | Verifies whether the LFS objects for requested revisions have already been downloaded | 796 | Verifies whether the LFS objects for requested revisions have already been downloaded |
731 | """ | 797 | """ |
732 | # Bail out early if this repository doesn't use LFS | 798 | # Bail out early if this repository doesn't use LFS |
733 | if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd): | 799 | if not self._contains_lfs(ud, d, wd): |
734 | return True | 800 | return True |
735 | 801 | ||
802 | self._ensure_git_lfs(d, ud) | ||
803 | |||
736 | # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file | 804 | # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file |
737 | # existence. | 805 | # existence. |
738 | # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git | 806 | # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git |
739 | cmd = "%s lfs ls-files -l %s" \ | 807 | cmd = "%s lfs ls-files -l %s" \ |
740 | % (ud.basecmd, ud.revisions[name]) | 808 | % (ud.basecmd, ud.revision) |
741 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() | 809 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() |
742 | # Do not do any further matching if no objects are managed by LFS | 810 | # Do not do any further matching if no objects are managed by LFS |
743 | if not output: | 811 | if not output: |
@@ -761,18 +829,8 @@ class Git(FetchMethod): | |||
761 | """ | 829 | """ |
762 | Check if the repository has 'lfs' (large file) content | 830 | Check if the repository has 'lfs' (large file) content |
763 | """ | 831 | """ |
764 | |||
765 | if ud.nobranch: | ||
766 | # If no branch is specified, use the current git commit | ||
767 | refname = self._build_revision(ud, d, ud.names[0]) | ||
768 | elif wd == ud.clonedir: | ||
769 | # The bare clonedir doesn't use the remote names; it has the branch immediately. | ||
770 | refname = ud.branches[ud.names[0]] | ||
771 | else: | ||
772 | refname = "origin/%s" % ud.branches[ud.names[0]] | ||
773 | |||
774 | cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( | 832 | cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( |
775 | ud.basecmd, refname) | 833 | ud.basecmd, ud.revision) |
776 | 834 | ||
777 | try: | 835 | try: |
778 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) | 836 | output = runfetchcmd(cmd, d, quiet=True, workdir=wd) |
@@ -782,12 +840,14 @@ class Git(FetchMethod): | |||
782 | pass | 840 | pass |
783 | return False | 841 | return False |
784 | 842 | ||
785 | def _find_git_lfs(self, d): | 843 | def _ensure_git_lfs(self, d, ud): |
786 | """ | 844 | """ |
787 | Return True if git-lfs can be found, False otherwise. | 845 | Ensures that git-lfs is available, raising a FetchError if it isn't. |
788 | """ | 846 | """ |
789 | import shutil | 847 | if shutil.which("git-lfs", path=d.getVar('PATH')) is None: |
790 | return shutil.which("git-lfs", path=d.getVar('PATH')) is not None | 848 | raise bb.fetch2.FetchError( |
849 | "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 " | ||
850 | "to ignore it)" % self._get_repo_url(ud)) | ||
791 | 851 | ||
792 | def _get_repo_url(self, ud): | 852 | def _get_repo_url(self, ud): |
793 | """ | 853 | """ |
@@ -795,21 +855,21 @@ class Git(FetchMethod): | |||
795 | """ | 855 | """ |
796 | # Note that we do not support passwords directly in the git urls. There are several | 856 | # Note that we do not support passwords directly in the git urls. There are several |
797 | # reasons. SRC_URI can be written out to things like buildhistory and people don't | 857 | # reasons. SRC_URI can be written out to things like buildhistory and people don't |
798 | # want to leak passwords like that. It's also all too easy to share metadata without | 858 | # want to leak passwords like that. It's also all too easy to share metadata without |
799 | # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as | 859 | # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as |
800 | # alternatives so we will not take patches adding password support here. | 860 | # alternatives so we will not take patches adding password support here. |
801 | if ud.user: | 861 | if ud.user: |
802 | username = ud.user + '@' | 862 | username = ud.user + '@' |
803 | else: | 863 | else: |
804 | username = "" | 864 | username = "" |
805 | return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) | 865 | return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path)) |
806 | 866 | ||
807 | def _revision_key(self, ud, d, name): | 867 | def _revision_key(self, ud, d, name): |
808 | """ | 868 | """ |
809 | Return a unique key for the url | 869 | Return a unique key for the url |
810 | """ | 870 | """ |
811 | # Collapse adjacent slashes | 871 | # Collapse adjacent slashes |
812 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] | 872 | return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev |
813 | 873 | ||
814 | def _lsremote(self, ud, d, search): | 874 | def _lsremote(self, ud, d, search): |
815 | """ | 875 | """ |
@@ -842,26 +902,26 @@ class Git(FetchMethod): | |||
842 | Compute the HEAD revision for the url | 902 | Compute the HEAD revision for the url |
843 | """ | 903 | """ |
844 | if not d.getVar("__BBSRCREV_SEEN"): | 904 | if not d.getVar("__BBSRCREV_SEEN"): |
845 | raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path)) | 905 | raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path)) |
846 | 906 | ||
847 | # Ensure we mark as not cached | 907 | # Ensure we mark as not cached |
848 | bb.fetch2.mark_recipe_nocache(d) | 908 | bb.fetch2.mark_recipe_nocache(d) |
849 | 909 | ||
850 | output = self._lsremote(ud, d, "") | 910 | output = self._lsremote(ud, d, "") |
851 | # Tags of the form ^{} may not work, need to fall back to the other form | 911 | # Tags of the form ^{} may not work, need to fall back to the other form |
852 | if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: | 912 | if ud.unresolvedrev[:5] == "refs/" or ud.usehead: |
853 | head = ud.unresolvedrev[name] | 913 | head = ud.unresolvedrev |
854 | tag = ud.unresolvedrev[name] | 914 | tag = ud.unresolvedrev |
855 | else: | 915 | else: |
856 | head = "refs/heads/%s" % ud.unresolvedrev[name] | 916 | head = "refs/heads/%s" % ud.unresolvedrev |
857 | tag = "refs/tags/%s" % ud.unresolvedrev[name] | 917 | tag = "refs/tags/%s" % ud.unresolvedrev |
858 | for s in [head, tag + "^{}", tag]: | 918 | for s in [head, tag + "^{}", tag]: |
859 | for l in output.strip().split('\n'): | 919 | for l in output.strip().split('\n'): |
860 | sha1, ref = l.split() | 920 | sha1, ref = l.split() |
861 | if s == ref: | 921 | if s == ref: |
862 | return sha1 | 922 | return sha1 |
863 | raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ | 923 | raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ |
864 | (ud.unresolvedrev[name], ud.host+ud.path)) | 924 | (ud.unresolvedrev, ud.host+ud.path)) |
865 | 925 | ||
866 | def latest_versionstring(self, ud, d): | 926 | def latest_versionstring(self, ud, d): |
867 | """ | 927 | """ |
@@ -912,23 +972,22 @@ class Git(FetchMethod): | |||
912 | return pupver | 972 | return pupver |
913 | 973 | ||
914 | def _build_revision(self, ud, d, name): | 974 | def _build_revision(self, ud, d, name): |
915 | return ud.revisions[name] | 975 | return ud.revision |
916 | 976 | ||
917 | def gitpkgv_revision(self, ud, d, name): | 977 | def gitpkgv_revision(self, ud, d, name): |
918 | """ | 978 | """ |
919 | Return a sortable revision number by counting commits in the history | 979 | Return a sortable revision number by counting commits in the history |
920 | Based on gitpkgv.bbclass in meta-openembedded | 980 | Based on gitpkgv.bbclass in meta-openembedded |
921 | """ | 981 | """ |
922 | rev = self._build_revision(ud, d, name) | 982 | rev = ud.revision |
923 | localpath = ud.localpath | 983 | localpath = ud.localpath |
924 | rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) | 984 | rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) |
925 | if not os.path.exists(localpath): | 985 | if not os.path.exists(localpath): |
926 | commits = None | 986 | commits = None |
927 | else: | 987 | else: |
928 | if not os.path.exists(rev_file) or not os.path.getsize(rev_file): | 988 | if not os.path.exists(rev_file) or not os.path.getsize(rev_file): |
929 | from pipes import quote | ||
930 | commits = bb.fetch2.runfetchcmd( | 989 | commits = bb.fetch2.runfetchcmd( |
931 | "git rev-list %s -- | wc -l" % quote(rev), | 990 | "git rev-list %s -- | wc -l" % shlex.quote(rev), |
932 | d, quiet=True).strip().lstrip('0') | 991 | d, quiet=True).strip().lstrip('0') |
933 | if commits: | 992 | if commits: |
934 | open(rev_file, "w").write("%d\n" % int(commits)) | 993 | open(rev_file, "w").write("%d\n" % int(commits)) |
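
The tag-verification hunk added to unpack() above guards against a tag quietly moving away from a pinned SRCREV: both the tag and the pinned revision are resolved through "git rev-list -n 1", which maps an annotated tag object to the commit it points at. A minimal standalone sketch of the same check, outside bitbake's runfetchcmd plumbing (the helper names here are illustrative, not bitbake API):

    import subprocess

    def rev_list(repo_dir, ref):
        # Equivalent of "git rev-list -n 1 <ref>": the commit a ref points at.
        out = subprocess.check_output(["git", "rev-list", "-n", "1", ref], cwd=repo_dir)
        return out.decode().strip()

    def check_tag_matches_srcrev(repo_dir, tag, revision):
        # An annotated tag has its own object hash, so the pinned revision may
        # name the tag object rather than the commit it points at; resolving
        # both sides through rev-list treats that case as a match, exactly as
        # the hunk above does.
        tag_commit = rev_list(repo_dir, tag)
        if tag_commit not in (revision, rev_list(repo_dir, revision)):
            raise ValueError("tag %r resolved to %s, not SRCREV %s" % (tag, tag_commit, revision))
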
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py index f7f3af7212..5869e1b99b 100644 --- a/bitbake/lib/bb/fetch2/gitsm.py +++ b/bitbake/lib/bb/fetch2/gitsm.py | |||
@@ -62,36 +62,35 @@ class GitSM(Git): | |||
62 | return modules | 62 | return modules |
63 | 63 | ||
64 | # Collect the defined submodules, and their attributes | 64 | # Collect the defined submodules, and their attributes |
65 | for name in ud.names: | 65 | try: |
66 | gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=workdir) | ||
67 | except: | ||
68 | # No submodules to update | ||
69 | gitmodules = "" | ||
70 | |||
71 | for m, md in parse_gitmodules(gitmodules).items(): | ||
66 | try: | 72 | try: |
67 | gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir) | 73 | module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revision, md['path']), d, quiet=True, workdir=workdir) |
68 | except: | 74 | except: |
69 | # No submodules to update | 75 | # If the command fails, we don't have a valid file to check. If it doesn't |
76 | # fail -- it still might be a failure, see next check... | ||
77 | module_hash = "" | ||
78 | |||
79 | if not module_hash: | ||
80 | logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) | ||
70 | continue | 81 | continue |
71 | 82 | ||
72 | for m, md in parse_gitmodules(gitmodules).items(): | 83 | submodules.append(m) |
73 | try: | 84 | paths[m] = md['path'] |
74 | module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir) | 85 | revision[m] = ud.revision |
75 | except: | 86 | uris[m] = md['url'] |
76 | # If the command fails, we don't have a valid file to check. If it doesn't | 87 | subrevision[m] = module_hash.split()[2] |
77 | # fail -- it still might be a failure, see next check... | 88 | |
78 | module_hash = "" | 89 | # Convert relative to absolute uri based on parent uri |
79 | 90 | if uris[m].startswith('..') or uris[m].startswith('./'): | |
80 | if not module_hash: | 91 | newud = copy.copy(ud) |
81 | logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) | 92 | newud.path = os.path.normpath(os.path.join(newud.path, uris[m])) |
82 | continue | 93 | uris[m] = Git._get_repo_url(self, newud) |
83 | |||
84 | submodules.append(m) | ||
85 | paths[m] = md['path'] | ||
86 | revision[m] = ud.revisions[name] | ||
87 | uris[m] = md['url'] | ||
88 | subrevision[m] = module_hash.split()[2] | ||
89 | |||
90 | # Convert relative to absolute uri based on parent uri | ||
91 | if uris[m].startswith('..') or uris[m].startswith('./'): | ||
92 | newud = copy.copy(ud) | ||
93 | newud.path = os.path.normpath(os.path.join(newud.path, uris[m])) | ||
94 | uris[m] = Git._get_repo_url(self, newud) | ||
95 | 94 | ||
96 | for module in submodules: | 95 | for module in submodules: |
97 | # Translate the module url into a SRC_URI | 96 | # Translate the module url into a SRC_URI |
@@ -123,7 +122,7 @@ class GitSM(Git): | |||
123 | url += ";name=%s" % module | 122 | url += ";name=%s" % module |
124 | url += ";subpath=%s" % module | 123 | url += ";subpath=%s" % module |
125 | url += ";nobranch=1" | 124 | url += ";nobranch=1" |
126 | url += ";lfs=%s" % self._need_lfs(ud) | 125 | url += ";lfs=%s" % ("1" if self._need_lfs(ud) else "0") |
127 | # Note that adding "user=" here to give credentials to the | 126 | # Note that adding "user=" here to give credentials to the |
128 | # submodule is not supported. Since using SRC_URI to give git:// | 127 | # submodule is not supported. Since using SRC_URI to give git:// |
129 | # URL a password is not supported, one has to use one of the | 128 | # URL a password is not supported, one has to use one of the |
@@ -147,6 +146,22 @@ class GitSM(Git): | |||
147 | 146 | ||
148 | return submodules != [] | 147 | return submodules != [] |
149 | 148 | ||
149 | def call_process_submodules(self, ud, d, extra_check, subfunc): | ||
150 | # If we're using a shallow mirror tarball it needs to be | ||
151 | # unpacked temporarily so that we can examine the .gitmodules file | ||
152 | # Unpack even when ud.clonedir is not available, | ||
153 | # which may occur during a fast shallow clone | ||
154 | unpack = extra_check or not os.path.exists(ud.clonedir) | ||
155 | if ud.shallow and os.path.exists(ud.fullshallow) and unpack: | ||
156 | tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR")) | ||
157 | try: | ||
158 | runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir) | ||
159 | self.process_submodules(ud, tmpdir, subfunc, d) | ||
160 | finally: | ||
161 | shutil.rmtree(tmpdir) | ||
162 | else: | ||
163 | self.process_submodules(ud, ud.clonedir, subfunc, d) | ||
164 | |||
150 | def need_update(self, ud, d): | 165 | def need_update(self, ud, d): |
151 | if Git.need_update(self, ud, d): | 166 | if Git.need_update(self, ud, d): |
152 | return True | 167 | return True |
@@ -164,15 +179,7 @@ class GitSM(Git): | |||
164 | logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e))) | 179 | logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e))) |
165 | need_update_result = True | 180 | need_update_result = True |
166 | 181 | ||
167 | # If we're using a shallow mirror tarball it needs to be unpacked | 182 | self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule) |
168 | # temporarily so that we can examine the .gitmodules file | ||
169 | if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir): | ||
170 | tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR")) | ||
171 | runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir) | ||
172 | self.process_submodules(ud, tmpdir, need_update_submodule, d) | ||
173 | shutil.rmtree(tmpdir) | ||
174 | else: | ||
175 | self.process_submodules(ud, ud.clonedir, need_update_submodule, d) | ||
176 | 183 | ||
177 | if need_update_list: | 184 | if need_update_list: |
178 | logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) | 185 | logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) |
@@ -195,16 +202,7 @@ class GitSM(Git): | |||
195 | raise | 202 | raise |
196 | 203 | ||
197 | Git.download(self, ud, d) | 204 | Git.download(self, ud, d) |
198 | 205 | self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule) | |
199 | # If we're using a shallow mirror tarball it needs to be unpacked | ||
200 | # temporarily so that we can examine the .gitmodules file | ||
201 | if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): | ||
202 | tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR")) | ||
203 | runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir) | ||
204 | self.process_submodules(ud, tmpdir, download_submodule, d) | ||
205 | shutil.rmtree(tmpdir) | ||
206 | else: | ||
207 | self.process_submodules(ud, ud.clonedir, download_submodule, d) | ||
208 | 206 | ||
209 | def unpack(self, ud, destdir, d): | 207 | def unpack(self, ud, destdir, d): |
210 | def unpack_submodules(ud, url, module, modpath, workdir, d): | 208 | def unpack_submodules(ud, url, module, modpath, workdir, d): |
@@ -247,15 +245,27 @@ class GitSM(Git): | |||
247 | ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) | 245 | ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) |
248 | 246 | ||
249 | if not ud.bareclone and ret: | 247 | if not ud.bareclone and ret: |
250 | # All submodules should already be downloaded and configured in the tree. This simply | 248 | cmdprefix = "" |
251 | # sets up the configuration and checks out the files. The main project config should | 249 | # Avoid LFS smudging (replacing the LFS pointers with the actual content) when LFS shouldn't be used but git-lfs is installed. |
252 | # remain unmodified, and no download from the internet should occur. As such, lfs smudge | 250 | if not self._need_lfs(ud): |
253 | # should also be skipped as these files were already smudged in the fetch stage if lfs | 251 | cmdprefix = "GIT_LFS_SKIP_SMUDGE=1 " |
254 | # was enabled. | 252 | runfetchcmd("%s%s submodule update --recursive --no-fetch" % (cmdprefix, ud.basecmd), d, quiet=True, workdir=ud.destdir) |
255 | runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir) | 253 | def clean(self, ud, d): |
254 | def clean_submodule(ud, url, module, modpath, workdir, d): | ||
255 | url += ";bareclone=1;nobranch=1" | ||
256 | try: | ||
257 | newfetch = Fetch([url], d, cache=False) | ||
258 | newfetch.clean() | ||
259 | except Exception as e: | ||
260 | logger.warning('gitsm: submodule clean failed: %s %s' % (type(e).__name__, str(e))) | ||
261 | |||
262 | self.call_process_submodules(ud, d, True, clean_submodule) | ||
263 | |||
264 | # Clean top git dir | ||
265 | Git.clean(self, ud, d) | ||
256 | 266 | ||
257 | def implicit_urldata(self, ud, d): | 267 | def implicit_urldata(self, ud, d): |
258 | import shutil, subprocess, tempfile | 268 | import subprocess |
259 | 269 | ||
260 | urldata = [] | 270 | urldata = [] |
261 | def add_submodule(ud, url, module, modpath, workdir, d): | 271 | def add_submodule(ud, url, module, modpath, workdir, d): |
@@ -263,14 +273,6 @@ class GitSM(Git): | |||
263 | newfetch = Fetch([url], d, cache=False) | 273 | newfetch = Fetch([url], d, cache=False) |
264 | urldata.extend(newfetch.expanded_urldata()) | 274 | urldata.extend(newfetch.expanded_urldata()) |
265 | 275 | ||
266 | # If we're using a shallow mirror tarball it needs to be unpacked | 276 | self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule) |
267 | # temporarily so that we can examine the .gitmodules file | ||
268 | if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d): | ||
269 | tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR")) | ||
270 | subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True) | ||
271 | self.process_submodules(ud, tmpdir, add_submodule, d) | ||
272 | shutil.rmtree(tmpdir) | ||
273 | else: | ||
274 | self.process_submodules(ud, ud.clonedir, add_submodule, d) | ||
275 | 277 | ||
276 | return urldata | 278 | return urldata |
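
The new call_process_submodules() helper collapses a pattern previously copy-pasted across need_update(), download() and implicit_urldata(): when only a shallow mirror tarball exists on disk, unpack it into a scratch directory, inspect .gitmodules there, and clean up afterwards. A reduced sketch of that shape, with the tarball path and callback as plain arguments (names are illustrative):

    import shutil
    import subprocess
    import tempfile

    def with_unpacked_tarball(tarball, scratch_parent, func):
        # Unpack the mirror tarball somewhere disposable, run the callback on
        # the unpacked tree, and clean up even if the callback raises -- the
        # try/finally that the refactored helper adds and the old copies lacked.
        tmpdir = tempfile.mkdtemp(dir=scratch_parent)
        try:
            subprocess.check_call(["tar", "-xzf", tarball], cwd=tmpdir)
            return func(tmpdir)
        finally:
            shutil.rmtree(tmpdir)
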
diff --git a/bitbake/lib/bb/fetch2/gomod.py b/bitbake/lib/bb/fetch2/gomod.py new file mode 100644 index 0000000000..53c1d8d115 --- /dev/null +++ b/bitbake/lib/bb/fetch2/gomod.py | |||
@@ -0,0 +1,273 @@ | |||
1 | """ | ||
2 | BitBake 'Fetch' implementation for Go modules | ||
3 | |||
4 | The gomod/gomodgit fetchers are used to download Go modules to the module cache | ||
5 | from a module proxy or directly from a version control repository. | ||
6 | |||
7 | Example SRC_URI: | ||
8 | |||
9 | SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..." | ||
10 | SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..." | ||
11 | |||
12 | Required SRC_URI parameters: | ||
13 | |||
14 | - version | ||
15 | The version of the module. | ||
16 | |||
17 | Optional SRC_URI parameters: | ||
18 | |||
19 | - mod | ||
20 | Fetch and unpack the go.mod file only instead of the complete module. | ||
21 | The go command may need to download go.mod files for many different modules | ||
22 | when computing the build list, and go.mod files are much smaller than | ||
23 | module zip files. | ||
24 | The default is "0", set mod=1 for the go.mod file only. | ||
25 | |||
26 | - sha256sum | ||
27 | The checksum of the module zip file, or the go.mod file in case of fetching | ||
28 | only the go.mod file. Alternatively, set the SRC_URI variable flag for | ||
29 | "module@version.sha256sum". | ||
30 | |||
31 | - protocol | ||
32 | The method used when fetching directly from a version control repository. | ||
33 | The default is "https" for git. | ||
34 | |||
35 | - repo | ||
36 | The URL when fetching directly from a version control repository. Required | ||
37 | when the URL is different from the module path. | ||
38 | |||
39 | - srcrev | ||
40 | The revision identifier used when fetching directly from a version control | ||
41 | repository. Alternatively, set the SRCREV variable for "module@version". | ||
42 | |||
43 | - subdir | ||
44 | The module subdirectory when fetching directly from a version control | ||
45 | repository. Required when the module is not located in the root of the | ||
46 | repository. | ||
47 | |||
48 | Related variables: | ||
49 | |||
50 | - GO_MOD_PROXY | ||
51 | The module proxy used by the fetcher. | ||
52 | |||
53 | - GO_MOD_CACHE_DIR | ||
54 | The directory where the module cache is located. | ||
55 | This must match the exported GOMODCACHE variable for the go command to find | ||
56 | the downloaded modules. | ||
57 | |||
58 | See the Go modules reference, https://go.dev/ref/mod, for more information | ||
59 | about the module cache, module proxies and version control systems. | ||
60 | """ | ||
61 | |||
62 | import hashlib | ||
63 | import os | ||
64 | import re | ||
65 | import shutil | ||
66 | import subprocess | ||
67 | import zipfile | ||
68 | |||
69 | import bb | ||
70 | from bb.fetch2 import FetchError | ||
71 | from bb.fetch2 import MissingParameterError | ||
72 | from bb.fetch2 import runfetchcmd | ||
73 | from bb.fetch2 import subprocess_setup | ||
74 | from bb.fetch2.git import Git | ||
75 | from bb.fetch2.wget import Wget | ||
76 | |||
77 | |||
78 | def escape(path): | ||
79 | """Escape capital letters using exclamation points.""" | ||
80 | return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path) | ||
81 | |||
82 | |||
83 | class GoMod(Wget): | ||
84 | """Class to fetch Go modules from a Go module proxy via wget""" | ||
85 | |||
86 | def supports(self, ud, d): | ||
87 | """Check to see if a given URL is for this fetcher.""" | ||
88 | return ud.type == 'gomod' | ||
89 | |||
90 | def urldata_init(self, ud, d): | ||
91 | """Set up to download the module from the module proxy. | ||
92 | |||
93 | Set up to download the module zip file to the module cache directory | ||
94 | and unpack the go.mod file (unless downloading only the go.mod file): | ||
95 | |||
96 | cache/download/<module>/@v/<version>.zip: The module zip file. | ||
97 | cache/download/<module>/@v/<version>.mod: The go.mod file. | ||
98 | """ | ||
99 | |||
100 | proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org' | ||
101 | moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod' | ||
102 | |||
103 | if 'version' not in ud.parm: | ||
104 | raise MissingParameterError('version', ud.url) | ||
105 | |||
106 | module = ud.host | ||
107 | if ud.path != '/': | ||
108 | module += ud.path | ||
109 | ud.parm['module'] = module | ||
110 | version = ud.parm['version'] | ||
111 | |||
112 | # Set URL and filename for wget download | ||
113 | if ud.parm.get('mod', '0') == '1': | ||
114 | ext = '.mod' | ||
115 | else: | ||
116 | ext = '.zip' | ||
117 | path = escape(f"{module}/@v/{version}{ext}") | ||
118 | ud.url = bb.fetch2.encodeurl( | ||
119 | ('https', proxy, '/' + path, None, None, None)) | ||
120 | ud.parm['downloadfilename'] = f"{module.replace('/', '.')}@{version}{ext}" | ||
121 | |||
122 | # Set name for checksum verification | ||
123 | ud.parm['name'] = f"{module}@{version}" | ||
124 | |||
125 | # Set path for unpack | ||
126 | ud.parm['unpackpath'] = os.path.join(moddir, 'cache/download', path) | ||
127 | |||
128 | super().urldata_init(ud, d) | ||
129 | |||
130 | def unpack(self, ud, rootdir, d): | ||
131 | """Unpack the module in the module cache.""" | ||
132 | |||
133 | # Unpack the module zip file or go.mod file | ||
134 | unpackpath = os.path.join(rootdir, ud.parm['unpackpath']) | ||
135 | unpackdir = os.path.dirname(unpackpath) | ||
136 | bb.utils.mkdirhier(unpackdir) | ||
137 | ud.unpack_tracer.unpack("file-copy", unpackdir) | ||
138 | cmd = f"cp {ud.localpath} {unpackpath}" | ||
139 | path = d.getVar('PATH') | ||
140 | if path: | ||
141 | cmd = f"PATH={path} {cmd}" | ||
142 | name = os.path.basename(unpackpath) | ||
143 | bb.note(f"Unpacking {name} to {unpackdir}/") | ||
144 | subprocess.check_call(cmd, shell=True, preexec_fn=subprocess_setup) | ||
145 | |||
146 | if name.endswith('.zip'): | ||
147 | # Unpack the go.mod file from the zip file | ||
148 | module = ud.parm['module'] | ||
149 | name = name.rsplit('.', 1)[0] + '.mod' | ||
150 | bb.note(f"Unpacking {name} to {unpackdir}/") | ||
151 | with zipfile.ZipFile(ud.localpath) as zf: | ||
152 | with open(os.path.join(unpackdir, name), mode='wb') as mf: | ||
153 | try: | ||
154 | f = module + '@' + ud.parm['version'] + '/go.mod' | ||
155 | shutil.copyfileobj(zf.open(f), mf) | ||
156 | except KeyError: | ||
157 | # If the module does not have a go.mod file, synthesize | ||
158 | # one containing only a module statement. | ||
159 | mf.write(f'module {module}\n'.encode()) | ||
160 | |||
161 | |||
162 | class GoModGit(Git): | ||
163 | """Class to fetch Go modules directly from a git repository""" | ||
164 | |||
165 | def supports(self, ud, d): | ||
166 | """Check to see if a given URL is for this fetcher.""" | ||
167 | return ud.type == 'gomodgit' | ||
168 | |||
169 | def urldata_init(self, ud, d): | ||
170 | """Set up to download the module from the git repository. | ||
171 | |||
172 | Set up to download the git repository to the module cache directory and | ||
173 | unpack the module zip file and the go.mod file: | ||
174 | |||
175 | cache/vcs/<hash>: The bare git repository. | ||
176 | cache/download/<module>/@v/<version>.zip: The module zip file. | ||
177 | cache/download/<module>/@v/<version>.mod: The go.mod file. | ||
178 | """ | ||
179 | |||
180 | moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod' | ||
181 | |||
182 | if 'version' not in ud.parm: | ||
183 | raise MissingParameterError('version', ud.url) | ||
184 | |||
185 | module = ud.host | ||
186 | if ud.path != '/': | ||
187 | module += ud.path | ||
188 | ud.parm['module'] = module | ||
189 | |||
190 | # Set host, path and srcrev for git download | ||
191 | if 'repo' in ud.parm: | ||
192 | repo = ud.parm['repo'] | ||
193 | idx = repo.find('/') | ||
194 | if idx != -1: | ||
195 | ud.host = repo[:idx] | ||
196 | ud.path = repo[idx:] | ||
197 | else: | ||
198 | ud.host = repo | ||
199 | ud.path = '' | ||
200 | if 'protocol' not in ud.parm: | ||
201 | ud.parm['protocol'] = 'https' | ||
202 | ud.name = f"{module}@{ud.parm['version']}" | ||
203 | srcrev = d.getVar('SRCREV_' + ud.name) | ||
204 | if srcrev: | ||
205 | if 'srcrev' not in ud.parm: | ||
206 | ud.parm['srcrev'] = srcrev | ||
207 | else: | ||
208 | if 'srcrev' in ud.parm: | ||
209 | d.setVar('SRCREV_' + ud.name, ud.parm['srcrev']) | ||
210 | if 'branch' not in ud.parm: | ||
211 | ud.parm['nobranch'] = '1' | ||
212 | |||
213 | # Set subpath, subdir and bareclone for git unpack | ||
214 | if 'subdir' in ud.parm: | ||
215 | ud.parm['subpath'] = ud.parm['subdir'] | ||
216 | key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode() | ||
217 | ud.parm['key'] = key | ||
218 | ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs', | ||
219 | hashlib.sha256(key).hexdigest()) | ||
220 | ud.parm['bareclone'] = '1' | ||
221 | |||
222 | super().urldata_init(ud, d) | ||
223 | |||
224 | def unpack(self, ud, rootdir, d): | ||
225 | """Unpack the module in the module cache.""" | ||
226 | |||
227 | # Unpack the bare git repository | ||
228 | super().unpack(ud, rootdir, d) | ||
229 | |||
230 | moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod' | ||
231 | |||
232 | # Create the info file | ||
233 | module = ud.parm['module'] | ||
234 | repodir = os.path.join(rootdir, ud.parm['subdir']) | ||
235 | with open(repodir + '.info', 'wb') as f: | ||
236 | f.write(ud.parm['key']) | ||
237 | |||
238 | # Unpack the go.mod file from the repository | ||
239 | unpackdir = os.path.join(rootdir, moddir, 'cache/download', | ||
240 | escape(module), '@v') | ||
241 | bb.utils.mkdirhier(unpackdir) | ||
242 | srcrev = ud.parm['srcrev'] | ||
243 | version = ud.parm['version'] | ||
244 | escaped_version = escape(version) | ||
245 | cmd = f"git ls-tree -r --name-only '{srcrev}'" | ||
246 | if 'subpath' in ud.parm: | ||
247 | cmd += f" '{ud.parm['subpath']}'" | ||
248 | files = runfetchcmd(cmd, d, workdir=repodir).split() | ||
249 | name = escaped_version + '.mod' | ||
250 | bb.note(f"Unpacking {name} to {unpackdir}/") | ||
251 | with open(os.path.join(unpackdir, name), mode='wb') as mf: | ||
252 | f = 'go.mod' | ||
253 | if 'subpath' in ud.parm: | ||
254 | f = os.path.join(ud.parm['subpath'], f) | ||
255 | if f in files: | ||
256 | cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f] | ||
257 | subprocess.check_call(cmd, stdout=mf, cwd=repodir, | ||
258 | preexec_fn=subprocess_setup) | ||
259 | else: | ||
260 | # If the module does not have a go.mod file, synthesize one | ||
261 | # containing only a module statement. | ||
262 | mf.write(f'module {module}\n'.encode()) | ||
263 | |||
264 | # Synthesize the module zip file from the repository | ||
265 | name = escaped_version + '.zip' | ||
266 | bb.note(f"Unpacking {name} to {unpackdir}/") | ||
267 | with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf: | ||
268 | prefix = module + '@' + version + '/' | ||
269 | for f in files: | ||
270 | cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f] | ||
271 | data = subprocess.check_output(cmd, cwd=repodir, | ||
272 | preexec_fn=subprocess_setup) | ||
273 | zf.writestr(prefix + f, data) | ||
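
Two conventions in the new gomod fetchers are easy to miss in the diff: the module cache's case-encoding, where each capital letter becomes '!' plus its lowercase form, and the cache/vcs directory name, a SHA-256 of a "git3:<repo-url>" key. A quick sketch using the escape() helper exactly as defined above (the example module and URL are arbitrary):

    import hashlib
    import re

    def escape(path):
        # Capital letters are escaped with '!' so the cache also works on
        # case-insensitive filesystems (same helper as in gomod.py above).
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    print(escape("github.com/BurntSushi/toml"))
    # github.com/!burnt!sushi/toml

    # gomodgit stores the bare clone under cache/vcs/<sha256 of the key>:
    key = b"git3:https://go.googlesource.com/net"
    print(hashlib.sha256(key).hexdigest())
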
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py index 7d7668110e..fda56a564e 100644 --- a/bitbake/lib/bb/fetch2/local.py +++ b/bitbake/lib/bb/fetch2/local.py | |||
@@ -29,11 +29,10 @@ class Local(FetchMethod): | |||
29 | 29 | ||
30 | def urldata_init(self, ud, d): | 30 | def urldata_init(self, ud, d): |
31 | # We don't set localfile as for this fetcher the file is already local! | 31 | # We don't set localfile as for this fetcher the file is already local! |
32 | ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0]) | 32 | ud.basename = os.path.basename(ud.path) |
33 | ud.basename = os.path.basename(ud.decodedurl) | 33 | ud.basepath = ud.path |
34 | ud.basepath = ud.decodedurl | ||
35 | ud.needdonestamp = False | 34 | ud.needdonestamp = False |
36 | if "*" in ud.decodedurl: | 35 | if "*" in ud.path: |
37 | raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url) | 36 | raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url) |
38 | return | 37 | return |
39 | 38 | ||
@@ -48,7 +47,7 @@ class Local(FetchMethod): | |||
48 | Return the local filename of a given url assuming a successful fetch. | 47 | Return the local filename of a given url assuming a successful fetch. |
49 | """ | 48 | """ |
50 | searched = [] | 49 | searched = [] |
51 | path = urldata.decodedurl | 50 | path = urldata.path |
52 | newpath = path | 51 | newpath = path |
53 | if path[0] == "/": | 52 | if path[0] == "/": |
54 | logger.debug2("Using absolute %s" % (path)) | 53 | logger.debug2("Using absolute %s" % (path)) |
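
For context on what the removed decodedurl attribute did: it re-split the raw URL and percent-decoded the result before deriving basename and basepath. A short illustration of that old computation; the simplified code can use ud.path directly, presumably because the central URL parsing already delivers a decoded path:

    import os
    import urllib.parse

    url = "file:///srv/downloads/hello%20world.patch;sha256sum=abc"
    decoded = urllib.parse.unquote(url.split("://")[1].split(";")[0])
    print(decoded)                    # /srv/downloads/hello world.patch
    print(os.path.basename(decoded))  # hello world.patch
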
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py index 15f3f19bc8..e469d66768 100644 --- a/bitbake/lib/bb/fetch2/npm.py +++ b/bitbake/lib/bb/fetch2/npm.py | |||
@@ -42,11 +42,12 @@ from bb.utils import is_semver | |||
42 | 42 | ||
43 | def npm_package(package): | 43 | def npm_package(package): |
44 | """Convert the npm package name to remove unsupported character""" | 44 | """Convert the npm package name to remove unsupported character""" |
45 | # Scoped package names (with the @) use the same naming convention | 45 | # For scoped package names ('@user/package') the '/' is replaced by a '-'. |
46 | # as the 'npm pack' command. | 46 | # This is similar to what 'npm pack' does, but 'npm pack' also strips the |
47 | # leading '@', which can lead to ambiguous package names. | ||
47 | name = re.sub("/", "-", package) | 48 | name = re.sub("/", "-", package) |
48 | name = name.lower() | 49 | name = name.lower() |
49 | name = re.sub(r"[^\-a-z0-9]", "", name) | 50 | name = re.sub(r"[^\-a-z0-9@]", "", name) |
50 | name = name.strip("-") | 51 | name = name.strip("-") |
51 | return name | 52 | return name |
52 | 53 | ||
@@ -90,6 +91,12 @@ class NpmEnvironment(object): | |||
90 | self.d = d | 91 | self.d = d |
91 | 92 | ||
92 | self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) | 93 | self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) |
94 | |||
95 | hn = self._home_npmrc(d) | ||
96 | if hn is not None: | ||
97 | with open(hn, 'r') as hnf: | ||
98 | self.user_config.write(hnf.read()) | ||
99 | |||
93 | for key, value in configs: | 100 | for key, value in configs: |
94 | self.user_config.write("%s=%s\n" % (key, value)) | 101 | self.user_config.write("%s=%s\n" % (key, value)) |
95 | 102 | ||
@@ -102,6 +109,15 @@ class NpmEnvironment(object): | |||
102 | if self.user_config: | 109 | if self.user_config: |
103 | self.user_config.close() | 110 | self.user_config.close() |
104 | 111 | ||
112 | def _home_npmrc(self, d): | ||
113 | """Function to return user's HOME .npmrc file (or None if it doesn't exist)""" | ||
114 | home_npmrc_file = os.path.join(os.environ.get("HOME"), ".npmrc") | ||
115 | if d.getVar("BB_USE_HOME_NPMRC") == "1" and os.path.exists(home_npmrc_file): | ||
116 | bb.warn(f"BB_USE_HOME_NPMRC flag set and valid .npmrc detected - "\ | ||
117 | f"npm fetcher will use {home_npmrc_file}") | ||
118 | return home_npmrc_file | ||
119 | return None | ||
120 | |||
105 | def run(self, cmd, args=None, configs=None, workdir=None): | 121 | def run(self, cmd, args=None, configs=None, workdir=None): |
106 | """Run npm command in a controlled environment""" | 122 | """Run npm command in a controlled environment""" |
107 | with tempfile.TemporaryDirectory() as tmpdir: | 123 | with tempfile.TemporaryDirectory() as tmpdir: |
@@ -165,7 +181,7 @@ class Npm(FetchMethod): | |||
165 | # Using the 'downloadfilename' parameter as local filename | 181 | # Using the 'downloadfilename' parameter as local filename |
166 | # or the npm package name. | 182 | # or the npm package name. |
167 | if "downloadfilename" in ud.parm: | 183 | if "downloadfilename" in ud.parm: |
168 | ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"])) | 184 | ud.localfile = npm_localfile(ud.parm["downloadfilename"]) |
169 | else: | 185 | else: |
170 | ud.localfile = npm_localfile(ud.package, ud.version) | 186 | ud.localfile = npm_localfile(ud.package, ud.version) |
171 | 187 | ||
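
The npm_package() change keeps the leading '@' of scoped names, so a scoped package and an unscoped one with a dash no longer normalize to the same string. The function with its new character class, and the collision it avoids:

    import re

    def npm_package(package):
        # '/' in scoped names becomes '-'; '@' is now preserved by the
        # character class so scoped and unscoped names stay distinct.
        name = re.sub("/", "-", package)
        name = name.lower()
        name = re.sub(r"[^\-a-z0-9@]", "", name)
        return name.strip("-")

    print(npm_package("@Angular/Core"))  # @angular-core
    print(npm_package("angular-core"))   # angular-core (previously both gave "angular-core")
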
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py index ff5f8dc755..2f9599ee9e 100644 --- a/bitbake/lib/bb/fetch2/npmsw.py +++ b/bitbake/lib/bb/fetch2/npmsw.py | |||
@@ -37,38 +37,26 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False): | |||
37 | """ | 37 | """ |
38 | Run a callback for each dependency of a shrinkwrap file. | 38 | Run a callback for each dependency of a shrinkwrap file. |
39 | The callback uses the format: | 39 | The callback uses the format: |
40 | callback(name, params, deptree) | 40 | callback(name, data, location) |
41 | with: | 41 | with: |
42 | name = the package name (string) | 42 | name = the package name (string) |
43 | params = the package parameters (dictionary) | 43 | data = the package data (dictionary) |
44 | destdir = the destination of the package (string) | 44 | location = the location of the package (string) |
45 | """ | 45 | """ |
46 | # For handling old style dependencies entries in shinkwrap files | 46 | packages = shrinkwrap.get("packages") |
47 | def _walk_deps(deps, deptree): | 47 | if not packages: |
48 | for name in deps: | 48 | raise FetchError("Invalid shrinkwrap file format") |
49 | subtree = [*deptree, name] | 49 | |
50 | _walk_deps(deps[name].get("dependencies", {}), subtree) | 50 | for location, data in packages.items(): |
51 | if callback is not None: | 51 | # Skip empty main and local link target packages |
52 | if deps[name].get("dev", False) and not dev: | 52 | if not location.startswith('node_modules/'): |
53 | continue | 53 | continue |
54 | elif deps[name].get("bundled", False): | 54 | elif not dev and data.get("dev", False): |
55 | continue | 55 | continue |
56 | destsubdirs = [os.path.join("node_modules", dep) for dep in subtree] | 56 | elif data.get("inBundle", False): |
57 | destsuffix = os.path.join(*destsubdirs) | 57 | continue |
58 | callback(name, deps[name], destsuffix) | 58 | name = location.split('node_modules/')[-1] |
59 | 59 | callback(name, data, location) | |
60 | # packages entry means new style shrinkwrap file, else use dependencies | ||
61 | packages = shrinkwrap.get("packages", None) | ||
62 | if packages is not None: | ||
63 | for package in packages: | ||
64 | if package != "": | ||
65 | name = package.split('node_modules/')[-1] | ||
66 | package_infos = packages.get(package, {}) | ||
67 | if dev == False and package_infos.get("dev", False): | ||
68 | continue | ||
69 | callback(name, package_infos, package) | ||
70 | else: | ||
71 | _walk_deps(shrinkwrap.get("dependencies", {}), []) | ||
72 | 60 | ||
73 | class NpmShrinkWrap(FetchMethod): | 61 | class NpmShrinkWrap(FetchMethod): |
74 | """Class to fetch all package from a shrinkwrap file""" | 62 | """Class to fetch all package from a shrinkwrap file""" |
@@ -95,12 +83,18 @@ class NpmShrinkWrap(FetchMethod): | |||
95 | extrapaths = [] | 83 | extrapaths = [] |
96 | unpack = True | 84 | unpack = True |
97 | 85 | ||
98 | integrity = params.get("integrity", None) | 86 | integrity = params.get("integrity") |
99 | resolved = params.get("resolved", None) | 87 | resolved = params.get("resolved") |
100 | version = params.get("version", None) | 88 | version = params.get("version") |
89 | link = params.get("link", False) | ||
90 | |||
91 | # Handle link sources | ||
92 | if link: | ||
93 | localpath = resolved | ||
94 | unpack = False | ||
101 | 95 | ||
102 | # Handle registry sources | 96 | # Handle registry sources |
103 | if is_semver(version) and integrity: | 97 | elif version and is_semver(version) and integrity: |
104 | # Handle duplicate dependencies without url | 98 | # Handle duplicate dependencies without url |
105 | if not resolved: | 99 | if not resolved: |
106 | return | 100 | return |
@@ -128,10 +122,10 @@ class NpmShrinkWrap(FetchMethod): | |||
128 | extrapaths.append(resolvefile) | 122 | extrapaths.append(resolvefile) |
129 | 123 | ||
130 | # Handle http tarball sources | 124 | # Handle http tarball sources |
131 | elif version.startswith("http") and integrity: | 125 | elif resolved.startswith("http") and integrity: |
132 | localfile = npm_localfile(os.path.basename(version)) | 126 | localfile = npm_localfile(os.path.basename(resolved)) |
133 | 127 | ||
134 | uri = URI(version) | 128 | uri = URI(resolved) |
135 | uri.params["downloadfilename"] = localfile | 129 | uri.params["downloadfilename"] = localfile |
136 | 130 | ||
137 | checksum_name, checksum_expected = npm_integrity(integrity) | 131 | checksum_name, checksum_expected = npm_integrity(integrity) |
@@ -141,28 +135,12 @@ class NpmShrinkWrap(FetchMethod): | |||
141 | 135 | ||
142 | localpath = os.path.join(d.getVar("DL_DIR"), localfile) | 136 | localpath = os.path.join(d.getVar("DL_DIR"), localfile) |
143 | 137 | ||
144 | # Handle local tarball and link sources | 138 | # Handle local tarball sources |
145 | elif version.startswith("file"): | 139 | elif resolved.startswith("file"): |
146 | localpath = version[5:] | 140 | localpath = resolved[5:] |
147 | if not version.endswith(".tgz"): | ||
148 | unpack = False | ||
149 | 141 | ||
150 | # Handle git sources | 142 | # Handle git sources |
151 | elif version.startswith(("git", "bitbucket","gist")) or ( | 143 | elif resolved.startswith("git"): |
152 | not version.endswith((".tgz", ".tar", ".tar.gz")) | ||
153 | and not version.startswith((".", "@", "/")) | ||
154 | and "/" in version | ||
155 | ): | ||
156 | if version.startswith("github:"): | ||
157 | version = "git+https://github.com/" + version[len("github:"):] | ||
158 | elif version.startswith("gist:"): | ||
159 | version = "git+https://gist.github.com/" + version[len("gist:"):] | ||
160 | elif version.startswith("bitbucket:"): | ||
161 | version = "git+https://bitbucket.org/" + version[len("bitbucket:"):] | ||
162 | elif version.startswith("gitlab:"): | ||
163 | version = "git+https://gitlab.com/" + version[len("gitlab:"):] | ||
164 | elif not version.startswith(("git+","git:")): | ||
165 | version = "git+https://github.com/" + version | ||
166 | regex = re.compile(r""" | 144 | regex = re.compile(r""" |
167 | ^ | 145 | ^ |
168 | git\+ | 146 | git\+ |
@@ -174,16 +152,16 @@ class NpmShrinkWrap(FetchMethod): | |||
174 | $ | 152 | $ |
175 | """, re.VERBOSE) | 153 | """, re.VERBOSE) |
176 | 154 | ||
177 | match = regex.match(version) | 155 | match = regex.match(resolved) |
178 | |||
179 | if not match: | 156 | if not match: |
180 | raise ParameterError("Invalid git url: %s" % version, ud.url) | 157 | raise ParameterError("Invalid git url: %s" % resolved, ud.url) |
181 | 158 | ||
182 | groups = match.groupdict() | 159 | groups = match.groupdict() |
183 | 160 | ||
184 | uri = URI("git://" + str(groups["url"])) | 161 | uri = URI("git://" + str(groups["url"])) |
185 | uri.params["protocol"] = str(groups["protocol"]) | 162 | uri.params["protocol"] = str(groups["protocol"]) |
186 | uri.params["rev"] = str(groups["rev"]) | 163 | uri.params["rev"] = str(groups["rev"]) |
164 | uri.params["nobranch"] = "1" | ||
187 | uri.params["destsuffix"] = destsuffix | 165 | uri.params["destsuffix"] = destsuffix |
188 | 166 | ||
189 | url = str(uri) | 167 | url = str(uri) |
@@ -268,7 +246,7 @@ class NpmShrinkWrap(FetchMethod): | |||
268 | 246 | ||
269 | def unpack(self, ud, rootdir, d): | 247 | def unpack(self, ud, rootdir, d): |
270 | """Unpack the downloaded dependencies""" | 248 | """Unpack the downloaded dependencies""" |
271 | destdir = d.getVar("S") | 249 | destdir = rootdir |
272 | destsuffix = ud.parm.get("destsuffix") | 250 | destsuffix = ud.parm.get("destsuffix") |
273 | if destsuffix: | 251 | if destsuffix: |
274 | destdir = os.path.join(rootdir, destsuffix) | 252 | destdir = os.path.join(rootdir, destsuffix) |
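
After this rewrite foreach_dependencies() only understands the lockfile v2/v3 "packages" table keyed by install location; the recursive walk over old-style "dependencies" trees is gone. A self-contained sketch of the new iteration over a hypothetical shrinkwrap dictionary, showing which entries get skipped:

    shrinkwrap = {
        "packages": {
            "": {"name": "app"},                             # main package: skipped
            "node_modules/foo": {"version": "1.0.0"},
            "node_modules/foo/node_modules/bar": {"version": "2.0.0"},
            "node_modules/devtool": {"version": "3.0.0", "dev": True},        # skipped unless dev=True
            "node_modules/vendored": {"version": "4.0.0", "inBundle": True},  # always skipped
        }
    }

    for location, data in shrinkwrap["packages"].items():
        if not location.startswith("node_modules/"):
            continue  # empty main entry and local link targets
        if data.get("dev", False) or data.get("inBundle", False):
            continue
        name = location.split("node_modules/")[-1]  # nested paths keep only the leaf name
        print(name, "->", location)
    # foo -> node_modules/foo
    # bar -> node_modules/foo/node_modules/bar
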
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py index 6b8ffd5359..22c0538139 100644 --- a/bitbake/lib/bb/fetch2/s3.py +++ b/bitbake/lib/bb/fetch2/s3.py | |||
@@ -77,7 +77,7 @@ class S3(FetchMethod): | |||
77 | else: | 77 | else: |
78 | ud.basename = os.path.basename(ud.path) | 78 | ud.basename = os.path.basename(ud.path) |
79 | 79 | ||
80 | ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) | 80 | ud.localfile = ud.basename |
81 | 81 | ||
82 | ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" | 82 | ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" |
83 | 83 | ||
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py index 7884cce949..bee71a0d0d 100644 --- a/bitbake/lib/bb/fetch2/sftp.py +++ b/bitbake/lib/bb/fetch2/sftp.py | |||
@@ -77,7 +77,7 @@ class SFTP(FetchMethod): | |||
77 | else: | 77 | else: |
78 | ud.basename = os.path.basename(ud.path) | 78 | ud.basename = os.path.basename(ud.path) |
79 | 79 | ||
80 | ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) | 80 | ud.localfile = ud.basename |
81 | 81 | ||
82 | def download(self, ud, d): | 82 | def download(self, ud, d): |
83 | """Fetch urls""" | 83 | """Fetch urls""" |
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py index 0cbb2a6f25..2a0f2cb44b 100644 --- a/bitbake/lib/bb/fetch2/ssh.py +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
@@ -73,8 +73,7 @@ class SSH(FetchMethod): | |||
73 | path = m.group('path') | 73 | path = m.group('path') |
74 | path = urllib.parse.unquote(path) | 74 | path = urllib.parse.unquote(path) |
75 | host = m.group('host') | 75 | host = m.group('host') |
76 | urldata.localpath = os.path.join(d.getVar('DL_DIR'), | 76 | urldata.localfile = os.path.basename(os.path.normpath(path)) |
77 | os.path.basename(os.path.normpath(path))) | ||
78 | 77 | ||
79 | def download(self, urldata, d): | 78 | def download(self, urldata, d): |
80 | dldir = d.getVar('DL_DIR') | 79 | dldir = d.getVar('DL_DIR') |
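
Like the s3, sftp and wget changes in this series, the ssh fetcher now sets ud.localfile (a name relative to DL_DIR) rather than assembling ud.localpath itself, leaving the core fetcher to build the full path consistently. The filename is taken from the normalized remote path:

    import os

    for path in ("/var/data/project/", "/var/data/./archive.tar.gz"):
        # normpath collapses '.' segments and trailing slashes before the
        # basename is taken, so a directory path still yields a usable name.
        print(os.path.basename(os.path.normpath(path)))
    # project
    # archive.tar.gz
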
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py index d40e4d2909..0852108e7d 100644 --- a/bitbake/lib/bb/fetch2/svn.py +++ b/bitbake/lib/bb/fetch2/svn.py | |||
@@ -210,3 +210,6 @@ class Svn(FetchMethod): | |||
210 | 210 | ||
211 | def _build_revision(self, ud, d): | 211 | def _build_revision(self, ud, d): |
212 | return ud.revision | 212 | return ud.revision |
213 | |||
214 | def supports_checksum(self, urldata): | ||
215 | return False | ||
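
supports_checksum() is the hook a fetcher overrides to opt out of checksum verification; a subversion checkout is pinned by its revision, not by a stable downloaded file. A sketch of the override in isolation (the class is illustrative, not the real FetchMethod):

    class RevisionPinnedFetcher:
        def supports_checksum(self, urldata):
            # Content identity comes from the VCS revision, so there is no
            # single artifact whose checksum could be verified.
            return False
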
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index fbfa6938ac..7e43d3bc97 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -53,11 +53,6 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler): | |||
53 | class Wget(FetchMethod): | 53 | class Wget(FetchMethod): |
54 | """Class to fetch urls via 'wget'""" | 54 | """Class to fetch urls via 'wget'""" |
55 | 55 | ||
56 | # CDNs like CloudFlare may do a 'browser integrity test' which can fail | ||
57 | # with the standard wget/urllib User-Agent, so pretend to be a modern | ||
58 | # browser. | ||
59 | user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0" | ||
60 | |||
61 | def check_certs(self, d): | 56 | def check_certs(self, d): |
62 | """ | 57 | """ |
63 | Should certificates be checked? | 58 | Should certificates be checked? |
@@ -83,11 +78,11 @@ class Wget(FetchMethod): | |||
83 | else: | 78 | else: |
84 | ud.basename = os.path.basename(ud.path) | 79 | ud.basename = os.path.basename(ud.path) |
85 | 80 | ||
86 | ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) | 81 | ud.localfile = ud.basename |
87 | if not ud.localfile: | 82 | if not ud.localfile: |
88 | ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) | 83 | ud.localfile = ud.host + ud.path.replace("/", ".") |
89 | 84 | ||
90 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30" | 85 | self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget --tries=2 --timeout=100" |
91 | 86 | ||
92 | if ud.type == 'ftp' or ud.type == 'ftps': | 87 | if ud.type == 'ftp' or ud.type == 'ftps': |
93 | self.basecmd += " --passive-ftp" | 88 | self.basecmd += " --passive-ftp" |
@@ -101,16 +96,17 @@ class Wget(FetchMethod): | |||
101 | 96 | ||
102 | logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) | 97 | logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) |
103 | bb.fetch2.check_network_access(d, command, ud.url) | 98 | bb.fetch2.check_network_access(d, command, ud.url) |
104 | runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) | 99 | runfetchcmd(command + ' --progress=dot --verbose', d, quiet, log=progresshandler, workdir=workdir) |
105 | 100 | ||
106 | def download(self, ud, d): | 101 | def download(self, ud, d): |
107 | """Fetch urls""" | 102 | """Fetch urls""" |
108 | 103 | ||
109 | fetchcmd = self.basecmd | 104 | fetchcmd = self.basecmd |
110 | 105 | ||
111 | localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp" | 106 | dldir = os.path.realpath(d.getVar("DL_DIR")) |
107 | localpath = os.path.join(dldir, ud.localfile) + ".tmp" | ||
112 | bb.utils.mkdirhier(os.path.dirname(localpath)) | 108 | bb.utils.mkdirhier(os.path.dirname(localpath)) |
113 | fetchcmd += " -O %s" % shlex.quote(localpath) | 109 | fetchcmd += " --output-document=%s" % shlex.quote(localpath) |
114 | 110 | ||
115 | if ud.user and ud.pswd: | 111 | if ud.user and ud.pswd: |
116 | fetchcmd += " --auth-no-challenge" | 112 | fetchcmd += " --auth-no-challenge" |
@@ -126,14 +122,18 @@ class Wget(FetchMethod): | |||
126 | fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd) | 122 | fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd) |
127 | 123 | ||
128 | uri = ud.url.split(";")[0] | 124 | uri = ud.url.split(";")[0] |
129 | if os.path.exists(ud.localpath): | 125 | fetchcmd += " --continue --directory-prefix=%s '%s'" % (dldir, uri) |
130 | # file exists, but we didnt complete it.. trying again.. | ||
131 | fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri) | ||
132 | else: | ||
133 | fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri) | ||
134 | |||
135 | self._runwget(ud, d, fetchcmd, False) | 126 | self._runwget(ud, d, fetchcmd, False) |
136 | 127 | ||
128 | # Sanity check since wget can pretend it succeeded when it didn't | ||
129 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
130 | if not os.path.exists(localpath): | ||
131 | raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri) | ||
132 | |||
133 | if os.path.getsize(localpath) == 0: | ||
134 | os.remove(localpath) | ||
135 | raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) | ||
136 | |||
137 | # Try and verify any checksum now, meaning if it isn't correct, we don't remove the | 137 | # Try and verify any checksum now, meaning if it isn't correct, we don't remove the |
138 | # original file, which might be a race (imagine two recipes referencing the same | 138 | # original file, which might be a race (imagine two recipes referencing the same |
139 | # source, one with an incorrect checksum) | 139 | # source, one with an incorrect checksum) |
@@ -143,15 +143,6 @@ class Wget(FetchMethod): | |||
143 | # Our lock prevents multiple writers but mirroring code may grab incomplete files | 143 | # Our lock prevents multiple writers but mirroring code may grab incomplete files |
144 | os.rename(localpath, localpath[:-4]) | 144 | os.rename(localpath, localpath[:-4]) |
145 | 145 | ||
146 | # Sanity check since wget can pretend it succeeded when it didn't | ||
147 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
148 | if not os.path.exists(ud.localpath): | ||
149 | raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri) | ||
150 | |||
151 | if os.path.getsize(ud.localpath) == 0: | ||
152 | os.remove(ud.localpath) | ||
153 | raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) | ||
154 | |||
155 | return True | 146 | return True |
156 | 147 | ||
157 | def checkstatus(self, fetch, ud, d, try_again=True): | 148 | def checkstatus(self, fetch, ud, d, try_again=True): |
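The relocated sanity checks now run against the .tmp path before the rename into place. A minimal sketch of that ordering, using a hypothetical helper name and RuntimeError standing in for bitbake's FetchError:

    import os

    def finalize_download(tmp_path):
        # Validate the temporary file *before* renaming it into place, so an
        # empty or missing download never becomes visible to mirroring code.
        if not os.path.exists(tmp_path):
            raise RuntimeError("fetch reported success but %s does not exist" % tmp_path)
        if os.path.getsize(tmp_path) == 0:
            os.remove(tmp_path)
            raise RuntimeError("fetch produced a zero-size file: %s" % tmp_path)
        os.rename(tmp_path, tmp_path[:-4])  # strip the ".tmp" suffix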
@@ -243,7 +234,12 @@ class Wget(FetchMethod): | |||
243 | fetch.connection_cache.remove_connection(h.host, h.port) | 234 | fetch.connection_cache.remove_connection(h.host, h.port) |
244 | raise urllib.error.URLError(err) | 235 | raise urllib.error.URLError(err) |
245 | else: | 236 | else: |
246 | r = h.getresponse() | 237 | try: |
238 | r = h.getresponse() | ||
239 | except TimeoutError as e: | ||
240 | if fetch.connection_cache: | ||
241 | fetch.connection_cache.remove_connection(h.host, h.port) | ||
242 | raise TimeoutError(e) | ||
247 | 243 | ||
248 | # Pick apart the HTTPResponse object to get the addinfourl | 244 | # Pick apart the HTTPResponse object to get the addinfourl |
249 | # object initialized properly. | 245 | # object initialized properly. |
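The new except clause mirrors the BadStatusLine path just above it. A sketch of the idea, with illustrative names standing in for bitbake's connection cache and cached connection:

    def get_response_evicting_on_timeout(cache, conn):
        # If a cached connection times out, drop it from the cache so any
        # retry opens a fresh socket instead of reusing a dead one.
        try:
            return conn.getresponse()
        except TimeoutError:
            if cache:
                cache.remove_connection(conn.host, conn.port)
            raise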
@@ -304,13 +300,45 @@ class Wget(FetchMethod): | |||
304 | 300 | ||
305 | class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): | 301 | class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): |
306 | """ | 302 | """ |
307 | urllib2.HTTPRedirectHandler resets the method to GET on redirect, | 303 | urllib.request.HTTPRedirectHandler before 3.13 has two flaws: |
308 | when we want to follow redirects using the original method. | 304 | |
305 | It resets the method to GET on redirect when we want to follow | ||
306 | redirects using the original method (typically HEAD). This was fixed | ||
307 | in 759e8e7. | ||
308 | |||
309 | It also doesn't handle 308 (Permanent Redirect). This was fixed in | ||
310 | c379bc5. | ||
311 | |||
312 | Until we depend on Python 3.13 onwards, copy the redirect_request | ||
313 | method to fix these issues. | ||
309 | """ | 314 | """ |
310 | def redirect_request(self, req, fp, code, msg, headers, newurl): | 315 | def redirect_request(self, req, fp, code, msg, headers, newurl): |
311 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) | 316 | m = req.get_method() |
312 | newreq.get_method = req.get_method | 317 | if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD") |
313 | return newreq | 318 | or code in (301, 302, 303) and m == "POST")): |
319 | raise urllib.error.HTTPError(req.full_url, code, msg, headers, fp) | ||
320 | |||
321 | # Strictly (according to RFC 2616), 301 or 302 in response to | ||
322 | # a POST MUST NOT cause a redirection without confirmation | ||
323 | # from the user (of urllib.request, in this case). In practice, | ||
324 | # essentially all clients do redirect in this case, so we do | ||
325 | # the same. | ||
326 | |||
327 | # Be conciliant with URIs containing a space. This is mainly | ||
328 | # redundant with the more complete encoding done in http_error_302(), | ||
329 | # but it is kept for compatibility with other callers. | ||
330 | newurl = newurl.replace(' ', '%20') | ||
331 | |||
332 | CONTENT_HEADERS = ("content-length", "content-type") | ||
333 | newheaders = {k: v for k, v in req.headers.items() | ||
334 | if k.lower() not in CONTENT_HEADERS} | ||
335 | return urllib.request.Request(newurl, | ||
336 | method="HEAD" if m == "HEAD" else "GET", | ||
337 | headers=newheaders, | ||
338 | origin_req_host=req.origin_req_host, | ||
339 | unverifiable=True) | ||
340 | |||
341 | http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302 | ||
314 | 342 | ||
315 | # We need to update the environment here as both the proxy and HTTPS | 343 | # We need to update the environment here as both the proxy and HTTPS |
316 | # handlers need variables set. The proxy needs http_proxy and friends to | 344 | # handlers need variables set. The proxy needs http_proxy and friends to |
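The last line of the copied handler is the noteworthy trick: 308 support comes from aliasing the existing 302 machinery, which in turn defers the policy decision to redirect_request(). A runnable sketch of that aliasing on its own:

    import urllib.request

    class Redirect308Handler(urllib.request.HTTPRedirectHandler):
        # 308 (Permanent Redirect) reuses the 302 handler, which calls
        # redirect_request() to decide whether to follow the redirect.
        http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302

    opener = urllib.request.build_opener(Redirect308Handler())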
@@ -343,14 +371,14 @@ class Wget(FetchMethod): | |||
343 | opener = urllib.request.build_opener(*handlers) | 371 | opener = urllib.request.build_opener(*handlers) |
344 | 372 | ||
345 | try: | 373 | try: |
346 | uri_base = ud.url.split(";")[0] | 374 | parts = urllib.parse.urlparse(ud.url.split(";")[0]) |
347 | uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path) | 375 | uri = "{}://{}{}".format(parts.scheme, parts.netloc, parts.path) |
348 | r = urllib.request.Request(uri) | 376 | r = urllib.request.Request(uri) |
349 | r.get_method = lambda: "HEAD" | 377 | r.get_method = lambda: "HEAD" |
350 | # Some servers (FusionForge, as used on Alioth) require that the | 378 | # Some servers (FusionForge, as used on Alioth) require that the |
351 | # optional Accept header is set. | 379 | # optional Accept header is set. |
352 | r.add_header("Accept", "*/*") | 380 | r.add_header("Accept", "*/*") |
353 | r.add_header("User-Agent", self.user_agent) | 381 | r.add_header("User-Agent", "bitbake/{}".format(bb.__version__)) |
354 | def add_basic_auth(login_str, request): | 382 | def add_basic_auth(login_str, request): |
355 | '''Adds Basic auth to http request, pass in login:password as string''' | 383 | '''Adds Basic auth to http request, pass in login:password as string''' |
356 | import base64 | 384 | import base64 |
@@ -370,7 +398,7 @@ class Wget(FetchMethod): | |||
370 | except (FileNotFoundError, netrc.NetrcParseError): | 398 | except (FileNotFoundError, netrc.NetrcParseError): |
371 | pass | 399 | pass |
372 | 400 | ||
373 | with opener.open(r, timeout=30) as response: | 401 | with opener.open(r, timeout=100) as response: |
374 | pass | 402 | pass |
375 | except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e: | 403 | except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e: |
376 | if try_again: | 404 | if try_again: |
@@ -457,7 +485,7 @@ class Wget(FetchMethod): | |||
457 | f = tempfile.NamedTemporaryFile() | 485 | f = tempfile.NamedTemporaryFile() |
458 | with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: | 486 | with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: |
459 | fetchcmd = self.basecmd | 487 | fetchcmd = self.basecmd |
460 | fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" | 488 | fetchcmd += " --output-document=%s '%s'" % (f.name, uri) |
461 | try: | 489 | try: |
462 | self._runwget(ud, d, fetchcmd, True, workdir=workdir) | 490 | self._runwget(ud, d, fetchcmd, True, workdir=workdir) |
463 | fetchresult = f.read() | 491 | fetchresult = f.read() |
@@ -617,13 +645,17 @@ class Wget(FetchMethod): | |||
617 | 645 | ||
618 | sanity check to ensure same name and type. | 646 | sanity check to ensure same name and type. |
619 | """ | 647 | """ |
620 | package = ud.path.split("/")[-1] | 648 | if 'downloadfilename' in ud.parm: |
649 | package = ud.parm['downloadfilename'] | ||
650 | else: | ||
651 | package = ud.path.split("/")[-1] | ||
621 | current_version = ['', d.getVar('PV'), ''] | 652 | current_version = ['', d.getVar('PV'), ''] |
622 | 653 | ||
623 | """possible to have no version in pkg name, such as spectrum-fw""" | 654 | """possible to have no version in pkg name, such as spectrum-fw""" |
624 | if not re.search(r"\d+", package): | 655 | if not re.search(r"\d+", package): |
625 | current_version[1] = re.sub('_', '.', current_version[1]) | 656 | current_version[1] = re.sub('_', '.', current_version[1]) |
626 | current_version[1] = re.sub('-', '.', current_version[1]) | 657 | current_version[1] = re.sub('-', '.', current_version[1]) |
658 | bb.debug(3, "latest_versionstring: no version found in %s" % package) | ||
627 | return (current_version[1], '') | 659 | return (current_version[1], '') |
628 | 660 | ||
629 | package_regex = self._init_regexes(package, ud, d) | 661 | package_regex = self._init_regexes(package, ud, d) |
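The upstream version check now prefers an explicit downloadfilename URL parameter over the last path component when deriving the package name to match against. A sketch with example values (parm and path mirror fields of bitbake's FetchData):

    parm = {"downloadfilename": "spectrum-fw-1.2.tar.gz"}  # example values
    path = "/downloads/latest"

    package = parm.get("downloadfilename", path.split("/")[-1])
    print(package)  # -> spectrum-fw-1.2.tar.gz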
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py index 3e18596faa..4f616ff42e 100644 --- a/bitbake/lib/bb/msg.py +++ b/bitbake/lib/bb/msg.py | |||
@@ -89,10 +89,6 @@ class BBLogFormatter(logging.Formatter): | |||
89 | msg = logging.Formatter.format(self, record) | 89 | msg = logging.Formatter.format(self, record) |
90 | if hasattr(record, 'bb_exc_formatted'): | 90 | if hasattr(record, 'bb_exc_formatted'): |
91 | msg += '\n' + ''.join(record.bb_exc_formatted) | 91 | msg += '\n' + ''.join(record.bb_exc_formatted) |
92 | elif hasattr(record, 'bb_exc_info'): | ||
93 | etype, value, tb = record.bb_exc_info | ||
94 | formatted = bb.exceptions.format_exception(etype, value, tb, limit=5) | ||
95 | msg += '\n' + ''.join(formatted) | ||
96 | return msg | 92 | return msg |
97 | 93 | ||
98 | def colorize(self, record): | 94 | def colorize(self, record): |
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index a4358f1374..d428d8a4b4 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe): | |||
49 | __mtime_cache = {} | 49 | __mtime_cache = {} |
50 | def cached_mtime(f): | 50 | def cached_mtime(f): |
51 | if f not in __mtime_cache: | 51 | if f not in __mtime_cache: |
52 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 52 | res = os.stat(f) |
53 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
53 | return __mtime_cache[f] | 54 | return __mtime_cache[f] |
54 | 55 | ||
55 | def cached_mtime_noerror(f): | 56 | def cached_mtime_noerror(f): |
56 | if f not in __mtime_cache: | 57 | if f not in __mtime_cache: |
57 | try: | 58 | try: |
58 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 59 | res = os.stat(f) |
60 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
59 | except OSError: | 61 | except OSError: |
60 | return 0 | 62 | return 0 |
61 | return __mtime_cache[f] | 63 | return __mtime_cache[f] |
62 | 64 | ||
63 | def check_mtime(f, mtime): | 65 | def check_mtime(f, mtime): |
64 | try: | 66 | try: |
65 | current_mtime = os.stat(f)[stat.ST_MTIME] | 67 | res = os.stat(f) |
68 | current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
66 | __mtime_cache[f] = current_mtime | 69 | __mtime_cache[f] = current_mtime |
67 | except OSError: | 70 | except OSError: |
68 | current_mtime = 0 | 71 | current_mtime = 0 |
@@ -70,7 +73,8 @@ def check_mtime(f, mtime): | |||
70 | 73 | ||
71 | def update_mtime(f): | 74 | def update_mtime(f): |
72 | try: | 75 | try: |
73 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 76 | res = os.stat(f) |
77 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
74 | except OSError: | 78 | except OSError: |
75 | if f in __mtime_cache: | 79 | if f in __mtime_cache: |
76 | del __mtime_cache[f] | 80 | del __mtime_cache[f] |
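All four cache sites now key on the same richer fingerprint. A sketch of why: nanosecond mtime plus size and inode catches rewrites that a whole-second ST_MTIME comparison would miss, such as a file replaced twice within one second:

    import os

    def file_fingerprint(path):
        # (st_mtime_ns, st_size, st_ino) changes when a file is replaced or
        # rewritten, even when the coarse mtime stays the same.
        res = os.stat(path)
        return (res.st_mtime_ns, res.st_size, res.st_ino)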
@@ -172,4 +176,41 @@ def get_file_depends(d): | |||
172 | dep_files.append(os.path.abspath(fn)) | 176 | dep_files.append(os.path.abspath(fn)) |
173 | return " ".join(dep_files) | 177 | return " ".join(dep_files) |
174 | 178 | ||
179 | def vardeps(*varnames): | ||
180 | """ | ||
181 | Function decorator that can be used to instruct the bitbake dependency | ||
182 | parsing to add a dependency on the specified variables names | ||
183 | |||
184 | Example: | ||
185 | |||
186 | @bb.parse.vardeps("FOO", "BAR") | ||
187 | def my_function(): | ||
188 | ... | ||
189 | |||
190 | """ | ||
191 | def inner(f): | ||
192 | if not hasattr(f, "bb_vardeps"): | ||
193 | f.bb_vardeps = set() | ||
194 | f.bb_vardeps |= set(varnames) | ||
195 | return f | ||
196 | return inner | ||
197 | |||
198 | def vardepsexclude(*varnames): | ||
199 | """ | ||
200 | Function decorator that can be used to instruct the bitbake dependency | ||
201 | parsing to ignore dependencies on the specified variable names in the code | ||
202 | |||
203 | Example: | ||
204 | |||
205 | @bb.parse.vardepsexclude("FOO", "BAR") | ||
206 | def my_function(): | ||
207 | ... | ||
208 | """ | ||
209 | def inner(f): | ||
210 | if not hasattr(f, "bb_vardepsexclude"): | ||
211 | f.bb_vardepsexclude = set() | ||
212 | f.bb_vardepsexclude |= set(varnames) | ||
213 | return f | ||
214 | return inner | ||
215 | |||
175 | from bb.parse.parse_py import __version__, ConfHandler, BBHandler | 216 | from bb.parse.parse_py import __version__, ConfHandler, BBHandler |
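The hasattr guard in both decorators lets them stack and accumulate names onto the same function attribute. A standalone copy of vardeps, for illustration outside bitbake:

    def vardeps(*varnames):
        def inner(f):
            if not hasattr(f, "bb_vardeps"):
                f.bb_vardeps = set()
            f.bb_vardeps |= set(varnames)
            return f
        return inner

    @vardeps("FOO")
    @vardeps("BAR")
    def my_function():
        pass

    print(sorted(my_function.bb_vardeps))  # -> ['BAR', 'FOO']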
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index 7581d003fd..49a0788038 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
@@ -43,6 +43,21 @@ class IncludeNode(AstNode): | |||
43 | else: | 43 | else: |
44 | bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) | 44 | bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) |
45 | 45 | ||
46 | class IncludeAllNode(AstNode): | ||
47 | def __init__(self, filename, lineno, what_file): | ||
48 | AstNode.__init__(self, filename, lineno) | ||
49 | self.what_file = what_file | ||
50 | |||
51 | def eval(self, data): | ||
52 | """ | ||
53 | Include the file and evaluate the statements | ||
54 | """ | ||
55 | s = data.expand(self.what_file) | ||
56 | logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s) | ||
57 | |||
58 | for path in data.getVar("BBPATH").split(":"): | ||
59 | bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False) | ||
60 | |||
46 | class ExportNode(AstNode): | 61 | class ExportNode(AstNode): |
47 | def __init__(self, filename, lineno, var): | 62 | def __init__(self, filename, lineno, var): |
48 | AstNode.__init__(self, filename, lineno) | 63 | AstNode.__init__(self, filename, lineno) |
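Unlike a plain include, which stops at the first file found, IncludeAllNode walks every BBPATH entry and includes each copy of the file. A sketch with illustrative paths (the real include() call tolerates missing files, so the existence filter here is only for display):

    import os

    def include_all(bbpath, relative_conf):
        for layer in bbpath.split(":"):
            candidate = os.path.join(layer, relative_conf)
            if os.path.exists(candidate):
                print("including", candidate)

    include_all("/layers/meta:/layers/meta-custom",
                "conf/distro/include/extra.conf")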
@@ -137,7 +152,10 @@ class DataNode(AstNode): | |||
137 | 152 | ||
138 | flag = None | 153 | flag = None |
139 | if 'flag' in groupd and groupd['flag'] is not None: | 154 | if 'flag' in groupd and groupd['flag'] is not None: |
140 | flag = groupd['flag'] | 155 | if groupd["lazyques"]: |
156 | flag = "_defaultval_flag_"+groupd['flag'] | ||
157 | else: | ||
158 | flag = groupd['flag'] | ||
141 | elif groupd["lazyques"]: | 159 | elif groupd["lazyques"]: |
142 | flag = "_defaultval" | 160 | flag = "_defaultval" |
143 | 161 | ||
@@ -240,14 +258,16 @@ class ExportFuncsNode(AstNode): | |||
240 | data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True) | 258 | data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True) |
241 | 259 | ||
242 | class AddTaskNode(AstNode): | 260 | class AddTaskNode(AstNode): |
243 | def __init__(self, filename, lineno, func, before, after): | 261 | def __init__(self, filename, lineno, tasks, before, after): |
244 | AstNode.__init__(self, filename, lineno) | 262 | AstNode.__init__(self, filename, lineno) |
245 | self.func = func | 263 | self.tasks = tasks |
246 | self.before = before | 264 | self.before = before |
247 | self.after = after | 265 | self.after = after |
248 | 266 | ||
249 | def eval(self, data): | 267 | def eval(self, data): |
250 | bb.build.addtask(self.func, self.before, self.after, data) | 268 | tasks = self.tasks.split() |
269 | for task in tasks: | ||
270 | bb.build.addtask(task, self.before, self.after, data) | ||
251 | 271 | ||
252 | class DelTaskNode(AstNode): | 272 | class DelTaskNode(AstNode): |
253 | def __init__(self, filename, lineno, tasks): | 273 | def __init__(self, filename, lineno, tasks): |
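AddTaskNode.eval now fans a whitespace-separated task list out into one addtask call per task, all sharing the same before/after constraints. A sketch with an illustrative addtask line:

    # addtask do_foo do_bar after do_configure before do_compile
    tasks, before, after = "do_foo do_bar", "do_compile", "do_configure"
    for task in tasks.split():
        print("addtask(%s, before=%s, after=%s)" % (task, before, after))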
@@ -320,13 +340,62 @@ class InheritDeferredNode(AstNode): | |||
320 | self.inherit = (classes, filename, lineno) | 340 | self.inherit = (classes, filename, lineno) |
321 | 341 | ||
322 | def eval(self, data): | 342 | def eval(self, data): |
323 | inherits = data.getVar('__BBDEFINHERITS', False) or [] | 343 | bb.parse.BBHandler.inherit_defer(*self.inherit, data) |
324 | inherits.append(self.inherit) | 344 | |
325 | data.setVar('__BBDEFINHERITS', inherits) | 345 | class AddFragmentsNode(AstNode): |
346 | def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable): | ||
347 | AstNode.__init__(self, filename, lineno) | ||
348 | self.fragments_path_prefix = fragments_path_prefix | ||
349 | self.fragments_variable = fragments_variable | ||
350 | self.flagged_variables_list_variable = flagged_variables_list_variable | ||
351 | self.builtin_fragments_variable = builtin_fragments_variable | ||
352 | |||
353 | def eval(self, data): | ||
354 | # No need to use mark_dependency since we would only match a fragment | ||
355 | # from a specific layer and there can only be a single layer with a | ||
356 | # given namespace. | ||
357 | def find_fragment(layers, layerid, full_fragment_name): | ||
358 | for layerpath in layers.split(): | ||
359 | candidate_fragment_path = os.path.join(layerpath, full_fragment_name) | ||
360 | if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid: | ||
361 | return candidate_fragment_path | ||
362 | return None | ||
363 | |||
364 | def check_and_set_builtin_fragment(fragment, data, builtin_fragments): | ||
365 | prefix, value = fragment.split('/', 1) | ||
366 | if prefix in builtin_fragments.keys(): | ||
367 | data.setVar(builtin_fragments[prefix], value) | ||
368 | return True | ||
369 | return False | ||
370 | |||
371 | fragments = data.getVar(self.fragments_variable) | ||
372 | layers = data.getVar('BBLAYERS') | ||
373 | flagged_variables = data.getVar(self.flagged_variables_list_variable).split() | ||
374 | builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] } | ||
375 | |||
376 | if not fragments: | ||
377 | return | ||
378 | for f in fragments.split(): | ||
379 | if check_and_set_builtin_fragment(f, data, builtin_fragments): | ||
380 | continue | ||
381 | layerid, fragment_name = f.split('/', 1) | ||
382 | full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name)) | ||
383 | fragment_path = find_fragment(layers, layerid, full_fragment_name) | ||
384 | if fragment_path: | ||
385 | bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment") | ||
386 | for flagged_var in flagged_variables: | ||
387 | val = data.getVar(flagged_var) | ||
388 | data.setVarFlag(flagged_var, f, val) | ||
389 | data.setVar(flagged_var, None) | ||
390 | else: | ||
391 | bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers)) | ||
326 | 392 | ||
327 | def handleInclude(statements, filename, lineno, m, force): | 393 | def handleInclude(statements, filename, lineno, m, force): |
328 | statements.append(IncludeNode(filename, lineno, m.group(1), force)) | 394 | statements.append(IncludeNode(filename, lineno, m.group(1), force)) |
329 | 395 | ||
396 | def handleIncludeAll(statements, filename, lineno, m): | ||
397 | statements.append(IncludeAllNode(filename, lineno, m.group(1))) | ||
398 | |||
330 | def handleExport(statements, filename, lineno, m): | 399 | def handleExport(statements, filename, lineno, m): |
331 | statements.append(ExportNode(filename, lineno, m.group(1))) | 400 | statements.append(ExportNode(filename, lineno, m.group(1))) |
332 | 401 | ||
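Each entry in the fragments variable is either '<layerid>/<fragment>' resolved against the layers, or a builtin '<prefix>/<value>' that sets a variable directly, as AddFragmentsNode.eval above distinguishes. A sketch with example values:

    builtin_fragments = {"machine": "MACHINE"}  # example builtin mapping

    def classify(fragment):
        prefix, value = fragment.split("/", 1)
        if prefix in builtin_fragments:
            return ("builtin", builtin_fragments[prefix], value)
        return ("layer", prefix, value)

    print(classify("machine/qemux86-64"))   # -> ('builtin', 'MACHINE', 'qemux86-64')
    print(classify("core/some-fragment"))   # -> ('layer', 'core', 'some-fragment')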
@@ -348,21 +417,11 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body) | |||
348 | def handleExportFuncs(statements, filename, lineno, m, classname): | 417 | def handleExportFuncs(statements, filename, lineno, m, classname): |
349 | statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) | 418 | statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) |
350 | 419 | ||
351 | def handleAddTask(statements, filename, lineno, m): | 420 | def handleAddTask(statements, filename, lineno, tasks, before, after): |
352 | func = m.group("func") | 421 | statements.append(AddTaskNode(filename, lineno, tasks, before, after)) |
353 | before = m.group("before") | ||
354 | after = m.group("after") | ||
355 | if func is None: | ||
356 | return | ||
357 | |||
358 | statements.append(AddTaskNode(filename, lineno, func, before, after)) | ||
359 | 422 | ||
360 | def handleDelTask(statements, filename, lineno, m): | 423 | def handleDelTask(statements, filename, lineno, tasks): |
361 | func = m.group(1) | 424 | statements.append(DelTaskNode(filename, lineno, tasks)) |
362 | if func is None: | ||
363 | return | ||
364 | |||
365 | statements.append(DelTaskNode(filename, lineno, func)) | ||
366 | 425 | ||
367 | def handleBBHandlers(statements, filename, lineno, m): | 426 | def handleBBHandlers(statements, filename, lineno, m): |
368 | statements.append(BBHandlerNode(filename, lineno, m.group(1))) | 427 | statements.append(BBHandlerNode(filename, lineno, m.group(1))) |
@@ -378,12 +437,43 @@ def handleInheritDeferred(statements, filename, lineno, m): | |||
378 | classes = m.group(1) | 437 | classes = m.group(1) |
379 | statements.append(InheritDeferredNode(filename, lineno, classes)) | 438 | statements.append(InheritDeferredNode(filename, lineno, classes)) |
380 | 439 | ||
440 | def handleAddFragments(statements, filename, lineno, m): | ||
441 | fragments_path_prefix = m.group(1) | ||
442 | fragments_variable = m.group(2) | ||
443 | flagged_variables_list_variable = m.group(3) | ||
444 | builtin_fragments_variable = m.group(4) | ||
445 | statements.append(AddFragmentsNode(filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable)) | ||
446 | |||
381 | def runAnonFuncs(d): | 447 | def runAnonFuncs(d): |
382 | code = [] | 448 | code = [] |
383 | for funcname in d.getVar("__BBANONFUNCS", False) or []: | 449 | for funcname in d.getVar("__BBANONFUNCS", False) or []: |
384 | code.append("%s(d)" % funcname) | 450 | code.append("%s(d)" % funcname) |
385 | bb.utils.better_exec("\n".join(code), {"d": d}) | 451 | bb.utils.better_exec("\n".join(code), {"d": d}) |
386 | 452 | ||
453 | # Handle recipe level PREFERRED_PROVIDERs | ||
454 | def handleVirtRecipeProviders(tasklist, d): | ||
455 | depends = (d.getVar("DEPENDS") or "").split() | ||
456 | virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split() | ||
457 | newdeps = [] | ||
458 | for dep in depends: | ||
459 | if dep in virtprovs: | ||
460 | newdep = d.getVar("PREFERRED_PROVIDER_" + dep) | ||
461 | if not newdep: | ||
462 | bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep) | ||
463 | newdeps.append(newdep) | ||
464 | else: | ||
465 | newdeps.append(dep) | ||
466 | d.setVar("DEPENDS", " ".join(newdeps)) | ||
467 | for task in tasklist: | ||
468 | taskdeps = (d.getVarFlag(task, "depends") or "").split() | ||
469 | remapped = [] | ||
470 | for entry in taskdeps: | ||
471 | r, t = entry.split(":") | ||
472 | if r in virtprovs: | ||
473 | r = d.getVar("PREFERRED_PROVIDER_" + r) | ||
474 | remapped.append("%s:%s" % (r, t)) | ||
475 | d.setVarFlag(task, "depends", " ".join(remapped)) | ||
476 | |||
387 | def finalize(fn, d, variant = None): | 477 | def finalize(fn, d, variant = None): |
388 | saved_handlers = bb.event.get_handlers().copy() | 478 | saved_handlers = bb.event.get_handlers().copy() |
389 | try: | 479 | try: |
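handleVirtRecipeProviders rewrites DEPENDS (and the per-task depends flags) by substituting each recipe-level virtual provider with its PREFERRED_PROVIDER_* value, failing hard if one is unset. A sketch of the DEPENDS half, with plain dicts standing in for the datastore:

    virtprovs = {"virtual/cross-cc"}               # BB_RECIPE_VIRTUAL_PROVIDERS
    preferred = {"virtual/cross-cc": "gcc-cross"}  # PREFERRED_PROVIDER_* values
    depends = ["zlib", "virtual/cross-cc"]

    newdeps = [preferred[dep] if dep in virtprovs else dep for dep in depends]
    print(" ".join(newdeps))  # -> zlib gcc-cross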
@@ -391,6 +481,17 @@ def finalize(fn, d, variant = None): | |||
391 | if d.getVar("_FAILPARSINGERRORHANDLED", False) == True: | 481 | if d.getVar("_FAILPARSINGERRORHANDLED", False) == True: |
392 | raise bb.BBHandledException() | 482 | raise bb.BBHandledException() |
393 | 483 | ||
484 | inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])] | ||
485 | bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d) | ||
486 | |||
487 | while True: | ||
488 | inherits = d.getVar('__BBDEFINHERITS', False) or [] | ||
489 | if not inherits: | ||
490 | break | ||
491 | inherit, filename, lineno = inherits.pop(0) | ||
492 | d.setVar('__BBDEFINHERITS', inherits) | ||
493 | bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True) | ||
494 | |||
394 | for var in d.getVar('__BBHANDLERS', False) or []: | 495 | for var in d.getVar('__BBHANDLERS', False) or []: |
395 | # try to add the handler | 496 | # try to add the handler |
396 | handlerfn = d.getVarFlag(var, "filename", False) | 497 | handlerfn = d.getVarFlag(var, "filename", False) |
@@ -409,6 +510,7 @@ def finalize(fn, d, variant = None): | |||
409 | 510 | ||
410 | tasklist = d.getVar('__BBTASKS', False) or [] | 511 | tasklist = d.getVar('__BBTASKS', False) or [] |
411 | bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) | 512 | bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) |
513 | handleVirtRecipeProviders(tasklist, d) | ||
412 | bb.build.add_tasks(tasklist, d) | 514 | bb.build.add_tasks(tasklist, d) |
413 | 515 | ||
414 | bb.parse.siggen.finalise(fn, d, variant) | 516 | bb.parse.siggen.finalise(fn, d, variant) |
@@ -444,14 +546,6 @@ def multi_finalize(fn, d): | |||
444 | logger.debug("Appending .bbappend file %s to %s", append, fn) | 546 | logger.debug("Appending .bbappend file %s to %s", append, fn) |
445 | bb.parse.BBHandler.handle(append, d, True) | 547 | bb.parse.BBHandler.handle(append, d, True) |
446 | 548 | ||
447 | while True: | ||
448 | inherits = d.getVar('__BBDEFINHERITS', False) or [] | ||
449 | if not inherits: | ||
450 | break | ||
451 | inherit, filename, lineno = inherits.pop(0) | ||
452 | d.setVar('__BBDEFINHERITS', inherits) | ||
453 | bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True) | ||
454 | |||
455 | onlyfinalise = d.getVar("__ONLYFINALISE", False) | 549 | onlyfinalise = d.getVar("__ONLYFINALISE", False) |
456 | 550 | ||
457 | safe_d = d | 551 | safe_d = d |
@@ -487,7 +581,7 @@ def multi_finalize(fn, d): | |||
487 | d.setVar("BBEXTENDVARIANT", variantmap[name]) | 581 | d.setVar("BBEXTENDVARIANT", variantmap[name]) |
488 | else: | 582 | else: |
489 | d.setVar("PN", "%s-%s" % (pn, name)) | 583 | d.setVar("PN", "%s-%s" % (pn, name)) |
490 | bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d) | 584 | bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d) |
491 | 585 | ||
492 | safe_d.setVar("BBCLASSEXTEND", extended) | 586 | safe_d.setVar("BBCLASSEXTEND", extended) |
493 | _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) | 587 | _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) |
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index c13e4b9755..008fec2308 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
@@ -23,8 +23,8 @@ __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>faker | |||
23 | __inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) | 23 | __inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) |
24 | __inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" ) | 24 | __inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" ) |
25 | __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) | 25 | __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) |
26 | __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") | 26 | __addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)") |
27 | __deltask_regexp__ = re.compile(r"deltask\s+(.+)") | 27 | __deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)") |
28 | __addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) | 28 | __addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) |
29 | __def_regexp__ = re.compile(r"def\s+(\w+).*:" ) | 29 | __def_regexp__ = re.compile(r"def\s+(\w+).*:" ) |
30 | __python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) | 30 | __python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) |
@@ -42,12 +42,22 @@ def supports(fn, d): | |||
42 | """Return True if fn has a supported extension""" | 42 | """Return True if fn has a supported extension""" |
43 | return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] | 43 | return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] |
44 | 44 | ||
45 | def inherit_defer(expression, fn, lineno, d): | ||
46 | inherit = (expression, fn, lineno) | ||
47 | inherits = d.getVar('__BBDEFINHERITS', False) or [] | ||
48 | inherits.append(inherit) | ||
49 | d.setVar('__BBDEFINHERITS', inherits) | ||
50 | |||
45 | def inherit(files, fn, lineno, d, deferred=False): | 51 | def inherit(files, fn, lineno, d, deferred=False): |
46 | __inherit_cache = d.getVar('__inherit_cache', False) or [] | 52 | __inherit_cache = d.getVar('__inherit_cache', False) or [] |
47 | #if "${" in files and not deferred: | 53 | #if "${" in files and not deferred: |
48 | # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno)) | 54 | # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno)) |
49 | files = d.expand(files).split() | 55 | files = d.expand(files).split() |
50 | for file in files: | 56 | for file in files: |
57 | defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split() | ||
58 | if not deferred and file in defer: | ||
59 | inherit_defer(file, fn, lineno, d) | ||
60 | continue | ||
51 | classtype = d.getVar("__bbclasstype", False) | 61 | classtype = d.getVar("__bbclasstype", False) |
52 | origfile = file | 62 | origfile = file |
53 | for t in ["classes-" + classtype, "classes"]: | 63 | for t in ["classes-" + classtype, "classes"]: |
@@ -239,29 +249,38 @@ def feeder(lineno, s, fn, root, statements, eof=False): | |||
239 | 249 | ||
240 | m = __addtask_regexp__.match(s) | 250 | m = __addtask_regexp__.match(s) |
241 | if m: | 251 | if m: |
242 | if len(m.group().split()) == 2: | 252 | after = "" |
243 | # Check and warn for "addtask task1 task2" | 253 | before = "" |
244 | m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s) | 254 | |
245 | if m2 and m2.group('ignores'): | 255 | # This code splits on 'before' and 'after' instead of on whitespace so we can defer |
246 | logger.warning('addtask ignored: "%s"' % m2.group('ignores')) | 256 | # evaluation to as late as possible. |
247 | 257 | tasks = m.group(1).split(" before ")[0].split(" after ")[0] | |
248 | # Check and warn for "addtask task1 before task2 before task3", the | 258 | |
249 | # similar to "after" | 259 | for exp in m.group(1).split(" before "): |
250 | taskexpression = s.split() | 260 | exp2 = exp.split(" after ") |
251 | for word in ('before', 'after'): | 261 | if len(exp2) > 1: |
252 | if taskexpression.count(word) > 1: | 262 | after = after + " ".join(exp2[1:]) |
253 | logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word) | ||
254 | 263 | ||
255 | # Check and warn for having task with expression as part of task name | 264 |
265 | exp2 = exp.split(" before ") | ||
266 | if len(exp2) > 1: | ||
267 | before = before + " ".join(exp2[1:]) | ||
268 | |||
269 | # Check and warn for having task with a keyword as part of task name | ||
270 | taskexpression = s.split() | ||
256 | for te in taskexpression: | 271 | for te in taskexpression: |
257 | if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): | 272 | if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): |
258 | raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) | 273 | raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) |
259 | ast.handleAddTask(statements, fn, lineno, m) | 274 | |
275 | if tasks is not None: | ||
276 | ast.handleAddTask(statements, fn, lineno, tasks, before, after) | ||
260 | return | 277 | return |
261 | 278 | ||
262 | m = __deltask_regexp__.match(s) | 279 | m = __deltask_regexp__.match(s) |
263 | if m: | 280 | if m: |
264 | ast.handleDelTask(statements, fn, lineno, m) | 281 | task = m.group(1) |
282 | if task is not None: | ||
283 | ast.handleDelTask(statements, fn, lineno, task) | ||
265 | return | 284 | return |
266 | 285 | ||
267 | m = __addhandler_regexp__.match(s) | 286 | m = __addhandler_regexp__.match(s) |
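Splitting the addtask expression on the literal keywords ' before ' and ' after ' rather than on whitespace is what allows the task names to stay unexpanded until later. The same splitting logic, extracted to run standalone:

    expr = "do_foo after do_configure before do_compile"

    tasks = expr.split(" before ")[0].split(" after ")[0]
    after = before = ""
    for exp in expr.split(" before "):
        exp2 = exp.split(" after ")
        if len(exp2) > 1:
            after += " ".join(exp2[1:])
    for exp in expr.split(" after "):
        exp2 = exp.split(" before ")
        if len(exp2) > 1:
            before += " ".join(exp2[1:])

    print(tasks, "|", before, "|", after)  # -> do_foo | do_compile | do_configure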
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index 7826dee7d3..9ddbae123d 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
@@ -20,10 +20,10 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle | |||
20 | __config_regexp__ = re.compile( r""" | 20 | __config_regexp__ = re.compile( r""" |
21 | ^ | 21 | ^ |
22 | (?P<exp>export\s+)? | 22 | (?P<exp>export\s+)? |
23 | (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?) | 23 | (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?) |
24 | (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])? | 24 | (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])? |
25 | 25 | ||
26 | \s* ( | 26 | (?P<whitespace>\s*) ( |
27 | (?P<colon>:=) | | 27 | (?P<colon>:=) | |
28 | (?P<lazyques>\?\?=) | | 28 | (?P<lazyques>\?\?=) | |
29 | (?P<ques>\?=) | | 29 | (?P<ques>\?=) | |
@@ -32,7 +32,7 @@ __config_regexp__ = re.compile( r""" | |||
32 | (?P<predot>=\.) | | 32 | (?P<predot>=\.) | |
33 | (?P<postdot>\.=) | | 33 | (?P<postdot>\.=) | |
34 | = | 34 | = |
35 | ) \s* | 35 | ) (?P<whitespace2>\s*) |
36 | 36 | ||
37 | (?!'[^']*'[^']*'$) | 37 | (?!'[^']*'[^']*'$) |
38 | (?!\"[^\"]*\"[^\"]*\"$) | 38 | (?!\"[^\"]*\"[^\"]*\"$) |
@@ -43,10 +43,12 @@ __config_regexp__ = re.compile( r""" | |||
43 | """, re.X) | 43 | """, re.X) |
44 | __include_regexp__ = re.compile( r"include\s+(.+)" ) | 44 | __include_regexp__ = re.compile( r"include\s+(.+)" ) |
45 | __require_regexp__ = re.compile( r"require\s+(.+)" ) | 45 | __require_regexp__ = re.compile( r"require\s+(.+)" ) |
46 | __includeall_regexp__ = re.compile( r"include_all\s+(.+)" ) | ||
46 | __export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) | 47 | __export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) |
47 | __unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) | 48 | __unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) |
48 | __unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" ) | 49 | __unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" ) |
49 | __addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" ) | 50 | __addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" ) |
51 | __addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" ) | ||
50 | 52 | ||
51 | def init(data): | 53 | def init(data): |
52 | return | 54 | return |
@@ -164,6 +166,10 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True): | |||
164 | m = __config_regexp__.match(s) | 166 | m = __config_regexp__.match(s) |
165 | if m: | 167 | if m: |
166 | groupd = m.groupdict() | 168 | groupd = m.groupdict() |
169 | if groupd['var'] == "": | ||
170 | raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno); | ||
171 | if not groupd['whitespace'] or not groupd['whitespace2']: | ||
172 | logger.warning("%s:%s has a lack of whitespace around the assignment: '%s'" % (fn, lineno, s)) | ||
167 | ast.handleData(statements, fn, lineno, groupd) | 173 | ast.handleData(statements, fn, lineno, groupd) |
168 | return | 174 | return |
169 | 175 | ||
@@ -177,6 +183,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True): | |||
177 | ast.handleInclude(statements, fn, lineno, m, True) | 183 | ast.handleInclude(statements, fn, lineno, m, True) |
178 | return | 184 | return |
179 | 185 | ||
186 | m = __includeall_regexp__.match(s) | ||
187 | if m: | ||
188 | ast.handleIncludeAll(statements, fn, lineno, m) | ||
189 | return | ||
190 | |||
180 | m = __export_regexp__.match(s) | 191 | m = __export_regexp__.match(s) |
181 | if m: | 192 | if m: |
182 | ast.handleExport(statements, fn, lineno, m) | 193 | ast.handleExport(statements, fn, lineno, m) |
@@ -197,6 +208,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True): | |||
197 | ast.handlePyLib(statements, fn, lineno, m) | 208 | ast.handlePyLib(statements, fn, lineno, m) |
198 | return | 209 | return |
199 | 210 | ||
211 | m = __addfragments_regexp__.match(s) | ||
212 | if m: | ||
213 | ast.handleAddFragments(statements, fn, lineno, m) | ||
214 | return | ||
215 | |||
200 | raise ParseError("unparsed line: '%s'" % s, fn, lineno); | 216 | raise ParseError("unparsed line: '%s'" % s, fn, lineno); |
201 | 217 | ||
202 | # Add us to the handlers list | 218 | # Add us to the handlers list |
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py deleted file mode 100644 index bcca791edf..0000000000 --- a/bitbake/lib/bb/persist_data.py +++ /dev/null | |||
@@ -1,271 +0,0 @@ | |||
1 | """BitBake Persistent Data Store | ||
2 | |||
3 | Used to store data in a central location such that other threads/tasks can | ||
4 | access them at some future date. Acts as a convenience wrapper around sqlite, | ||
5 | currently, providing a key/value store accessed by 'domain'. | ||
6 | """ | ||
7 | |||
8 | # Copyright (C) 2007 Richard Purdie | ||
9 | # Copyright (C) 2010 Chris Larson <chris_larson@mentor.com> | ||
10 | # | ||
11 | # SPDX-License-Identifier: GPL-2.0-only | ||
12 | # | ||
13 | |||
14 | import collections | ||
15 | import collections.abc | ||
16 | import contextlib | ||
17 | import functools | ||
18 | import logging | ||
19 | import os.path | ||
20 | import sqlite3 | ||
21 | import sys | ||
22 | from collections.abc import Mapping | ||
23 | |||
24 | sqlversion = sqlite3.sqlite_version_info | ||
25 | if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | ||
26 | raise Exception("sqlite3 version 3.3.0 or later is required.") | ||
27 | |||
28 | |||
29 | logger = logging.getLogger("BitBake.PersistData") | ||
30 | |||
31 | @functools.total_ordering | ||
32 | class SQLTable(collections.abc.MutableMapping): | ||
33 | class _Decorators(object): | ||
34 | @staticmethod | ||
35 | def retry(*, reconnect=True): | ||
36 | """ | ||
37 | Decorator that restarts a function if a database locked sqlite | ||
38 | exception occurs. If reconnect is True, the database connection | ||
39 | will be closed and reopened each time a failure occurs | ||
40 | """ | ||
41 | def retry_wrapper(f): | ||
42 | def wrap_func(self, *args, **kwargs): | ||
43 | # Reconnect if necessary | ||
44 | if self.connection is None and reconnect: | ||
45 | self.reconnect() | ||
46 | |||
47 | count = 0 | ||
48 | while True: | ||
49 | try: | ||
50 | return f(self, *args, **kwargs) | ||
51 | except sqlite3.OperationalError as exc: | ||
52 | if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)): | ||
53 | count = count + 1 | ||
54 | if reconnect: | ||
55 | self.reconnect() | ||
56 | continue | ||
57 | raise | ||
58 | return wrap_func | ||
59 | return retry_wrapper | ||
60 | |||
61 | @staticmethod | ||
62 | def transaction(f): | ||
63 | """ | ||
64 | Decorator that starts a database transaction and creates a database | ||
65 | cursor for performing queries. If no exception is thrown, the | ||
66 | database results are committed. If an exception occurs, the database | ||
67 | is rolled back. In all cases, the cursor is closed after the | ||
68 | function ends. | ||
69 | |||
70 | Note that the cursor is passed as an extra argument to the function | ||
71 | after `self` and before any of the normal arguments | ||
72 | """ | ||
73 | def wrap_func(self, *args, **kwargs): | ||
74 | # Context manager will COMMIT the database on success, | ||
75 | # or ROLLBACK on an exception | ||
76 | with self.connection: | ||
77 | # Automatically close the cursor when done | ||
78 | with contextlib.closing(self.connection.cursor()) as cursor: | ||
79 | return f(self, cursor, *args, **kwargs) | ||
80 | return wrap_func | ||
81 | |||
82 | """Object representing a table/domain in the database""" | ||
83 | def __init__(self, cachefile, table): | ||
84 | self.cachefile = cachefile | ||
85 | self.table = table | ||
86 | |||
87 | self.connection = None | ||
88 | self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table) | ||
89 | |||
90 | @_Decorators.retry(reconnect=False) | ||
91 | @_Decorators.transaction | ||
92 | def _setup_database(self, cursor): | ||
93 | cursor.execute("pragma synchronous = off;") | ||
94 | # Enable WAL and keep the autocheckpoint length small (the default is | ||
95 | # usually 1000). Persistent caches are usually read-mostly, so keeping | ||
96 | # this short will keep readers running quickly | ||
97 | cursor.execute("pragma journal_mode = WAL;") | ||
98 | cursor.execute("pragma wal_autocheckpoint = 100;") | ||
99 | |||
100 | def reconnect(self): | ||
101 | if self.connection is not None: | ||
102 | self.connection.close() | ||
103 | self.connection = sqlite3.connect(self.cachefile, timeout=5) | ||
104 | self.connection.text_factory = str | ||
105 | self._setup_database() | ||
106 | |||
107 | @_Decorators.retry() | ||
108 | @_Decorators.transaction | ||
109 | def _execute_single(self, cursor, *query): | ||
110 | """ | ||
111 | Executes a single query and discards the results. This correctly closes | ||
112 | the database cursor when finished | ||
113 | """ | ||
114 | cursor.execute(*query) | ||
115 | |||
116 | @_Decorators.retry() | ||
117 | def _row_iter(self, f, *query): | ||
118 | """ | ||
119 | Helper function that returns a row iterator. Each time __next__ is | ||
120 | called on the iterator, the provided function is evaluated to determine | ||
121 | the return value | ||
122 | """ | ||
123 | class CursorIter(object): | ||
124 | def __init__(self, cursor): | ||
125 | self.cursor = cursor | ||
126 | |||
127 | def __iter__(self): | ||
128 | return self | ||
129 | |||
130 | def __next__(self): | ||
131 | row = self.cursor.fetchone() | ||
132 | if row is None: | ||
133 | self.cursor.close() | ||
134 | raise StopIteration | ||
135 | return f(row) | ||
136 | |||
137 | def __enter__(self): | ||
138 | return self | ||
139 | |||
140 | def __exit__(self, typ, value, traceback): | ||
141 | self.cursor.close() | ||
142 | return False | ||
143 | |||
144 | cursor = self.connection.cursor() | ||
145 | try: | ||
146 | cursor.execute(*query) | ||
147 | return CursorIter(cursor) | ||
148 | except: | ||
149 | cursor.close() | ||
150 | |||
151 | def __enter__(self): | ||
152 | self.connection.__enter__() | ||
153 | return self | ||
154 | |||
155 | def __exit__(self, *excinfo): | ||
156 | self.connection.__exit__(*excinfo) | ||
157 | |||
158 | @_Decorators.retry() | ||
159 | @_Decorators.transaction | ||
160 | def __getitem__(self, cursor, key): | ||
161 | cursor.execute("SELECT * from %s where key=?;" % self.table, [key]) | ||
162 | row = cursor.fetchone() | ||
163 | if row is not None: | ||
164 | return row[1] | ||
165 | raise KeyError(key) | ||
166 | |||
167 | @_Decorators.retry() | ||
168 | @_Decorators.transaction | ||
169 | def __delitem__(self, cursor, key): | ||
170 | if key not in self: | ||
171 | raise KeyError(key) | ||
172 | cursor.execute("DELETE from %s where key=?;" % self.table, [key]) | ||
173 | |||
174 | @_Decorators.retry() | ||
175 | @_Decorators.transaction | ||
176 | def __setitem__(self, cursor, key, value): | ||
177 | if not isinstance(key, str): | ||
178 | raise TypeError('Only string keys are supported') | ||
179 | elif not isinstance(value, str): | ||
180 | raise TypeError('Only string values are supported') | ||
181 | |||
182 | # Ensure the entire transaction (including SELECT) executes under write lock | ||
183 | cursor.execute("BEGIN EXCLUSIVE") | ||
184 | |||
185 | cursor.execute("SELECT * from %s where key=?;" % self.table, [key]) | ||
186 | row = cursor.fetchone() | ||
187 | if row is not None: | ||
188 | cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key]) | ||
189 | else: | ||
190 | cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value]) | ||
191 | |||
192 | @_Decorators.retry() | ||
193 | @_Decorators.transaction | ||
194 | def __contains__(self, cursor, key): | ||
195 | cursor.execute('SELECT * from %s where key=?;' % self.table, [key]) | ||
196 | return cursor.fetchone() is not None | ||
197 | |||
198 | @_Decorators.retry() | ||
199 | @_Decorators.transaction | ||
200 | def __len__(self, cursor): | ||
201 | cursor.execute("SELECT COUNT(key) FROM %s;" % self.table) | ||
202 | row = cursor.fetchone() | ||
203 | if row is not None: | ||
204 | return row[0] | ||
205 | |||
206 | def __iter__(self): | ||
207 | return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table) | ||
208 | |||
209 | def __lt__(self, other): | ||
210 | if not isinstance(other, Mapping): | ||
211 | raise NotImplementedError() | ||
212 | |||
213 | return len(self) < len(other) | ||
214 | |||
215 | def get_by_pattern(self, pattern): | ||
216 | return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" % | ||
217 | self.table, [pattern]) | ||
218 | |||
219 | def values(self): | ||
220 | return list(self.itervalues()) | ||
221 | |||
222 | def itervalues(self): | ||
223 | return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" % | ||
224 | self.table) | ||
225 | |||
226 | def items(self): | ||
227 | return list(self.iteritems()) | ||
228 | |||
229 | def iteritems(self): | ||
230 | return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" % | ||
231 | self.table) | ||
232 | |||
233 | @_Decorators.retry() | ||
234 | @_Decorators.transaction | ||
235 | def clear(self, cursor): | ||
236 | cursor.execute("DELETE FROM %s;" % self.table) | ||
237 | |||
238 | def has_key(self, key): | ||
239 | return key in self | ||
240 | |||
241 | def persist(domain, d): | ||
242 | """Convenience factory for SQLTable objects based upon metadata""" | ||
243 | import bb.utils | ||
244 | cachedir = (d.getVar("PERSISTENT_DIR") or | ||
245 | d.getVar("CACHE")) | ||
246 | if not cachedir: | ||
247 | logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") | ||
248 | sys.exit(1) | ||
249 | |||
250 | bb.utils.mkdirhier(cachedir) | ||
251 | cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") | ||
252 | |||
253 | try: | ||
254 | return SQLTable(cachefile, domain) | ||
255 | except sqlite3.OperationalError: | ||
256 | # Sqlite fails to open database when its path is too long. | ||
257 | # After testing, 504 is the biggest path length that can be opened by | ||
258 | # sqlite. | ||
259 | # Note: This code is called before sanity.bbclass and its path length | ||
260 | # check | ||
261 | max_len = 504 | ||
262 | if len(cachefile) > max_len: | ||
263 | logger.critical("The path of the cache file is too long " | ||
264 | "({0} chars > {1}) to be opened by sqlite! " | ||
265 | "Your cache file is \"{2}\"".format( | ||
266 | len(cachefile), | ||
267 | max_len, | ||
268 | cachefile)) | ||
269 | sys.exit(1) | ||
270 | else: | ||
271 | raise | ||
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index bc7e18175d..80f3d3282f 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
@@ -14,6 +14,7 @@ import os | |||
14 | import sys | 14 | import sys |
15 | import stat | 15 | import stat |
16 | import errno | 16 | import errno |
17 | import itertools | ||
17 | import logging | 18 | import logging |
18 | import re | 19 | import re |
19 | import bb | 20 | import bb |
@@ -128,6 +129,7 @@ class RunQueueStats: | |||
128 | # runQueue state machine | 129 | # runQueue state machine |
129 | runQueuePrepare = 2 | 130 | runQueuePrepare = 2 |
130 | runQueueSceneInit = 3 | 131 | runQueueSceneInit = 3 |
132 | runQueueDumpSigs = 4 | ||
131 | runQueueRunning = 6 | 133 | runQueueRunning = 6 |
132 | runQueueFailed = 7 | 134 | runQueueFailed = 7 |
133 | runQueueCleanUp = 8 | 135 | runQueueCleanUp = 8 |
@@ -475,7 +477,6 @@ class RunQueueData: | |||
475 | self.runtaskentries = {} | 477 | self.runtaskentries = {} |
476 | 478 | ||
477 | def runq_depends_names(self, ids): | 479 | def runq_depends_names(self, ids): |
478 | import re | ||
479 | ret = [] | 480 | ret = [] |
480 | for id in ids: | 481 | for id in ids: |
481 | nam = os.path.basename(id) | 482 | nam = os.path.basename(id) |
@@ -728,6 +729,8 @@ class RunQueueData: | |||
728 | if mc == frommc: | 729 | if mc == frommc: |
729 | fn = taskData[mcdep].build_targets[pn][0] | 730 | fn = taskData[mcdep].build_targets[pn][0] |
730 | newdep = '%s:%s' % (fn,deptask) | 731 | newdep = '%s:%s' % (fn,deptask) |
732 | if newdep not in taskData[mcdep].taskentries: | ||
733 | bb.fatal("Task mcdepends on non-existent task %s" % (newdep)) | ||
731 | taskData[mc].taskentries[tid].tdepends.append(newdep) | 734 | taskData[mc].taskentries[tid].tdepends.append(newdep) |
732 | 735 | ||
733 | for mc in taskData: | 736 | for mc in taskData: |
@@ -1273,27 +1276,41 @@ class RunQueueData: | |||
1273 | 1276 | ||
1274 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) | 1277 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) |
1275 | 1278 | ||
1279 | starttime = time.time() | ||
1280 | lasttime = starttime | ||
1281 | |||
1276 | # Iterate over the task list and call into the siggen code | 1282 | # Iterate over the task list and call into the siggen code |
1277 | dealtwith = set() | 1283 | dealtwith = set() |
1278 | todeal = set(self.runtaskentries) | 1284 | todeal = set(self.runtaskentries) |
1279 | while todeal: | 1285 | while todeal: |
1286 | ready = set() | ||
1280 | for tid in todeal.copy(): | 1287 | for tid in todeal.copy(): |
1281 | if not (self.runtaskentries[tid].depends - dealtwith): | 1288 | if not (self.runtaskentries[tid].depends - dealtwith): |
1282 | dealtwith.add(tid) | 1289 | self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1283 | todeal.remove(tid) | 1290 | # get_taskhash for a given tid *must* be called before get_unihash* below |
1284 | self.prepare_task_hash(tid) | 1291 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1285 | bb.event.check_for_interrupts(self.cooker.data) | 1292 | ready.add(tid) |
1293 | unihashes = bb.parse.siggen.get_unihashes(ready) | ||
1294 | for tid in ready: | ||
1295 | dealtwith.add(tid) | ||
1296 | todeal.remove(tid) | ||
1297 | self.runtaskentries[tid].unihash = unihashes[tid] | ||
1298 | |||
1299 | bb.event.check_for_interrupts(self.cooker.data) | ||
1300 | |||
1301 | if time.time() > (lasttime + 30): | ||
1302 | lasttime = time.time() | ||
1303 | hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime)) | ||
1304 | |||
1305 | endtime = time.time() | ||
1306 | if (endtime-starttime > 60): | ||
1307 | hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime)) | ||
1286 | 1308 | ||
1287 | bb.parse.siggen.writeout_file_checksum_cache() | 1309 | bb.parse.siggen.writeout_file_checksum_cache() |
1288 | 1310 | ||
1289 | #self.dump_data() | 1311 | #self.dump_data() |
1290 | return len(self.runtaskentries) | 1312 | return len(self.runtaskentries) |
1291 | 1313 | ||
1292 | def prepare_task_hash(self, tid): | ||
1293 | bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) | ||
1294 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) | ||
1295 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) | ||
1296 | |||
1297 | def dump_data(self): | 1314 | def dump_data(self): |
1298 | """ | 1315 | """ |
1299 | Dump some debug information on the internal data structures | 1316 | Dump some debug information on the internal data structures |
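The restructured loop gathers every task whose dependencies are already dealt with into a batch and resolves unihashes for the whole batch at once, amortising round trips to the hash-equivalence server instead of paying one per task. The shape of the loop with toy dependency data:

    depends = {"a": set(), "b": {"a"}, "c": {"a"}, "d": {"b", "c"}}  # toy graph

    dealtwith, todeal = set(), set(depends)
    while todeal:
        ready = {t for t in todeal if not (depends[t] - dealtwith)}
        unihashes = {t: "hash-" + t for t in ready}  # stands in for get_unihashes(ready)
        dealtwith |= ready
        todeal -= ready
        print(sorted(ready), sorted(unihashes.values()))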
@@ -1574,14 +1591,19 @@ class RunQueue: | |||
1574 | self.rqdata.init_progress_reporter.next_stage() | 1591 | self.rqdata.init_progress_reporter.next_stage() |
1575 | self.rqexe = RunQueueExecute(self) | 1592 | self.rqexe = RunQueueExecute(self) |
1576 | 1593 | ||
1577 | dump = self.cooker.configuration.dump_signatures | 1594 | dumpsigs = self.cooker.configuration.dump_signatures |
1578 | if dump: | 1595 | if dumpsigs: |
1579 | self.rqdata.init_progress_reporter.finish() | 1596 | self.rqdata.init_progress_reporter.finish() |
1580 | if 'printdiff' in dump: | 1597 | if 'printdiff' in dumpsigs: |
1581 | invalidtasks = self.print_diffscenetasks() | 1598 | self.invalidtasks_dump = self.print_diffscenetasks() |
1582 | self.dump_signatures(dump) | 1599 | self.state = runQueueDumpSigs |
1583 | if 'printdiff' in dump: | 1600 | |
1584 | self.write_diffscenetasks(invalidtasks) | 1601 | if self.state is runQueueDumpSigs: |
1602 | dumpsigs = self.cooker.configuration.dump_signatures | ||
1603 | retval = self.dump_signatures(dumpsigs) | ||
1604 | if retval is False: | ||
1605 | if 'printdiff' in dumpsigs: | ||
1606 | self.write_diffscenetasks(self.invalidtasks_dump) | ||
1585 | self.state = runQueueComplete | 1607 | self.state = runQueueComplete |
1586 | 1608 | ||
1587 | if self.state is runQueueSceneInit: | 1609 | if self.state is runQueueSceneInit: |
@@ -1672,33 +1694,42 @@ class RunQueue: | |||
1672 | bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True) | 1694 | bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True) |
1673 | 1695 | ||
1674 | def dump_signatures(self, options): | 1696 | def dump_signatures(self, options): |
1675 | if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset: | 1697 | if not hasattr(self, "dumpsigs_launched"): |
1676 | bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled") | 1698 | if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset: |
1677 | 1699 | bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled") | |
1678 | bb.note("Writing task signature files") | 1700 | |
1679 | 1701 | bb.note("Writing task signature files") | |
1680 | max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) | 1702 | |
1681 | def chunkify(l, n): | 1703 | max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) |
1682 | return [l[i::n] for i in range(n)] | 1704 | def chunkify(l, n): |
1683 | tids = chunkify(list(self.rqdata.runtaskentries), max_process) | 1705 | return [l[i::n] for i in range(n)] |
1684 | # We cannot use the real multiprocessing.Pool easily due to some local data | 1706 | dumpsigs_tids = chunkify(list(self.rqdata.runtaskentries), max_process) |
1685 | # that can't be pickled. This is a cheap multi-process solution. | 1707 | |
1686 | launched = [] | 1708 | # We cannot use the real multiprocessing.Pool easily due to some local data |
1687 | while tids: | 1709 | # that can't be pickled. This is a cheap multi-process solution. |
1688 | if len(launched) < max_process: | 1710 | self.dumpsigs_launched = [] |
1689 | p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), )) | 1711 | |
1712 | for tids in dumpsigs_tids: | ||
1713 | p = Process(target=self._rq_dump_sigtid, args=(tids, )) | ||
1690 | p.start() | 1714 | p.start() |
1691 | launched.append(p) | 1715 | self.dumpsigs_launched.append(p) |
1692 | for q in launched: | 1716 | |
1693 | # The finished processes are joined when calling is_alive() | 1717 | return 1.0 |
1694 | if not q.is_alive(): | 1718 | |
1695 | launched.remove(q) | 1719 | for q in self.dumpsigs_launched: |
1696 | for p in launched: | 1720 | # The finished processes are joined when calling is_alive() |
1721 | if not q.is_alive(): | ||
1722 | self.dumpsigs_launched.remove(q) | ||
1723 | |||
1724 | if self.dumpsigs_launched: | ||
1725 | return 1.0 | ||
1726 | |||
1727 | for p in self.dumpsigs_launched: | ||
1697 | p.join() | 1728 | p.join() |
1698 | 1729 | ||
1699 | bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) | 1730 | bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) |
1700 | 1731 | ||
1701 | return | 1732 | return False |
1702 | 1733 | ||
1703 | def print_diffscenetasks(self): | 1734 | def print_diffscenetasks(self): |
1704 | def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid): | 1735 | def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid): |
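Note on the dump_signatures() rework above: the blocking loop becomes a poll-style idle callback. On first entry it forks one Process per chunk of tids and returns 1.0 ("call me again"); on later entries it reaps finished workers via is_alive() and returns False only once all have exited. A minimal standalone sketch of that pattern follows; Dumper and _dump_chunk are illustrative names, not bitbake API.

    import os
    import time
    from multiprocessing import Process

    def _dump_chunk(tids):
        for tid in tids:
            pass  # stand-in for dumping one task signature

    class Dumper:
        def dump(self, all_tids, max_process=None):
            if not hasattr(self, "launched"):
                n = max_process or os.cpu_count() or 1
                # round-robin chunking, like chunkify() above
                chunks = [all_tids[i::n] for i in range(n)]
                self.launched = [Process(target=_dump_chunk, args=(c,)) for c in chunks]
                for p in self.launched:
                    p.start()
                return 1.0  # tell the main loop to call us again
            # is_alive() joins (reaps) finished children as a side effect
            self.launched = [p for p in self.launched if p.is_alive()]
            return 1.0 if self.launched else False

    if __name__ == "__main__":
        d = Dumper()
        while d.dump(list(range(100))) is not False:
            time.sleep(0.1)  # a real main loop would service other work here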
@@ -2175,12 +2206,20 @@ class RunQueueExecute: | |||
2175 | if not hasattr(self, "sorted_setscene_tids"): | 2206 | if not hasattr(self, "sorted_setscene_tids"): |
2176 | # Don't want to sort this set every execution | 2207 | # Don't want to sort this set every execution |
2177 | self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) | 2208 | self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) |
2209 | # Resume looping where we left off when we returned to feed the mainloop | ||
2210 | self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids) | ||
2178 | 2211 | ||
2179 | task = None | 2212 | task = None |
2180 | if not self.sqdone and self.can_start_task(): | 2213 | if not self.sqdone and self.can_start_task(): |
2181 | # Find the next setscene to run | 2214 | loopcount = 0 |
2182 | for nexttask in self.sorted_setscene_tids: | 2215 | # Find the next setscene to run; exit the loop when we've processed all tids or found something to execute |
2216 | while loopcount < len(self.rqdata.runq_setscene_tids): | ||
2217 | loopcount += 1 | ||
2218 | nexttask = next(self.setscene_tids_generator) | ||
2183 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred: | 2219 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred: |
2220 | if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete: | ||
2221 | # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds | ||
2222 | continue | ||
2184 | if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \ | 2223 | if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \ |
2185 | nexttask not in self.sq_needed_harddeps and \ | 2224 | nexttask not in self.sq_needed_harddeps and \ |
2186 | self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \ | 2225 | self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \ |
@@ -2210,8 +2249,7 @@ class RunQueueExecute: | |||
2210 | if t in self.runq_running and t not in self.runq_complete: | 2249 | if t in self.runq_running and t not in self.runq_complete: |
2211 | continue | 2250 | continue |
2212 | if nexttask in self.sq_deferred: | 2251 | if nexttask in self.sq_deferred: |
2213 | if self.sq_deferred[nexttask] not in self.runq_complete: | 2252 | # Deferred tasks that were still deferred were skipped above, so this one is now ready to process |
2214 | continue | ||
2215 | logger.debug("Task %s no longer deferred" % nexttask) | 2253 | logger.debug("Task %s no longer deferred" % nexttask) |
2216 | del self.sq_deferred[nexttask] | 2254 | del self.sq_deferred[nexttask] |
2217 | valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) | 2255 | valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) |
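The scheduler change above replaces a fresh sorted scan on every call with a persistent itertools.cycle generator, so each invocation resumes where the previous one stopped and loopcount bounds the scan to one full lap. A toy sketch of the same resumable round-robin (Scanner is a made-up name):

    import itertools

    class Scanner:
        def __init__(self, tids):
            self.tids = tids
            # cycle() remembers its position between calls
            self.gen = itertools.cycle(tids)

        def find_next(self, runnable):
            for _ in range(len(self.tids)):  # at most one full lap
                tid = next(self.gen)
                if tid in runnable:
                    return tid
            return None

    s = Scanner(["a:do_x", "b:do_y", "c:do_z"])
    print(s.find_next({"b:do_y"}))  # b:do_y; the next call resumes after it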
@@ -2438,14 +2476,17 @@ class RunQueueExecute: | |||
2438 | taskdepdata_cache = {} | 2476 | taskdepdata_cache = {} |
2439 | for task in self.rqdata.runtaskentries: | 2477 | for task in self.rqdata.runtaskentries: |
2440 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) | 2478 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) |
2441 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 2479 | taskdepdata_cache[task] = bb.TaskData( |
2442 | deps = self.rqdata.runtaskentries[task].depends | 2480 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn], |
2443 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | 2481 | taskname = taskname, |
2444 | taskhash = self.rqdata.runtaskentries[task].hash | 2482 | fn = fn, |
2445 | unihash = self.rqdata.runtaskentries[task].unihash | 2483 | deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends), |
2446 | deps = self.filtermcdeps(task, mc, deps) | 2484 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn], |
2447 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] | 2485 | taskhash = self.rqdata.runtaskentries[task].hash, |
2448 | taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] | 2486 | unihash = self.rqdata.runtaskentries[task].unihash, |
2487 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn], | ||
2488 | taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps, | ||
2489 | ) | ||
2449 | 2490 | ||
2450 | self.taskdepdata_cache = taskdepdata_cache | 2491 | self.taskdepdata_cache = taskdepdata_cache |
2451 | 2492 | ||
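The taskdepdata cache above now stores bb.TaskData records instead of positional lists. Assuming TaskData is a namedtuple with the fields used in the hunk (the real definition lives in the bb module), the gain is named access plus cheap immutable updates via _replace(), as the next hunk uses:

    from collections import namedtuple

    # Field list inferred from the hunk above; illustrative, not the real definition
    TaskData = namedtuple("TaskData",
        ["pn", "taskname", "fn", "deps", "provides",
         "taskhash", "unihash", "hashfn", "taskhash_deps"])

    entry = TaskData(pn="m4", taskname="do_compile", fn="m4.bb", deps=set(),
                     provides=["m4"], taskhash="abc", unihash="abc",
                     hashfn="x", taskhash_deps=set())
    print(entry.deps)                      # named access instead of entry[3]
    entry = entry._replace(unihash="def")  # tuples are immutable, so copy-update
    print(entry.unihash)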
@@ -2460,9 +2501,11 @@ class RunQueueExecute: | |||
2460 | while next: | 2501 | while next: |
2461 | additional = [] | 2502 | additional = [] |
2462 | for revdep in next: | 2503 | for revdep in next: |
2463 | self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash | 2504 | self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace( |
2505 | unihash=self.rqdata.runtaskentries[revdep].unihash | ||
2506 | ) | ||
2464 | taskdepdata[revdep] = self.taskdepdata_cache[revdep] | 2507 | taskdepdata[revdep] = self.taskdepdata_cache[revdep] |
2465 | for revdep2 in self.taskdepdata_cache[revdep][3]: | 2508 | for revdep2 in self.taskdepdata_cache[revdep].deps: |
2466 | if revdep2 not in taskdepdata: | 2509 | if revdep2 not in taskdepdata: |
2467 | additional.append(revdep2) | 2510 | additional.append(revdep2) |
2468 | next = additional | 2511 | next = additional |
@@ -2531,9 +2574,6 @@ class RunQueueExecute: | |||
2531 | self.rqdata.runtaskentries[hashtid].unihash = unihash | 2574 | self.rqdata.runtaskentries[hashtid].unihash = unihash |
2532 | bb.parse.siggen.set_unihash(hashtid, unihash) | 2575 | bb.parse.siggen.set_unihash(hashtid, unihash) |
2533 | toprocess.add(hashtid) | 2576 | toprocess.add(hashtid) |
2534 | if torehash: | ||
2535 | # Need to save after set_unihash above | ||
2536 | bb.parse.siggen.save_unitaskhashes() | ||
2537 | 2577 | ||
2538 | # Work out all tasks which depend upon these | 2578 | # Work out all tasks which depend upon these |
2539 | total = set() | 2579 | total = set() |
@@ -2556,17 +2596,28 @@ class RunQueueExecute: | |||
2556 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): | 2596 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): |
2557 | next.add(p) | 2597 | next.add(p) |
2558 | 2598 | ||
2599 | starttime = time.time() | ||
2600 | lasttime = starttime | ||
2601 | |||
2559 | # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled | 2602 | # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled |
2560 | while next: | 2603 | while next: |
2561 | current = next.copy() | 2604 | current = next.copy() |
2562 | next = set() | 2605 | next = set() |
2606 | ready = {} | ||
2563 | for tid in current: | 2607 | for tid in current: |
2564 | if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): | 2608 | if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): |
2565 | continue | 2609 | continue |
2610 | # get_taskhash for a given tid *must* be called before get_unihash* below | ||
2611 | ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) | ||
2612 | |||
2613 | unihashes = bb.parse.siggen.get_unihashes(ready.keys()) | ||
2614 | |||
2615 | for tid in ready: | ||
2566 | orighash = self.rqdata.runtaskentries[tid].hash | 2616 | orighash = self.rqdata.runtaskentries[tid].hash |
2567 | newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) | 2617 | newhash = ready[tid] |
2568 | origuni = self.rqdata.runtaskentries[tid].unihash | 2618 | origuni = self.rqdata.runtaskentries[tid].unihash |
2569 | newuni = bb.parse.siggen.get_unihash(tid) | 2619 | newuni = unihashes[tid] |
2620 | |||
2570 | # FIXME, need to check it can come from sstate at all for determinism? | 2621 | # FIXME, need to check it can come from sstate at all for determinism? |
2571 | remapped = False | 2622 | remapped = False |
2572 | if newuni == origuni: | 2623 | if newuni == origuni: |
@@ -2587,6 +2638,15 @@ class RunQueueExecute: | |||
2587 | next |= self.rqdata.runtaskentries[tid].revdeps | 2638 | next |= self.rqdata.runtaskentries[tid].revdeps |
2588 | total.remove(tid) | 2639 | total.remove(tid) |
2589 | next.intersection_update(total) | 2640 | next.intersection_update(total) |
2641 | bb.event.check_for_interrupts(self.cooker.data) | ||
2642 | |||
2643 | if time.time() > (lasttime + 30): | ||
2644 | lasttime = time.time() | ||
2645 | hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime)) | ||
2646 | |||
2647 | endtime = time.time() | ||
2648 | if (endtime-starttime > 60): | ||
2649 | hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime)) | ||
2590 | 2650 | ||
2591 | if changed: | 2651 | if changed: |
2592 | for mc in self.rq.worker: | 2652 | for mc in self.rq.worker: |
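The rehash loop above is split into two phases: get_taskhash() runs for every ready tid first (the new comment notes it must precede any unihash lookup), then get_unihashes() resolves the whole batch in one server round trip instead of one call per task. A schematic of that shape with stand-in callables:

    def rehash_batch(tids, compute_taskhash, get_unihashes_batch):
        # Phase 1: taskhashes for every ready tid (must come first)
        ready = {tid: compute_taskhash(tid) for tid in tids}
        # Phase 2: one batched unihash query instead of per-tid calls
        unihashes = get_unihashes_batch(ready.keys())
        return {tid: (ready[tid], unihashes[tid]) for tid in ready}

    print(rehash_batch(["t1", "t2"],
                       lambda t: "hash-" + t,
                       lambda ts: {t: "uni-" + t for t in ts}))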
@@ -2712,8 +2772,12 @@ class RunQueueExecute: | |||
2712 | logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) | 2772 | logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) |
2713 | self.sq_task_failoutright(dep) | 2773 | self.sq_task_failoutright(dep) |
2714 | continue | 2774 | continue |
2775 | |||
2776 | # For performance, only compute allcovered once if needed | ||
2777 | if self.sqdata.sq_deps[task]: | ||
2778 | allcovered = self.scenequeue_covered | self.scenequeue_notcovered | ||
2715 | for dep in sorted(self.sqdata.sq_deps[task]): | 2779 | for dep in sorted(self.sqdata.sq_deps[task]): |
2716 | if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | 2780 | if self.sqdata.sq_revdeps[dep].issubset(allcovered): |
2717 | if dep not in self.sq_buildable: | 2781 | if dep not in self.sq_buildable: |
2718 | self.sq_buildable.add(dep) | 2782 | self.sq_buildable.add(dep) |
2719 | 2783 | ||
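The sq_deps hunk above hoists a loop-invariant set union out of the per-dep subset test: covered | notcovered does not change while iterating, so it is computed once per task. In miniature, with made-up data:

    covered = {"a", "b"}
    notcovered = {"c"}
    sq_revdeps = {"d1": {"a"}, "d2": {"a", "c"}, "d3": {"a", "x"}}

    allcovered = covered | notcovered  # invariant: computed once, not per dep
    buildable = [d for d, rev in sq_revdeps.items() if rev.issubset(allcovered)]
    print(buildable)  # ['d1', 'd2']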
@@ -2806,13 +2870,19 @@ class RunQueueExecute: | |||
2806 | additional = [] | 2870 | additional = [] |
2807 | for revdep in next: | 2871 | for revdep in next: |
2808 | (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) | 2872 | (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) |
2809 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | ||
2810 | deps = getsetscenedeps(revdep) | 2873 | deps = getsetscenedeps(revdep) |
2811 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | 2874 | |
2812 | taskhash = self.rqdata.runtaskentries[revdep].hash | 2875 | taskdepdata[revdep] = bb.TaskData( |
2813 | unihash = self.rqdata.runtaskentries[revdep].unihash | 2876 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn], |
2814 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] | 2877 | taskname = taskname, |
2815 | taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] | 2878 | fn = fn, |
2879 | deps = deps, | ||
2880 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn], | ||
2881 | taskhash = self.rqdata.runtaskentries[revdep].hash, | ||
2882 | unihash = self.rqdata.runtaskentries[revdep].unihash, | ||
2883 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn], | ||
2884 | taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps, | ||
2885 | ) | ||
2816 | for revdep2 in deps: | 2886 | for revdep2 in deps: |
2817 | if revdep2 not in taskdepdata: | 2887 | if revdep2 not in taskdepdata: |
2818 | additional.append(revdep2) | 2888 | additional.append(revdep2) |
@@ -2964,14 +3034,13 @@ def build_scenequeue_data(sqdata, rqdata, sqrq): | |||
2964 | rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) | 3034 | rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) |
2965 | 3035 | ||
2966 | # Sanity check all dependencies could be changed to setscene task references | 3036 | # Sanity check all dependencies could be changed to setscene task references |
2967 | for taskcounter, tid in enumerate(rqdata.runtaskentries): | 3037 | for tid in rqdata.runtaskentries: |
2968 | if tid in rqdata.runq_setscene_tids: | 3038 | if tid in rqdata.runq_setscene_tids: |
2969 | pass | 3039 | pass |
2970 | elif sq_revdeps_squash[tid]: | 3040 | elif sq_revdeps_squash[tid]: |
2971 | bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.") | 3041 | bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.") |
2972 | else: | 3042 | else: |
2973 | del sq_revdeps_squash[tid] | 3043 | del sq_revdeps_squash[tid] |
2974 | rqdata.init_progress_reporter.update(taskcounter) | ||
2975 | 3044 | ||
2976 | rqdata.init_progress_reporter.next_stage() | 3045 | rqdata.init_progress_reporter.next_stage() |
2977 | 3046 | ||
@@ -3261,7 +3330,7 @@ class runQueuePipe(): | |||
3261 | 3330 | ||
3262 | start = len(self.queue) | 3331 | start = len(self.queue) |
3263 | try: | 3332 | try: |
3264 | self.queue.extend(self.input.read(102400) or b"") | 3333 | self.queue.extend(self.input.read(512 * 1024) or b"") |
3265 | except (OSError, IOError) as e: | 3334 | except (OSError, IOError) as e: |
3266 | if e.errno != errno.EAGAIN: | 3335 | if e.errno != errno.EAGAIN: |
3267 | raise | 3336 | raise |
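The runQueuePipe change raises the per-read chunk from 100 KiB to 512 KiB; the surrounding try/except treats EAGAIN as "no data yet" on the non-blocking pipe rather than an error. A self-contained sketch of the same read pattern (drain is an illustrative helper):

    import errno
    import os

    def drain(fd, queue):
        try:
            queue.extend(os.read(fd, 512 * 1024) or b"")  # bigger reads, fewer wakeups
        except OSError as e:
            if e.errno != errno.EAGAIN:
                raise  # EAGAIN only means the pipe is empty right now

    r, w = os.pipe()
    os.set_blocking(r, False)
    buf = bytearray()
    drain(r, buf)            # empty pipe: EAGAIN is swallowed
    os.write(w, b"hello")
    drain(r, buf)
    print(bytes(buf))        # b'hello'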
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py index 76b189291d..4b35be62cd 100644 --- a/bitbake/lib/bb/server/process.py +++ b/bitbake/lib/bb/server/process.py | |||
@@ -321,7 +321,22 @@ class ProcessServer(): | |||
321 | bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) | 321 | bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) |
322 | seendata = True | 322 | seendata = True |
323 | 323 | ||
324 | ready = self.idle_commands(.1, fds) | 324 | if not self.idle: |
325 | self.idle = threading.Thread(target=self.idle_thread) | ||
326 | self.idle.start() | ||
327 | elif self.idle and not self.idle.is_alive(): | ||
328 | serverlog("Idle thread terminated, main thread exiting too") | ||
329 | bb.error("Idle thread terminated, main thread exiting too") | ||
330 | self.quit = True | ||
331 | |||
332 | nextsleep = 1.0 | ||
333 | if self.xmlrpc: | ||
334 | nextsleep = self.xmlrpc.get_timeout(nextsleep) | ||
335 | try: | ||
336 | ready = select.select(fds,[],[],nextsleep)[0] | ||
337 | except InterruptedError: | ||
338 | # Ignore EINTR | ||
339 | ready = [] | ||
325 | 340 | ||
326 | if self.idle: | 341 | if self.idle: |
327 | self.idle.join() | 342 | self.idle.join() |
@@ -424,7 +439,7 @@ class ProcessServer(): | |||
424 | self.idle_cond.notify_all() | 439 | self.idle_cond.notify_all() |
425 | 440 | ||
426 | while not self.quit: | 441 | while not self.quit: |
427 | nextsleep = 0.1 | 442 | nextsleep = 1.0 |
428 | fds = [] | 443 | fds = [] |
429 | 444 | ||
430 | with bb.utils.lock_timeout(self._idlefuncsLock): | 445 | with bb.utils.lock_timeout(self._idlefuncsLock): |
@@ -462,7 +477,7 @@ class ProcessServer(): | |||
462 | 477 | ||
463 | # Create new heartbeat event? | 478 | # Create new heartbeat event? |
464 | now = time.time() | 479 | now = time.time() |
465 | if bb.event._heartbeat_enabled and now >= self.next_heartbeat: | 480 | if items and bb.event._heartbeat_enabled and now >= self.next_heartbeat: |
466 | # We might have missed heartbeats. Just trigger once in | 481 | # We might have missed heartbeats. Just trigger once in |
467 | # that case and continue after the usual delay. | 482 | # that case and continue after the usual delay. |
468 | self.next_heartbeat += self.heartbeat_seconds | 483 | self.next_heartbeat += self.heartbeat_seconds |
@@ -485,31 +500,6 @@ class ProcessServer(): | |||
485 | if nextsleep is not None: | 500 | if nextsleep is not None: |
486 | select.select(fds,[],[],nextsleep)[0] | 501 | select.select(fds,[],[],nextsleep)[0] |
487 | 502 | ||
488 | def idle_commands(self, delay, fds=None): | ||
489 | nextsleep = delay | ||
490 | if not fds: | ||
491 | fds = [] | ||
492 | |||
493 | if not self.idle: | ||
494 | self.idle = threading.Thread(target=self.idle_thread) | ||
495 | self.idle.start() | ||
496 | elif self.idle and not self.idle.is_alive(): | ||
497 | serverlog("Idle thread terminated, main thread exiting too") | ||
498 | bb.error("Idle thread terminated, main thread exiting too") | ||
499 | self.quit = True | ||
500 | |||
501 | if nextsleep is not None: | ||
502 | if self.xmlrpc: | ||
503 | nextsleep = self.xmlrpc.get_timeout(nextsleep) | ||
504 | try: | ||
505 | return select.select(fds,[],[],nextsleep)[0] | ||
506 | except InterruptedError: | ||
507 | # Ignore EINTR | ||
508 | return [] | ||
509 | else: | ||
510 | return select.select(fds,[],[],0)[0] | ||
511 | |||
512 | |||
513 | class ServerCommunicator(): | 503 | class ServerCommunicator(): |
514 | def __init__(self, connection, recv): | 504 | def __init__(self, connection, recv): |
515 | self.connection = connection | 505 | self.connection = connection |
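The ProcessServer change above inlines what the removed idle_commands() helper used to do: start the idle thread once, bail out if it dies, and sleep in select() for up to nextsleep seconds while swallowing EINTR. A POSIX-only toy of that main-loop shape (all names here are illustrative, not bitbake's):

    import select
    import threading
    import time

    def idle_loop(stop):
        while not stop.is_set():
            time.sleep(0.2)  # stand-in for running idle callbacks

    stop = threading.Event()
    idle = threading.Thread(target=idle_loop, args=(stop,))
    idle.start()

    fds = []                 # descriptors to watch; empty in this toy
    for _ in range(3):       # a few main-loop iterations
        if not idle.is_alive():
            break            # mirrors "idle thread terminated, exiting too"
        try:
            ready = select.select(fds, [], [], 1.0)[0]
        except InterruptedError:
            ready = []       # EINTR is just an early wakeup
    stop.set()
    idle.join()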
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py index 04b0b17db1..ebc271aca4 100644 --- a/bitbake/lib/bb/server/xmlrpcserver.py +++ b/bitbake/lib/bb/server/xmlrpcserver.py | |||
@@ -14,6 +14,8 @@ from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler | |||
14 | import bb.server.xmlrpcclient | 14 | import bb.server.xmlrpcclient |
15 | 15 | ||
16 | import bb | 16 | import bb |
17 | import bb.cooker | ||
18 | import bb.event | ||
17 | 19 | ||
18 | # This request handler checks if the request has a "Bitbake-token" header | 20 | # This request handler checks if the request has a "Bitbake-token" header |
19 | # field (this comes from the client side) and compares it with its internal | 21 | # field (this comes from the client side) and compares it with its internal |
@@ -54,7 +56,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer): | |||
54 | 56 | ||
55 | def __init__(self, interface, cooker, parent): | 57 | def __init__(self, interface, cooker, parent): |
56 | # Use auto port configuration | 58 | # Use auto port configuration |
57 | if (interface[1] == -1): | 59 | if interface[1] == -1: |
58 | interface = (interface[0], 0) | 60 | interface = (interface[0], 0) |
59 | SimpleXMLRPCServer.__init__(self, interface, | 61 | SimpleXMLRPCServer.__init__(self, interface, |
60 | requestHandler=BitBakeXMLRPCRequestHandler, | 62 | requestHandler=BitBakeXMLRPCRequestHandler, |
@@ -87,11 +89,12 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer): | |||
87 | def handle_requests(self): | 89 | def handle_requests(self): |
88 | self._handle_request_noblock() | 90 | self._handle_request_noblock() |
89 | 91 | ||
90 | class BitBakeXMLRPCServerCommands(): | 92 | class BitBakeXMLRPCServerCommands: |
91 | 93 | ||
92 | def __init__(self, server): | 94 | def __init__(self, server): |
93 | self.server = server | 95 | self.server = server |
94 | self.has_client = False | 96 | self.has_client = False |
97 | self.event_handle = None | ||
95 | 98 | ||
96 | def registerEventHandler(self, host, port): | 99 | def registerEventHandler(self, host, port): |
97 | """ | 100 | """ |
@@ -100,8 +103,8 @@ class BitBakeXMLRPCServerCommands(): | |||
100 | s, t = bb.server.xmlrpcclient._create_server(host, port) | 103 | s, t = bb.server.xmlrpcclient._create_server(host, port) |
101 | 104 | ||
102 | # we don't allow connections if the cooker is running | 105 | # we don't allow connections if the cooker is running |
103 | if (self.server.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]): | 106 | if self.server.cooker.state in [bb.cooker.State.PARSING, bb.cooker.State.RUNNING]: |
104 | return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.server.cooker.state) | 107 | return None, f"Cooker is busy: {self.server.cooker.state.name}" |
105 | 108 | ||
106 | self.event_handle = bb.event.register_UIHhandler(s, True) | 109 | self.event_handle = bb.event.register_UIHhandler(s, True) |
107 | return self.event_handle, 'OK' | 110 | return self.event_handle, 'OK' |
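The registerEventHandler() fix reflects the cooker state becoming a proper Enum (bb.cooker.State.PARSING and so on), whose .name attribute replaces the old state.get_name() helper. In isolation:

    from enum import Enum

    class State(Enum):
        INITIAL = 0
        PARSING = 1
        RUNNING = 2

    state = State.PARSING
    if state in [State.PARSING, State.RUNNING]:
        print(f"Cooker is busy: {state.name}")  # Cooker is busy: PARSING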
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 8ab08ec961..a6163b55ea 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -201,9 +201,6 @@ class SignatureGenerator(object): | |||
201 | def save_unitaskhashes(self): | 201 | def save_unitaskhashes(self): |
202 | return | 202 | return |
203 | 203 | ||
204 | def copy_unitaskhashes(self, targetdir): | ||
205 | return | ||
206 | |||
207 | def set_setscene_tasks(self, setscene_tasks): | 204 | def set_setscene_tasks(self, setscene_tasks): |
208 | return | 205 | return |
209 | 206 | ||
@@ -381,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
381 | self.taints[tid] = taint | 378 | self.taints[tid] = taint |
382 | logger.warning("%s is tainted from a forced run" % tid) | 379 | logger.warning("%s is tainted from a forced run" % tid) |
383 | 380 | ||
384 | return | 381 | return set(dep for _, dep in self.runtaskdeps[tid]) |
385 | 382 | ||
386 | def get_taskhash(self, tid, deps, dataCaches): | 383 | def get_taskhash(self, tid, deps, dataCaches): |
387 | 384 | ||
@@ -418,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
418 | def save_unitaskhashes(self): | 415 | def save_unitaskhashes(self): |
419 | self.unihash_cache.save(self.unitaskhashes) | 416 | self.unihash_cache.save(self.unitaskhashes) |
420 | 417 | ||
421 | def copy_unitaskhashes(self, targetdir): | ||
422 | self.unihash_cache.copyfile(targetdir) | ||
423 | |||
424 | def dump_sigtask(self, mcfn, task, stampbase, runtime): | 418 | def dump_sigtask(self, mcfn, task, stampbase, runtime): |
425 | tid = mcfn + ":" + task | 419 | tid = mcfn + ":" + task |
426 | mc = bb.runqueue.mc_from_tid(mcfn) | 420 | mc = bb.runqueue.mc_from_tid(mcfn) |
@@ -540,7 +534,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
540 | def __init__(self, data): | 534 | def __init__(self, data): |
541 | self.extramethod = {} | 535 | self.extramethod = {} |
542 | # NOTE: The cache only tracks hashes that exist. Hashes that don't | 536 | # NOTE: The cache only tracks hashes that exist. Hashes that don't |
543 | # exist are always queries from the server since it is possible for | 537 | # exist are always queried from the server since it is possible for |
544 | # hashes to appear over time, but much less likely for them to | 538 | # hashes to appear over time, but much less likely for them to |
545 | # disappear | 539 | # disappear |
546 | self.unihash_exists_cache = set() | 540 | self.unihash_exists_cache = set() |
@@ -558,11 +552,11 @@ class SignatureGeneratorUniHashMixIn(object): | |||
558 | super().__init__(data) | 552 | super().__init__(data) |
559 | 553 | ||
560 | def get_taskdata(self): | 554 | def get_taskdata(self): |
561 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() | 555 | return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata() |
562 | 556 | ||
563 | def set_taskdata(self, data): | 557 | def set_taskdata(self, data): |
564 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] | 558 | self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6] |
565 | super().set_taskdata(data[7:]) | 559 | super().set_taskdata(data[6:]) |
566 | 560 | ||
567 | def get_hashserv_creds(self): | 561 | def get_hashserv_creds(self): |
568 | if self.username and self.password: | 562 | if self.username and self.password: |
@@ -595,13 +589,6 @@ class SignatureGeneratorUniHashMixIn(object): | |||
595 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) | 589 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) |
596 | yield self._client | 590 | yield self._client |
597 | 591 | ||
598 | @contextmanager | ||
599 | def client_pool(self): | ||
600 | with self._client_env(): | ||
601 | if getattr(self, '_client_pool', None) is None: | ||
602 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) | ||
603 | yield self._client_pool | ||
604 | |||
605 | def reset(self, data): | 592 | def reset(self, data): |
606 | self.__close_clients() | 593 | self.__close_clients() |
607 | return super().reset(data) | 594 | return super().reset(data) |
@@ -678,25 +665,20 @@ class SignatureGeneratorUniHashMixIn(object): | |||
678 | if len(query) == 0: | 665 | if len(query) == 0: |
679 | return {} | 666 | return {} |
680 | 667 | ||
681 | uncached_query = {} | 668 | query_keys = [] |
682 | result = {} | 669 | result = {} |
683 | for key, unihash in query.items(): | 670 | for key, unihash in query.items(): |
684 | if unihash in self.unihash_exists_cache: | 671 | if unihash in self.unihash_exists_cache: |
685 | result[key] = True | 672 | result[key] = True |
686 | else: | 673 | else: |
687 | uncached_query[key] = unihash | 674 | query_keys.append(key) |
688 | 675 | ||
689 | if self.max_parallel <= 1 or len(uncached_query) <= 1: | 676 | if query_keys: |
690 | # No parallelism required. Make the query serially with the single client | ||
691 | with self.client() as client: | 677 | with self.client() as client: |
692 | uncached_result = { | 678 | query_result = client.unihash_exists_batch(query[k] for k in query_keys) |
693 | key: client.unihash_exists(value) for key, value in uncached_query.items() | ||
694 | } | ||
695 | else: | ||
696 | with self.client_pool() as client_pool: | ||
697 | uncached_result = client_pool.unihashes_exist(uncached_query) | ||
698 | 679 | ||
699 | for key, exists in uncached_result.items(): | 680 | for idx, key in enumerate(query_keys): |
681 | exists = query_result[idx] | ||
700 | if exists: | 682 | if exists: |
701 | self.unihash_exists_cache.add(query[key]) | 683 | self.unihash_exists_cache.add(query[key]) |
702 | result[key] = exists | 684 | result[key] = exists |
@@ -712,29 +694,24 @@ class SignatureGeneratorUniHashMixIn(object): | |||
712 | unihash | 694 | unihash |
713 | """ | 695 | """ |
714 | result = {} | 696 | result = {} |
715 | queries = {} | 697 | query_tids = [] |
716 | query_result = {} | ||
717 | 698 | ||
718 | for tid in tids: | 699 | for tid in tids: |
719 | unihash = self.get_cached_unihash(tid) | 700 | unihash = self.get_cached_unihash(tid) |
720 | if unihash: | 701 | if unihash: |
721 | result[tid] = unihash | 702 | result[tid] = unihash |
722 | else: | 703 | else: |
723 | queries[tid] = (self._get_method(tid), self.taskhash[tid]) | 704 | query_tids.append(tid) |
724 | |||
725 | if len(queries) == 0: | ||
726 | return result | ||
727 | 705 | ||
728 | if self.max_parallel <= 1 or len(queries) <= 1: | 706 | if query_tids: |
729 | # No parallelism required. Make the query serially with the single client | 707 | unihashes = [] |
730 | with self.client() as client: | 708 | try: |
731 | for tid, args in queries.items(): | 709 | with self.client() as client: |
732 | query_result[tid] = client.get_unihash(*args) | 710 | unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids) |
733 | else: | 711 | except (ConnectionError, FileNotFoundError) as e: |
734 | with self.client_pool() as client_pool: | 712 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
735 | query_result = client_pool.get_unihashes(queries) | ||
736 | 713 | ||
737 | for tid, unihash in query_result.items(): | 714 | for idx, tid in enumerate(query_tids): |
738 | # In the absence of being able to discover a unique hash from the | 715 | # In the absence of being able to discover a unique hash from the |
739 | # server, make it be equivalent to the taskhash. The unique "hash" only | 716 | # server, make it be equivalent to the taskhash. The unique "hash" only |
740 | # really needs to be a unique string (not even necessarily a hash), but | 717 | # really needs to be a unique string (not even necessarily a hash), but |
@@ -749,7 +726,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
749 | # to the server, there is a better chance that they will agree on | 726 | # to the server, there is a better chance that they will agree on |
750 | # the unique hash. | 727 | # the unique hash. |
751 | taskhash = self.taskhash[tid] | 728 | taskhash = self.taskhash[tid] |
752 | if unihash: | 729 | |
730 | if unihashes and unihashes[idx]: | ||
731 | unihash = unihashes[idx] | ||
753 | # A unique hash equal to the taskhash is not very interesting, | 732 | # A unique hash equal to the taskhash is not very interesting, |
754 | # so it is reported it at debug level 2. If they differ, that | 733 | # so it is reported it at debug level 2. If they differ, that |
755 | # is much more interesting, so it is reported at debug level 1 | 734 | # is much more interesting, so it is reported at debug level 1 |
@@ -758,7 +737,6 @@ class SignatureGeneratorUniHashMixIn(object): | |||
758 | hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) | 737 | hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) |
759 | unihash = taskhash | 738 | unihash = taskhash |
760 | 739 | ||
761 | |||
762 | self.set_unihash(tid, unihash) | 740 | self.set_unihash(tid, unihash) |
763 | self.unihash[tid] = unihash | 741 | self.unihash[tid] = unihash |
764 | result[tid] = unihash | 742 | result[tid] = unihash |
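Both batched queries above follow the same shape once the ClientPool is gone: answer from the local cache first, send only the misses in a single batch call, and cache positive answers only (per the comment, missing hashes may appear later, so they are always re-queried). A sketch with a stand-in server callable:

    def unihashes_exist(query, cache, batch_exists):
        result, misses = {}, []
        for key, unihash in query.items():
            if unihash in cache:
                result[key] = True       # served from the local cache
            else:
                misses.append(key)
        if misses:
            answers = batch_exists([query[k] for k in misses])  # one round trip
            for key, exists in zip(misses, answers):
                if exists:
                    cache.add(query[key])  # cache positive answers only
                result[key] = exists
        return result

    cache = {"h1"}
    server = lambda hashes: [h == "h2" for h in hashes]  # stand-in for hashserv
    print(unihashes_exist({"t1": "h1", "t2": "h2", "t3": "h3"}, cache, server))
    # {'t1': True, 't2': True, 't3': False}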
@@ -839,7 +817,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
839 | d.setVar('BB_UNIHASH', new_unihash) | 817 | d.setVar('BB_UNIHASH', new_unihash) |
840 | else: | 818 | else: |
841 | hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) | 819 | hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) |
842 | except ConnectionError as e: | 820 | except (ConnectionError, FileNotFoundError) as e: |
843 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 821 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
844 | finally: | 822 | finally: |
845 | if sigfile: | 823 | if sigfile: |
@@ -881,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
881 | # TODO: What to do here? | 859 | # TODO: What to do here? |
882 | hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) | 860 | hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) |
883 | 861 | ||
884 | except ConnectionError as e: | 862 | except (ConnectionError, FileNotFoundError) as e: |
885 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 863 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
886 | 864 | ||
887 | return False | 865 | return False |
@@ -895,13 +873,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG | |||
895 | super().init_rundepcheck(data) | 873 | super().init_rundepcheck(data) |
896 | self.server = data.getVar('BB_HASHSERVE') | 874 | self.server = data.getVar('BB_HASHSERVE') |
897 | self.method = "sstate_output_hash" | 875 | self.method = "sstate_output_hash" |
898 | self.max_parallel = 1 | ||
899 | 876 | ||
900 | def clean_checksum_file_path(file_checksum_tuple): | 877 | def clean_checksum_file_path(file_checksum_tuple): |
901 | f, cs = file_checksum_tuple | 878 | f, cs = file_checksum_tuple |
902 | if "/./" in f: | 879 | if "/./" in f: |
903 | return "./" + f.split("/./")[1] | 880 | return "./" + f.split("/./")[1] |
904 | return f | 881 | return os.path.basename(f) |
905 | 882 | ||
906 | def dump_this_task(outfile, d): | 883 | def dump_this_task(outfile, d): |
907 | import bb.parse | 884 | import bb.parse |
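The clean_checksum_file_path() change means any checksum path without a "/./" marker now collapses to its basename, presumably so differing absolute host paths stop perturbing signatures. A worked example using the function exactly as changed above:

    import os

    def clean_checksum_file_path(f):
        if "/./" in f:
            return "./" + f.split("/./")[1]  # keep the relative tail
        return os.path.basename(f)           # otherwise drop the host prefix

    print(clean_checksum_file_path("/build/tmp/work/./scripts/run.sh"))  # ./scripts/run.sh
    print(clean_checksum_file_path("/home/user/files/fix.patch"))        # fix.patch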
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index f6585fb3aa..c0d1362a0c 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc | |||
106 | self.parseExpression("foo=$(echo bar)") | 106 | self.parseExpression("foo=$(echo bar)") |
107 | self.assertExecs(set(["echo"])) | 107 | self.assertExecs(set(["echo"])) |
108 | 108 | ||
109 | def test_assign_subshell_expansion_quotes(self): | ||
110 | self.parseExpression('foo="$(echo bar)"') | ||
111 | self.assertExecs(set(["echo"])) | ||
112 | |||
113 | def test_assign_subshell_expansion_nested(self): | ||
114 | self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"') | ||
115 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
116 | |||
117 | def test_assign_subshell_expansion_multiple(self): | ||
118 | self.parseExpression('foo="$(func1 "$(func2)") $(func3)"') | ||
119 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
120 | |||
121 | def test_assign_subshell_expansion_escaped_quotes(self): | ||
122 | self.parseExpression('foo="\\"fo\\"o$(func1)"') | ||
123 | self.assertExecs(set(["func1"])) | ||
124 | |||
125 | def test_assign_subshell_expansion_empty(self): | ||
126 | self.parseExpression('foo="bar$()foo"') | ||
127 | self.assertExecs(set()) | ||
128 | |||
129 | def test_assign_subshell_backticks(self): | ||
130 | self.parseExpression("foo=`echo bar`") | ||
131 | self.assertExecs(set(["echo"])) | ||
132 | |||
133 | def test_assign_subshell_backticks_quotes(self): | ||
134 | self.parseExpression('foo="`echo bar`"') | ||
135 | self.assertExecs(set(["echo"])) | ||
136 | |||
137 | def test_assign_subshell_backticks_multiple(self): | ||
138 | self.parseExpression('foo="`func1 bar` `func2`"') | ||
139 | self.assertExecs(set(["func1", "func2"])) | ||
140 | |||
141 | def test_assign_subshell_backticks_escaped_quotes(self): | ||
142 | self.parseExpression('foo="\\"fo\\"o`func1`"') | ||
143 | self.assertExecs(set(["func1"])) | ||
144 | |||
145 | def test_assign_subshell_backticks_empty(self): | ||
146 | self.parseExpression('foo="bar``foo"') | ||
147 | self.assertExecs(set()) | ||
148 | |||
109 | def test_shell_unexpanded(self): | 149 | def test_shell_unexpanded(self): |
110 | self.setEmptyVars(["QT_BASE_NAME"]) | 150 | self.setEmptyVars(["QT_BASE_NAME"]) |
111 | self.parseExpression('echo "${QT_BASE_NAME}"') | 151 | self.parseExpression('echo "${QT_BASE_NAME}"') |
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py index 95af3f96d7..16c297b315 100644 --- a/bitbake/lib/bb/tests/compression.py +++ b/bitbake/lib/bb/tests/compression.py | |||
@@ -66,8 +66,8 @@ class CompressionTests(object): | |||
66 | 66 | ||
67 | class LZ4Tests(CompressionTests, unittest.TestCase): | 67 | class LZ4Tests(CompressionTests, unittest.TestCase): |
68 | def setUp(self): | 68 | def setUp(self): |
69 | if shutil.which("lz4c") is None: | 69 | if shutil.which("lz4") is None: |
70 | self.skipTest("'lz4c' not found") | 70 | self.skipTest("'lz4' not found") |
71 | super().setUp() | 71 | super().setUp() |
72 | 72 | ||
73 | @contextlib.contextmanager | 73 | @contextlib.contextmanager |
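The compression tests now probe for the lz4 binary rather than the retired lz4c. The skip pattern in isolation, as a runnable toy test:

    import shutil
    import unittest

    class LZ4RoundTrip(unittest.TestCase):
        def setUp(self):
            if shutil.which("lz4") is None:  # the tool shipped as "lz4c" historically
                self.skipTest("'lz4' not found")

        def test_present(self):
            self.assertTrue(shutil.which("lz4"))

    if __name__ == "__main__":
        unittest.main()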
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py index cbc7c1ecd4..a895f6a58e 100644 --- a/bitbake/lib/bb/tests/data.py +++ b/bitbake/lib/bb/tests/data.py | |||
@@ -450,17 +450,64 @@ class TestFlags(unittest.TestCase): | |||
450 | self.d = bb.data.init() | 450 | self.d = bb.data.init() |
451 | self.d.setVar("foo", "value of foo") | 451 | self.d.setVar("foo", "value of foo") |
452 | self.d.setVarFlag("foo", "flag1", "value of flag1") | 452 | self.d.setVarFlag("foo", "flag1", "value of flag1") |
453 | self.d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1") | ||
453 | self.d.setVarFlag("foo", "flag2", "value of flag2") | 454 | self.d.setVarFlag("foo", "flag2", "value of flag2") |
455 | self.d.setVarFlag("foo", "_defaultval_flag_flag2", "default of flag2") | ||
456 | self.d.setVarFlag("foo", "flag3", "value of flag3") | ||
457 | self.d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue") | ||
454 | 458 | ||
455 | def test_setflag(self): | 459 | def test_setflag(self): |
456 | self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") | 460 | self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") |
457 | self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") | 461 | self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") |
462 | self.assertDictEqual( | ||
463 | self.d.getVarFlags("foo"), | ||
464 | { | ||
465 | "flag1": "value of flag1", | ||
466 | "flag2": "value of flag2", | ||
467 | "flag3": "value of flag3", | ||
468 | "flagnovalue": "default of flagnovalue", | ||
469 | } | ||
470 | ) | ||
471 | self.assertDictEqual( | ||
472 | self.d.getVarFlags("foo", internalflags=True), | ||
473 | { | ||
474 | "_content": "value of foo", | ||
475 | "flag1": "value of flag1", | ||
476 | "flag2": "value of flag2", | ||
477 | "flag3": "value of flag3", | ||
478 | "_defaultval_flag_flag1": "default of flag1", | ||
479 | "_defaultval_flag_flag2": "default of flag2", | ||
480 | "_defaultval_flag_flagnovalue": "default of flagnovalue", | ||
481 | } | ||
482 | ) | ||
458 | 483 | ||
459 | def test_delflag(self): | 484 | def test_delflag(self): |
460 | self.d.delVarFlag("foo", "flag2") | 485 | self.d.delVarFlag("foo", "flag2") |
486 | self.d.delVarFlag("foo", "flag3") | ||
461 | self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") | 487 | self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") |
462 | self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) | 488 | self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) |
463 | 489 | self.assertDictEqual( | |
490 | self.d.getVarFlags("foo"), | ||
491 | { | ||
492 | "flag1": "value of flag1", | ||
493 | "flagnovalue": "default of flagnovalue", | ||
494 | } | ||
495 | ) | ||
496 | self.assertDictEqual( | ||
497 | self.d.getVarFlags("foo", internalflags=True), | ||
498 | { | ||
499 | "_content": "value of foo", | ||
500 | "flag1": "value of flag1", | ||
501 | "_defaultval_flag_flag1": "default of flag1", | ||
502 | "_defaultval_flag_flagnovalue": "default of flagnovalue", | ||
503 | } | ||
504 | ) | ||
505 | |||
506 | def test_delvar(self): | ||
507 | self.d.delVar("foo") | ||
508 | self.assertEqual(self.d.getVarFlag("foo", "flag1", False), None) | ||
509 | self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) | ||
510 | self.assertEqual(self.d.getVarFlags("foo", internalflags=True), None) | ||
464 | 511 | ||
465 | class Contains(unittest.TestCase): | 512 | class Contains(unittest.TestCase): |
466 | def setUp(self): | 513 | def setUp(self): |
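The new flag tests exercise a convention where a default for flag <name> is stored under the internal key _defaultval_flag_<name> and surfaces from getVarFlags() only when no explicit value shadows it. A toy model of that lookup (not bitbake's real data store):

    PREFIX = "_defaultval_flag_"

    def visible_flags(flags):
        out = {k: v for k, v in flags.items() if not k.startswith("_")}
        for k, v in flags.items():
            if k.startswith(PREFIX):
                out.setdefault(k[len(PREFIX):], v)  # a default fills gaps only
        return out

    flags = {"flag1": "value of flag1",
             PREFIX + "flag1": "default of flag1",
             PREFIX + "flagnovalue": "default of flagnovalue"}
    print(visible_flags(flags))
    # {'flag1': 'value of flag1', 'flagnovalue': 'default of flagnovalue'}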
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php new file mode 100644 index 0000000000..e27ee134f2 --- /dev/null +++ b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php | |||
@@ -0,0 +1,3528 @@ | |||
1 | <?xml version="1.0" encoding="UTF-8"?> | ||
2 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" | ||
3 | "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> | ||
4 | <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> | ||
5 | <head> | ||
6 | <title>MiniUPnP download zone</title> | ||
7 | <link href="../css/miniupnp.css" rel="stylesheet" type="text/css"/> | ||
8 | <meta name="description" content="files download of the miniupnp project"/> | ||
9 | <meta name="keywords" content="upnp,download,openbsd,freebsd,linux,windows"/> | ||
10 | <meta name="viewport" content="width=device-width" /> | ||
11 | <link href="rss.php" title="MiniUPnPd, MiniUPnPc and MiniSSDPd Files" type="application/rss+xml" rel="alternate" /> | ||
12 | <link rel="canonical" href="http://miniupnp.free.fr/files/" /> | ||
13 | <link rel="alternate" hreflang="fr" href="/files/index_fr.php" /> | ||
14 | <script async="async" src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js" type="text/javascript"></script> | ||
15 | <script type="text/javascript"> | ||
16 | (adsbygoogle = window.adsbygoogle || []).push({ | ||
17 | google_ad_client: "ca-pub-6883148866513192", | ||
18 | enable_page_level_ads: true | ||
19 | }); | ||
20 | </script> | ||
21 | </head> | ||
22 | <body> | ||
23 | <h2>MiniUPnP Project</h2> | ||
24 | |||
25 | <p align="center"> | ||
26 | <a href="../">Home</a> | | ||
27 | <b>Downloads</b> | | ||
28 | <a href="../devicelist.php">Compatibility list</a> | | ||
29 | <a href="../libnatpmp.html">libnatpmp</a> | | ||
30 | <a href="../minissdpd.html">MiniSSDPd</a> | | ||
31 | <a href="../xchat-upnp.html">xchat upnp patch</a> | | ||
32 | <a href="../search.html">Search</a> | | ||
33 | <a href="https://miniupnp.tuxfamily.org/forum/">Forum</a> | ||
34 | </p> | ||
35 | <p align="center"> | ||
36 | <b>English</b> | <a href="/files/index_fr.php">Français</a> | ||
37 | </p> | ||
38 | |||
39 | <div align="center"> | ||
40 | <script type="text/javascript"><!-- | ||
41 | google_ad_client = "pub-6883148866513192"; | ||
42 | /* 728x90, created 7/10/08 */ | ||
43 | google_ad_slot = "0774293141"; | ||
44 | google_ad_width = 728; | ||
45 | google_ad_height = 90; | ||
46 | //--> | ||
47 | </script> | ||
48 | <script type="text/javascript" | ||
49 | src="https://pagead2.googlesyndication.com/pagead/show_ads.js"> | ||
50 | </script> | ||
51 | </div> | ||
52 | |||
53 | <h2>MiniUPnP download zone</h2> | ||
54 | <p> | ||
55 | On this page you will find the source of miniupnp and | ||
56 | some related files. You will also find precompiled binaries | ||
57 | of the UPnP client sample program for Windows compiled using | ||
58 | <a href="https://mingw.osdn.io/">MinGW</a>. There are also Windows | ||
59 | binaries (including python module) automatically built using | ||
60 | <a href="https://ci.appveyor.com/project/miniupnp/miniupnp/build/artifacts">AppVeyor</a>. | ||
61 | </p> | ||
62 | <p>If you just need one of the software installed on your machine, | ||
63 | you probably don't need to download and compile the source files. | ||
64 | It is very likely that a package/port already exists for | ||
65 | your system/distribution. Refer to your system documentation | ||
66 | to find how to search and install a package/port. | ||
67 | Mac OS X does have port systems too: see | ||
68 | <a href="http://www.macports.org/">MacPorts</a> or | ||
69 | <a href="http://mxcl.github.com/homebrew/">Homebrew</a> or | ||
70 | <a href="http://www.finkproject.org/">Fink</a>. | ||
71 | </p> | ||
72 | <p> | ||
73 | The miniupnpc (client) sources have been successfully compiled | ||
74 | under Windows XP/vista/7/10/etc. (using | ||
75 | <a href="https://mingw.osdn.io/">MinGW</a>, | ||
76 | <a href="https://www.mingw-w64.org/">Mingw-w64</a> | ||
77 | or <a href="http://www.cygwin.com/">Cygwin</a>), | ||
78 | Linux, OpenBSD, FreeBSD, NetBSD, DragonFlyBSD, | ||
79 | Solaris, MacOS X and AmigaOS. <br/> | ||
80 | The Makefile of the client is made for GNU make: | ||
81 | check which version your system has | ||
82 | with the command "make --version". On some systems, such as OpenBSD, | ||
83 | you have to use "gmake". Under Windows with MinGW, GNU make is | ||
84 | called "mingw32-make" and a sligthly modified version of the Makefile | ||
85 | should be used : Makefile.mingw. Run "mingw32make.bat" to compile. <br/> | ||
86 | If you have any compatibility problem, please post on the | ||
87 | <a href="https://miniupnp.tuxfamily.org/forum/">forum</a> | ||
88 | or contact me by email. | ||
89 | </p> | ||
90 | <!-- | ||
91 | <p>A devoted user compiled miniupnp<strong>c</strong> for | ||
92 | Openwrt (currently Kamikaze 7.09) | ||
93 | and his work is available here : | ||
94 | <a href="http://replay.waybackmachine.org/20081120030628/http://www.myantihero.net/pub/openwrt/packages/">http://myantihero.net/pub/openwrt/packages/</a>.</p> | ||
95 | --> | ||
96 | <p>Get miniupnpc under AmigaOS 4 on | ||
97 | <a href="http://os4depot.net/index.php?function=showfile&file=network/misc/miniupnpc.lha">OS4Depot</a>. | ||
98 | </p> | ||
99 | <p> | ||
100 | Dario Meloni has made a Ruby Gem embedding miniupnpc : | ||
101 | <a href="https://rubygems.org/gems/mupnp">https://rubygems.org/gems/mupnp</a>. | ||
102 | </p> | ||
103 | <p> | ||
104 | The python module is available on pypi.org : | ||
105 | <a href="https://pypi.org/project/miniupnpc/">pip install miniupnpc</a>. | ||
106 | </p> | ||
107 | <p> | ||
108 | The daemon (starting in November 2006) compiles with BSD make under BSD | ||
109 | and Solaris.<br/> | ||
110 | To compile the daemon under linux, use "make -f Makefile.linux"<br/> | ||
111 | To compile for <a href="http://openwrt.org/">OpenWRT</a> | ||
112 | please read the README.openwrt file, or use the packages | ||
113 | <a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpc</a> and | ||
114 | <a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpd</a>. | ||
115 | <!-- The | ||
116 | <a href="http://www.x-wrt.org/">X-Wrt</a> project is providing | ||
117 | precompiled ipkg packages for OpenWrt for both OpenWrt | ||
118 | <a href="ftp://ftp.berlios.de/pub/xwrt/packages/">White Russian</a> | ||
119 | and OpenWrt | ||
120 | <a href="ftp://ftp.berlios.de/pub/xwrt/kamikaze/packages">kamikaze</a>. | ||
121 | Check | ||
122 | <a href="ftp://ftp.berlios.de/pub/xwrt/">ftp://ftp.berlios.de/pub/xwrt/</a>. | ||
123 | For White Russian, take a look at | ||
124 | <a href="http://jackassofalltrades.com/openwrt/">this</a>. --> | ||
125 | <br/> | ||
126 | <a href="http://pfsense.com">pfSense</a> users are advised to use the | ||
127 | miniupnpd port available for their system. Recent versions of | ||
128 | pfSense include MiniUPnPd in the base system. | ||
129 | <br/> | ||
130 | For <a href="http://en.wikipedia.org/wiki/WRT54G">Linksys WRT54G</a> | ||
131 | and WRT54GL owners, | ||
132 | <a href="http://sourceforge.net/projects/tarifa/">Tarifa firmware</a> | ||
133 | is another alternative to get miniUPnPd running on the router. | ||
134 | </p> | ||
135 | <p> | ||
136 | Please read README and | ||
137 | LICENCE files included with the distribution for further information. | ||
138 | </p> | ||
139 | <p> | ||
140 | The MiniUPnP daemon (miniupnpd) is working under | ||
141 | <a href="http://www.openbsd.org/">OpenBSD</a>, | ||
142 | <a href="http://www.netbsd.org/">NetBSD</a>, | ||
143 | <a href="http://www.freebsd.org/">FreeBSD</a>, | ||
144 | <a href="http://www.dragonflybsd.org/">DragonFlyBSD</a>, | ||
145 | <a href="http://www.apple.com/macosx/">Mac OS X</a> and | ||
146 | (<a href="https://en.wikipedia.org/wiki/OpenSolaris">Open</a>)<a href="http://www.oracle.com/us/products/servers-storage/solaris/solaris11/overview/index.html">Solaris</a> | ||
147 | with <a href="http://www.openbsd.org/faq/pf/">pf</a>, | ||
148 | with <a href="https://en.wikipedia.org/wiki/IPFilter">IP Filter</a> or | ||
149 | with <a href="http://en.wikipedia.org/wiki/Ipfirewall">ipfw</a>. | ||
150 | The linux version uses either libiptc, which permits access to | ||
151 | <a href="http://netfilter.org/">netfilter</a> | ||
152 | rules inside the kernel the same way as | ||
153 | <a href="https://www.netfilter.org/projects/iptables/index.html">iptables</a>, or | ||
154 | <a href="https://www.netfilter.org/projects/libnftnl/index.html">libnftnl</a> | ||
155 | which is the equivalent for | ||
156 | <a href="https://www.netfilter.org/projects/nftables/index.html">nftables</a>. | ||
157 | </p> | ||
158 | |||
159 | <p>Releases are now GPG signed with the key <a href="../A31ACAAF.asc">A31ACAAF</a>. | ||
160 | Previous signing key was <a href="../A5C0863C.asc">A5C0863C</a>. | ||
161 | Get it from your favorite | ||
162 | <a href="https://pgp.mit.edu/pks/lookup?search=0xA31ACAAF&op=index&fingerprint=on">key server</a>.</p> | ||
163 | |||
164 | <h4>REST API</h4> | ||
165 | <p>You can use the REST API to get the latest releases available:</p> | ||
166 | <ul> | ||
167 | <li><a href="rest.php/tags/miniupnpd?count=1">rest.php/tags/miniupnpd?count=1</a>: latest miniupnpd.</li> | ||
168 | <li><a href="rest.php/tags?count=1">rest.php/tags?count=1</a>: miniupnpc, miniupnpd and minissdpd.</li> | ||
169 | </ul> | ||
170 | |||
171 | <h4>You can help !</h4> | ||
172 | <p>If you make a package/port for your favorite OS distribution, | ||
173 | inform me so I can upload the package here or add a link to your | ||
174 | repository. | ||
175 | </p> | ||
176 | |||
177 | <h4>Latest files</h4> | ||
178 | <table> | ||
179 | <tr><th>name</th> | ||
180 | <th>size</th> | ||
181 | <th>date</th> | ||
182 | <th>comment</th> | ||
183 | <th><!-- Changelog --></th> | ||
184 | <th><!-- Signature --></th> | ||
185 | </tr> | ||
186 | <tr> | ||
187 | <td class="filename"><a href='miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td> | ||
188 | <td class="filesize">140137</td> | ||
189 | <td class="filedate">05/03/2025 10:31</td> | ||
190 | <td class="comment">MiniUPnP client release source code</td> | ||
191 | <td><a href="changelog.php?file=miniupnpc-2.3.2.tar.gz">changelog</a></td> | ||
192 | <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td> | ||
193 | </tr> | ||
194 | <tr> | ||
195 | <td class="filename"><a href='miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td> | ||
196 | <td class="filesize">265329</td> | ||
197 | <td class="filedate">22/06/2024 22:31</td> | ||
198 | <td class="comment">MiniUPnP daemon release source code</td> | ||
199 | <td><a href="changelog.php?file=miniupnpd-2.3.7.tar.gz">changelog</a></td> | ||
200 | <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td> | ||
201 | </tr> | ||
202 | <tr> | ||
203 | <td class="filename"><a href='libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td> | ||
204 | <td class="filesize">26506</td> | ||
205 | <td class="filedate">23/04/2023 11:02</td> | ||
206 | <td class="comment">latest libnatpmp source code</td> | ||
207 | <td><a href="changelog.php?file=libnatpmp-20230423.tar.gz">changelog</a></td> | ||
208 | <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td> | ||
209 | </tr> | ||
210 | <tr> | ||
211 | <td class="filename"><a href='minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td> | ||
212 | <td class="filesize">39077</td> | ||
213 | <td class="filedate">22/10/2022 18:41</td> | ||
214 | <td class="comment">MiniSSDPd release source code</td> | ||
215 | <td><a href="changelog.php?file=minissdpd-1.6.0.tar.gz">changelog</a></td> | ||
216 | <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td> | ||
217 | </tr> | ||
218 | <tr> | ||
219 | <td class="filename"><a href='upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td> | ||
220 | <td class="filesize">69503</td> | ||
221 | <td class="filedate">15/05/2022 14:31</td> | ||
222 | <td class="comment">Windows executable</td> | ||
223 | <td><a href="changelog.php?file=upnpc-exe-win32-20220515.zip">changelog</a></td> | ||
224 | <td></td> | ||
225 | </tr> | ||
226 | <tr> | ||
227 | <td class="filename"><a href='minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td> | ||
228 | <td class="filesize">38870</td> | ||
229 | <td class="filedate">04/11/2021 23:34</td> | ||
230 | <td class="comment">latest MiniSSDPd source code</td> | ||
231 | <td><a href="changelog.php?file=minissdpd-1.5.20211105.tar.gz">changelog</a></td> | ||
232 | <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td> | ||
233 | </tr> | ||
234 | <tr> | ||
235 | <td class="filename"><a href='miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td> | ||
236 | <td class="filesize">97682</td> | ||
237 | <td class="filedate">15/10/2020 22:31</td> | ||
238 | <td class="comment">latest MiniUPnP client source code</td> | ||
239 | <td><a href="changelog.php?file=miniupnpc-2.1.20201016.tar.gz">changelog</a></td> | ||
240 | <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td> | ||
241 | </tr> | ||
242 | <tr> | ||
243 | <td class="filename"><a href='miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td> | ||
244 | <td class="filesize">245426</td> | ||
245 | <td class="filedate">10/05/2020 18:23</td> | ||
246 | <td class="comment">latest MiniUPnP daemon source code</td> | ||
247 | <td><a href="changelog.php?file=miniupnpd-2.1.20200510.tar.gz">changelog</a></td> | ||
248 | <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td> | ||
249 | </tr> | ||
250 | <tr> | ||
251 | <td class="filename"><a href='xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td> | ||
252 | <td class="filesize">10329</td> | ||
253 | <td class="filedate">11/08/2011 15:18</td> | ||
254 | <td class="comment">Patch to add UPnP capabilities to xchat</td> | ||
255 | <td><a href="changelog.php?file=xchat-upnp20110811.patch">changelog</a></td> | ||
256 | <td></td> | ||
257 | </tr> | ||
258 | <tr> | ||
259 | <td class="filename"><a href='minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td> | ||
260 | <td class="filesize">7598</td> | ||
261 | <td class="filedate">25/07/2011 14:57</td> | ||
262 | <td class="comment">Patch for MiniDLNA to use miniSSDPD</td> | ||
263 | <td><a href="changelog.php?file=minidlna_1.0.21.minissdp1.patch">changelog</a></td> | ||
264 | <td></td> | ||
265 | </tr> | ||
266 | <tr> | ||
267 | <td class="filename"><a href='miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td> | ||
268 | <td class="filesize">14840</td> | ||
269 | <td class="filedate">04/11/2006 18:16</td> | ||
270 | <td class="comment">João Paulo Barraca version of the upnp client</td> | ||
271 | <td><a href="changelog.php?file=miniupnpc-new20060630.tar.gz">changelog</a></td> | ||
272 | <td></td> | ||
273 | </tr> | ||
274 | </table> | ||
275 | |||
276 | <h4>All files</h4> | ||
277 | <table> | ||
278 | <tr><th>name</th> | ||
279 | <th>size</th> | ||
280 | <th>date</th> | ||
281 | <th>comment</th> | ||
282 | <th><!-- signature --></th> | ||
283 | </tr> | ||
284 | <tr> | ||
285 | <td class="filename"><a href='download.php?file=miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td> | ||
286 | <td class="filesize">140137</td> | ||
287 | <td class="filedate">05/03/2025 10:31:36 +0000</td> | ||
288 | <td class="comment">MiniUPnP client release source code</td> | ||
289 | <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td> | ||
290 | </tr> | ||
291 | <tr> | ||
292 | <td class="filename"><a href='download.php?file=miniupnpc-2.3.1.tar.gz'>miniupnpc-2.3.1.tar.gz</a></td> | ||
293 | <td class="filesize">139499</td> | ||
294 | <td class="filedate">23/02/2025 16:44:16 +0000</td> | ||
295 | <td class="comment">MiniUPnP client release source code</td> | ||
296 | <td><a href="miniupnpc-2.3.1.tar.gz.sig">Signature</a></td> | ||
297 | </tr> | ||
298 | <tr> | ||
299 | <td class="filename"><a href='download.php?file=miniupnpc-2.3.0.tar.gz'>miniupnpc-2.3.0.tar.gz</a></td> | ||
300 | <td class="filesize">105071</td> | ||
301 | <td class="filedate">10/01/2025 23:16:45 +0000</td> | ||
302 | <td class="comment">MiniUPnP client release source code</td> | ||
303 | <td><a href="miniupnpc-2.3.0.tar.gz.sig">Signature</a></td> | ||
304 | </tr> | ||
305 | <tr> | ||
306 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td> | ||
307 | <td class="filesize">265329</td> | ||
308 | <td class="filedate">22/06/2024 22:31:38 +0000</td> | ||
309 | <td class="comment">MiniUPnP daemon release source code</td> | ||
310 | <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td> | ||
311 | </tr> | ||
312 | <tr> | ||
313 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.8.tar.gz'>miniupnpc-2.2.8.tar.gz</a></td> | ||
314 | <td class="filesize">104603</td> | ||
315 | <td class="filedate">08/06/2024 22:13:39 +0000</td> | ||
316 | <td class="comment">MiniUPnP client release source code</td> | ||
317 | <td><a href="miniupnpc-2.2.8.tar.gz.sig">Signature</a></td> | ||
318 | </tr> | ||
319 | <tr> | ||
320 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.6.tar.gz'>miniupnpd-2.3.6.tar.gz</a></td> | ||
321 | <td class="filesize">263018</td> | ||
322 | <td class="filedate">19/03/2024 23:39:51 +0000</td> | ||
323 | <td class="comment">MiniUPnP daemon release source code</td> | ||
324 | <td><a href="miniupnpd-2.3.6.tar.gz.sig">Signature</a></td> | ||
325 | </tr> | ||
326 | <tr> | ||
327 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.7.tar.gz'>miniupnpc-2.2.7.tar.gz</a></td> | ||
328 | <td class="filesize">104258</td> | ||
329 | <td class="filedate">19/03/2024 23:25:18 +0000</td> | ||
330 | <td class="comment">MiniUPnP client release source code</td> | ||
331 | <td><a href="miniupnpc-2.2.7.tar.gz.sig">Signature</a></td> | ||
332 | </tr> | ||
333 | <tr> | ||
334 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.5.tar.gz'>miniupnpd-2.3.5.tar.gz</a></td> | ||
335 | <td class="filesize">261952</td> | ||
336 | <td class="filedate">02/03/2024 11:04:07 +0000</td> | ||
337 | <td class="comment">MiniUPnP daemon release source code</td> | ||
338 | <td><a href="miniupnpd-2.3.5.tar.gz.sig">Signature</a></td> | ||
339 | </tr> | ||
340 | <tr> | ||
341 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.4.tar.gz'>miniupnpd-2.3.4.tar.gz</a></td> | ||
342 | <td class="filesize">260810</td> | ||
343 | <td class="filedate">04/01/2024 00:53:17 +0000</td> | ||
344 | <td class="comment">MiniUPnP daemon release source code</td> | ||
345 | <td><a href="miniupnpd-2.3.4.tar.gz.sig">Signature</a></td> | ||
346 | </tr> | ||
347 | <tr> | ||
348 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.6.tar.gz'>miniupnpc-2.2.6.tar.gz</a></td> | ||
349 | <td class="filesize">103949</td> | ||
350 | <td class="filedate">04/01/2024 00:27:14 +0000</td> | ||
351 | <td class="comment">MiniUPnP client release source code</td> | ||
352 | <td><a href="miniupnpc-2.2.6.tar.gz.sig">Signature</a></td> | ||
353 | </tr> | ||
354 | <tr> | ||
355 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.5.tar.gz'>miniupnpc-2.2.5.tar.gz</a></td> | ||
356 | <td class="filesize">103654</td> | ||
357 | <td class="filedate">11/06/2023 23:14:56 +0000</td> | ||
358 | <td class="comment">MiniUPnP client release source code</td> | ||
359 | <td><a href="miniupnpc-2.2.5.tar.gz.sig">Signature</a></td> | ||
360 | </tr> | ||
361 | <tr> | ||
362 | <td class="filename"><a href='download.php?file=libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td> | ||
363 | <td class="filesize">26506</td> | ||
364 | <td class="filedate">23/04/2023 11:02:09 +0000</td> | ||
365 | <td class="comment">libnatpmp source code</td> | ||
366 | <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td> | ||
367 | </tr> | ||
368 | <tr> | ||
369 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.3.tar.gz'>miniupnpd-2.3.3.tar.gz</a></td> | ||
370 | <td class="filesize">260079</td> | ||
371 | <td class="filedate">17/02/2023 03:07:46 +0000</td> | ||
372 | <td class="comment">MiniUPnP daemon release source code</td> | ||
373 | <td><a href="miniupnpd-2.3.3.tar.gz.sig">Signature</a></td> | ||
374 | </tr> | ||
375 | <tr> | ||
376 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.2.tar.gz'>miniupnpd-2.3.2.tar.gz</a></td> | ||
377 | <td class="filesize">259686</td> | ||
378 | <td class="filedate">19/01/2023 23:18:08 +0000</td> | ||
379 | <td class="comment">MiniUPnP daemon release source code</td> | ||
380 | <td><a href="miniupnpd-2.3.2.tar.gz.sig">Signature</a></td> | ||
381 | </tr> | ||
382 | <tr> | ||
383 | <td class="filename"><a href='download.php?file=minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td> | ||
384 | <td class="filesize">39077</td> | ||
385 | <td class="filedate">22/10/2022 18:41:54 +0000</td> | ||
386 | <td class="comment">MiniSSDPd release source code</td> | ||
387 | <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td> | ||
388 | </tr> | ||
389 | <tr> | ||
390 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.4.tar.gz'>miniupnpc-2.2.4.tar.gz</a></td> | ||
391 | <td class="filesize">102932</td> | ||
392 | <td class="filedate">21/10/2022 21:01:01 +0000</td> | ||
393 | <td class="comment">MiniUPnP client release source code</td> | ||
394 | <td><a href="miniupnpc-2.2.4.tar.gz.sig">Signature</a></td> | ||
395 | </tr> | ||
396 | <tr> | ||
397 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.1.tar.gz'>miniupnpd-2.3.1.tar.gz</a></td> | ||
398 | <td class="filesize">258050</td> | ||
399 | <td class="filedate">16/10/2022 05:58:44 +0000</td> | ||
400 | <td class="comment">MiniUPnP daemon release source code</td> | ||
401 | <td><a href="miniupnpd-2.3.1.tar.gz.sig">Signature</a></td> | ||
402 | </tr> | ||
403 | <tr> | ||
404 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td> | ||
405 | <td class="filesize">69503</td> | ||
406 | <td class="filedate">15/05/2022 14:31:25 +0000</td> | ||
407 | <td class="comment">Windows executable</td> | ||
408 | <td></td> | ||
409 | </tr> | ||
410 | <tr> | ||
411 | <td class="filename"><a href='download.php?file=hexchat-2.16.patch'>hexchat-2.16.patch</a></td> | ||
412 | <td class="filesize">8147</td> | ||
413 | <td class="filedate">19/03/2022 16:52:05 +0000</td> | ||
414 | <td class="comment"></td> | ||
415 | <td></td> | ||
416 | </tr> | ||
417 | <tr> | ||
418 | <td class="filename"><a href='download.php?file=miniupnpd-2.3.0.tar.gz'>miniupnpd-2.3.0.tar.gz</a></td> | ||
419 | <td class="filesize">256069</td> | ||
420 | <td class="filedate">23/01/2022 00:23:32 +0000</td> | ||
421 | <td class="comment">MiniUPnP daemon release source code</td> | ||
422 | <td><a href="miniupnpd-2.3.0.tar.gz.sig">Signature</a></td> | ||
423 | </tr> | ||
424 | <tr> | ||
425 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td> | ||
426 | <td class="filesize">38870</td> | ||
427 | <td class="filedate">04/11/2021 23:34:49 +0000</td> | ||
428 | <td class="comment">MiniSSDPd source code</td> | ||
429 | <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td> | ||
430 | </tr> | ||
431 | <tr> | ||
432 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.3.tar.gz'>miniupnpc-2.2.3.tar.gz</a></td> | ||
433 | <td class="filesize">101360</td> | ||
434 | <td class="filedate">28/09/2021 21:43:32 +0000</td> | ||
435 | <td class="comment">MiniUPnP client release source code</td> | ||
436 | <td><a href="miniupnpc-2.2.3.tar.gz.sig">Signature</a></td> | ||
437 | </tr> | ||
438 | <tr> | ||
439 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.3.tar.gz'>miniupnpd-2.2.3.tar.gz</a></td> | ||
440 | <td class="filesize">254752</td> | ||
441 | <td class="filedate">21/08/2021 08:35:13 +0000</td> | ||
442 | <td class="comment">MiniUPnP daemon release source code</td> | ||
443 | <td><a href="miniupnpd-2.2.3.tar.gz.sig">Signature</a></td> | ||
444 | </tr> | ||
445 | <tr> | ||
446 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.2.tar.gz'>miniupnpd-2.2.2.tar.gz</a></td> | ||
447 | <td class="filesize">250649</td> | ||
448 | <td class="filedate">13/05/2021 11:30:11 +0000</td> | ||
449 | <td class="comment">MiniUPnP daemon release source code</td> | ||
450 | <td><a href="miniupnpd-2.2.2.tar.gz.sig">Signature</a></td> | ||
451 | </tr> | ||
452 | <tr> | ||
453 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.2.tar.gz'>miniupnpc-2.2.2.tar.gz</a></td> | ||
454 | <td class="filesize">100008</td> | ||
455 | <td class="filedate">02/03/2021 23:44:52 +0000</td> | ||
456 | <td class="comment">MiniUPnP client release source code</td> | ||
457 | <td><a href="miniupnpc-2.2.2.tar.gz.sig">Signature</a></td> | ||
458 | </tr> | ||
459 | <tr> | ||
460 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.1.tar.gz'>miniupnpd-2.2.1.tar.gz</a></td> | ||
461 | <td class="filesize">250023</td> | ||
462 | <td class="filedate">20/12/2020 18:08:08 +0000</td> | ||
463 | <td class="comment">MiniUPnP daemon release source code</td> | ||
464 | <td><a href="miniupnpd-2.2.1.tar.gz.sig">Signature</a></td> | ||
465 | </tr> | ||
466 | <tr> | ||
467 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.1.tar.gz'>miniupnpc-2.2.1.tar.gz</a></td> | ||
468 | <td class="filesize">99595</td> | ||
469 | <td class="filedate">20/12/2020 18:08:02 +0000</td> | ||
470 | <td class="comment">MiniUPnP client release source code</td> | ||
471 | <td><a href="miniupnpc-2.2.1.tar.gz.sig">Signature</a></td> | ||
472 | </tr> | ||
473 | <tr> | ||
474 | <td class="filename"><a href='download.php?file=miniupnpc-2.2.0.tar.gz'>miniupnpc-2.2.0.tar.gz</a></td> | ||
475 | <td class="filesize">98348</td> | ||
476 | <td class="filedate">09/11/2020 19:51:24 +0000</td> | ||
477 | <td class="comment">MiniUPnP client release source code</td> | ||
478 | <td><a href="miniupnpc-2.2.0.tar.gz.sig">Signature</a></td> | ||
479 | </tr> | ||
480 | <tr> | ||
481 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.0.tar.gz'>miniupnpd-2.2.0.tar.gz</a></td> | ||
482 | <td class="filesize">249858</td> | ||
483 | <td class="filedate">31/10/2020 09:20:59 +0000</td> | ||
484 | <td class="comment">MiniUPnP daemon release source code</td> | ||
485 | <td><a href="miniupnpd-2.2.0.tar.gz.sig">Signature</a></td> | ||
486 | </tr> | ||
487 | <tr> | ||
488 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC3.tar.gz'>miniupnpd-2.2.0-RC3.tar.gz</a></td> | ||
489 | <td class="filesize">249879</td> | ||
490 | <td class="filedate">30/10/2020 21:49:49 +0000</td> | ||
491 | <td class="comment">MiniUPnP daemon release source code</td> | ||
492 | <td><a href="miniupnpd-2.2.0-RC3.tar.gz.sig">Signature</a></td> | ||
493 | </tr> | ||
494 | <tr> | ||
495 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td> | ||
496 | <td class="filesize">97682</td> | ||
497 | <td class="filedate">15/10/2020 22:31:09 +0000</td> | ||
498 | <td class="comment">MiniUPnP client source code</td> | ||
499 | <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td> | ||
500 | </tr> | ||
501 | <tr> | ||
502 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC2.tar.gz'>miniupnpd-2.2.0-RC2.tar.gz</a></td> | ||
503 | <td class="filesize">248756</td> | ||
504 | <td class="filedate">28/09/2020 21:57:22 +0000</td> | ||
505 | <td class="comment">MiniUPnP daemon release source code</td> | ||
506 | <td><a href="miniupnpd-2.2.0-RC2.tar.gz.sig">Signature</a></td> | ||
507 | </tr> | ||
508 | <tr> | ||
509 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20200928.tar.gz'>miniupnpc-2.1.20200928.tar.gz</a></td> | ||
510 | <td class="filesize">96508</td> | ||
511 | <td class="filedate">28/09/2020 21:56:09 +0000</td> | ||
512 | <td class="comment">MiniUPnP client source code</td> | ||
513 | <td><a href="miniupnpc-2.1.20200928.tar.gz.sig">Signature</a></td> | ||
514 | </tr> | ||
515 | <tr> | ||
516 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20200928.tar.gz'>minissdpd-1.5.20200928.tar.gz</a></td> | ||
517 | <td class="filesize">37860</td> | ||
518 | <td class="filedate">28/09/2020 21:55:40 +0000</td> | ||
519 | <td class="comment">MiniSSDPd source code</td> | ||
520 | <td><a href="minissdpd-1.5.20200928.tar.gz.sig">Signature</a></td> | ||
521 | </tr> | ||
522 | <tr> | ||
523 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC1.tar.gz'>miniupnpd-2.2.0-RC1.tar.gz</a></td> | ||
524 | <td class="filesize">247772</td> | ||
525 | <td class="filedate">06/06/2020 18:34:50 +0000</td> | ||
526 | <td class="comment">MiniUPnP daemon release source code</td> | ||
527 | <td><a href="miniupnpd-2.2.0-RC1.tar.gz.sig">Signature</a></td> | ||
528 | </tr> | ||
529 | <tr> | ||
530 | <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC0.tar.gz'>miniupnpd-2.2.0-RC0.tar.gz</a></td> | ||
531 | <td class="filesize">245507</td> | ||
532 | <td class="filedate">16/05/2020 18:03:17 +0000</td> | ||
533 | <td class="comment">MiniUPnP daemon release source code</td> | ||
534 | <td><a href="miniupnpd-2.2.0-RC0.tar.gz.sig">Signature</a></td> | ||
535 | </tr> | ||
536 | <tr> | ||
537 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td> | ||
538 | <td class="filesize">245426</td> | ||
539 | <td class="filedate">10/05/2020 18:23:13 +0000</td> | ||
540 | <td class="comment">MiniUPnP daemon source code</td> | ||
541 | <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td> | ||
542 | </tr> | ||
543 | <tr> | ||
544 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200329.tar.gz'>miniupnpd-2.1.20200329.tar.gz</a></td> | ||
545 | <td class="filesize">243725</td> | ||
546 | <td class="filedate">29/03/2020 09:11:02 +0000</td> | ||
547 | <td class="comment">MiniUPnP daemon source code</td> | ||
548 | <td><a href="miniupnpd-2.1.20200329.tar.gz.sig">Signature</a></td> | ||
549 | </tr> | ||
550 | <tr> | ||
551 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20191224.tar.gz'>miniupnpc-2.1.20191224.tar.gz</a></td> | ||
552 | <td class="filesize">94740</td> | ||
553 | <td class="filedate">23/12/2019 23:37:32 +0000</td> | ||
554 | <td class="comment">MiniUPnP client source code</td> | ||
555 | <td><a href="miniupnpc-2.1.20191224.tar.gz.sig">Signature</a></td> | ||
556 | </tr> | ||
557 | <tr> | ||
558 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191006.tar.gz'>miniupnpd-2.1.20191006.tar.gz</a></td> | ||
559 | <td class="filesize">243255</td> | ||
560 | <td class="filedate">06/10/2019 21:02:31 +0000</td> | ||
561 | <td class="comment">MiniUPnP daemon source code</td> | ||
562 | <td><a href="miniupnpd-2.1.20191006.tar.gz.sig">Signature</a></td> | ||
563 | </tr> | ||
564 | <tr> | ||
565 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191005.tar.gz'>miniupnpd-2.1.20191005.tar.gz</a></td> | ||
566 | <td class="filesize">244100</td> | ||
567 | <td class="filedate">05/10/2019 21:33:08 +0000</td> | ||
568 | <td class="comment">MiniUPnP daemon source code</td> | ||
569 | <td><a href="miniupnpd-2.1.20191005.tar.gz.sig">Signature</a></td> | ||
570 | </tr> | ||
571 | <tr> | ||
572 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191003.tar.gz'>miniupnpd-2.1.20191003.tar.gz</a></td> | ||
573 | <td class="filesize">243287</td> | ||
574 | <td class="filedate">02/10/2019 22:23:51 +0000</td> | ||
575 | <td class="comment">MiniUPnP daemon source code</td> | ||
576 | <td><a href="miniupnpd-2.1.20191003.tar.gz.sig">Signature</a></td> | ||
577 | </tr> | ||
578 | <tr> | ||
579 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190924.tar.gz'>miniupnpd-2.1.20190924.tar.gz</a></td> | ||
580 | <td class="filesize">241008</td> | ||
581 | <td class="filedate">24/09/2019 11:58:15 +0000</td> | ||
582 | <td class="comment">MiniUPnP daemon source code</td> | ||
583 | <td><a href="miniupnpd-2.1.20190924.tar.gz.sig">Signature</a></td> | ||
584 | </tr> | ||
585 | <tr> | ||
586 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190902.tar.gz'>miniupnpd-2.1.20190902.tar.gz</a></td> | ||
587 | <td class="filesize">240742</td> | ||
588 | <td class="filedate">01/09/2019 23:03:03 +0000</td> | ||
589 | <td class="comment">MiniUPnP daemon source code</td> | ||
590 | <td><a href="miniupnpd-2.1.20190902.tar.gz.sig">Signature</a></td> | ||
591 | </tr> | ||
592 | <tr> | ||
593 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190824.tar.gz'>miniupnpd-2.1.20190824.tar.gz</a></td> | ||
594 | <td class="filesize">240490</td> | ||
595 | <td class="filedate">24/08/2019 09:21:52 +0000</td> | ||
596 | <td class="comment">MiniUPnP daemon source code</td> | ||
597 | <td><a href="miniupnpd-2.1.20190824.tar.gz.sig">Signature</a></td> | ||
598 | </tr> | ||
599 | <tr> | ||
600 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20190824.tar.gz'>minissdpd-1.5.20190824.tar.gz</a></td> | ||
601 | <td class="filesize">37300</td> | ||
602 | <td class="filedate">24/08/2019 09:17:32 +0000</td> | ||
603 | <td class="comment">MiniSSDPd source code</td> | ||
604 | <td><a href="minissdpd-1.5.20190824.tar.gz.sig">Signature</a></td> | ||
605 | </tr> | ||
606 | <tr> | ||
607 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190824.tar.gz'>miniupnpc-2.1.20190824.tar.gz</a></td> | ||
608 | <td class="filesize">94564</td> | ||
609 | <td class="filedate">24/08/2019 09:12:50 +0000</td> | ||
610 | <td class="comment">MiniUPnP client source code</td> | ||
611 | <td><a href="miniupnpc-2.1.20190824.tar.gz.sig">Signature</a></td> | ||
612 | </tr> | ||
613 | <tr> | ||
614 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190630.tar.gz'>miniupnpd-2.1.20190630.tar.gz</a></td> | ||
615 | <td class="filesize">240466</td> | ||
616 | <td class="filedate">30/06/2019 20:27:38 +0000</td> | ||
617 | <td class="comment">MiniUPnP daemon source code</td> | ||
618 | <td><a href="miniupnpd-2.1.20190630.tar.gz.sig">Signature</a></td> | ||
619 | </tr> | ||
620 | <tr> | ||
621 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190625.tar.gz'>miniupnpd-2.1.20190625.tar.gz</a></td> | ||
622 | <td class="filesize">240120</td> | ||
623 | <td class="filedate">25/06/2019 21:33:49 +0000</td> | ||
624 | <td class="comment">MiniUPnP daemon source code</td> | ||
625 | <td><a href="miniupnpd-2.1.20190625.tar.gz.sig">Signature</a></td> | ||
626 | </tr> | ||
627 | <tr> | ||
628 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190625.tar.gz'>miniupnpc-2.1.20190625.tar.gz</a></td> | ||
629 | <td class="filesize">94461</td> | ||
630 | <td class="filedate">25/06/2019 21:33:26 +0000</td> | ||
631 | <td class="comment">MiniUPnP client source code</td> | ||
632 | <td><a href="miniupnpc-2.1.20190625.tar.gz.sig">Signature</a></td> | ||
633 | </tr> | ||
634 | <tr> | ||
635 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190502.tar.gz'>miniupnpd-2.1.20190502.tar.gz</a></td> | ||
636 | <td class="filesize">236052</td> | ||
637 | <td class="filedate">02/05/2019 17:22:23 +0000</td> | ||
638 | <td class="comment">MiniUPnP daemon source code</td> | ||
639 | <td><a href="miniupnpd-2.1.20190502.tar.gz.sig">Signature</a></td> | ||
640 | </tr> | ||
641 | <tr> | ||
642 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190408.tar.gz'>miniupnpc-2.1.20190408.tar.gz</a></td> | ||
643 | <td class="filesize">94216</td> | ||
644 | <td class="filedate">08/04/2019 12:50:21 +0000</td> | ||
645 | <td class="comment">MiniUPnP client source code</td> | ||
646 | <td><a href="miniupnpc-2.1.20190408.tar.gz.sig">Signature</a></td> | ||
647 | </tr> | ||
648 | <tr> | ||
649 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190408.tar.gz'>miniupnpd-2.1.20190408.tar.gz</a></td> | ||
650 | <td class="filesize">235989</td> | ||
651 | <td class="filedate">08/04/2019 12:50:01 +0000</td> | ||
652 | <td class="comment">MiniUPnP daemon source code</td> | ||
653 | <td><a href="miniupnpd-2.1.20190408.tar.gz.sig">Signature</a></td> | ||
654 | </tr> | ||
655 | <tr> | ||
656 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190403.tar.gz'>miniupnpc-2.1.20190403.tar.gz</a></td> | ||
657 | <td class="filesize">94204</td> | ||
658 | <td class="filedate">03/04/2019 15:41:36 +0000</td> | ||
659 | <td class="comment">MiniUPnP client source code</td> | ||
660 | <td><a href="miniupnpc-2.1.20190403.tar.gz.sig">Signature</a></td> | ||
661 | </tr> | ||
662 | <tr> | ||
663 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190403.tar.gz'>miniupnpd-2.1.20190403.tar.gz</a></td> | ||
664 | <td class="filesize">235909</td> | ||
665 | <td class="filedate">03/04/2019 15:41:17 +0000</td> | ||
666 | <td class="comment">MiniUPnP daemon source code</td> | ||
667 | <td><a href="miniupnpd-2.1.20190403.tar.gz.sig">Signature</a></td> | ||
668 | </tr> | ||
669 | <tr> | ||
670 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20190210.tar.gz'>minissdpd-1.5.20190210.tar.gz</a></td> | ||
671 | <td class="filesize">37227</td> | ||
672 | <td class="filedate">10/02/2019 15:21:49 +0000</td> | ||
673 | <td class="comment">MiniSSDPd source code</td> | ||
674 | <td><a href="minissdpd-1.5.20190210.tar.gz.sig">Signature</a></td> | ||
675 | </tr> | ||
676 | <tr> | ||
677 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190210.tar.gz'>miniupnpc-2.1.20190210.tar.gz</a></td> | ||
678 | <td class="filesize">94125</td> | ||
679 | <td class="filedate">10/02/2019 12:46:09 +0000</td> | ||
680 | <td class="comment">MiniUPnP client source code</td> | ||
681 | <td><a href="miniupnpc-2.1.20190210.tar.gz.sig">Signature</a></td> | ||
682 | </tr> | ||
683 | <tr> | ||
684 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190210.tar.gz'>miniupnpd-2.1.20190210.tar.gz</a></td> | ||
685 | <td class="filesize">235093</td> | ||
686 | <td class="filedate">10/02/2019 11:20:11 +0000</td> | ||
687 | <td class="comment">MiniUPnP daemon source code</td> | ||
688 | <td><a href="miniupnpd-2.1.20190210.tar.gz.sig">Signature</a></td> | ||
689 | </tr> | ||
690 | <tr> | ||
691 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.20180706.tar.gz'>miniupnpd-2.1.20180706.tar.gz</a></td> | ||
692 | <td class="filesize">233675</td> | ||
693 | <td class="filedate">06/07/2018 12:44:24 +0000</td> | ||
694 | <td class="comment">MiniUPnP daemon source code</td> | ||
695 | <td><a href="miniupnpd-2.1.20180706.tar.gz.sig">Signature</a></td> | ||
696 | </tr> | ||
697 | <tr> | ||
698 | <td class="filename"><a href='download.php?file=miniupnpd-2.1.tar.gz'>miniupnpd-2.1.tar.gz</a></td> | ||
699 | <td class="filesize">225458</td> | ||
700 | <td class="filedate">08/05/2018 21:50:32 +0000</td> | ||
701 | <td class="comment">MiniUPnP daemon release source code</td> | ||
702 | <td><a href="miniupnpd-2.1.tar.gz.sig">Signature</a></td> | ||
703 | </tr> | ||
704 | <tr> | ||
705 | <td class="filename"><a href='download.php?file=miniupnpc-2.1.tar.gz'>miniupnpc-2.1.tar.gz</a></td> | ||
706 | <td class="filesize">91914</td> | ||
707 | <td class="filedate">07/05/2018 11:10:59 +0000</td> | ||
708 | <td class="comment">MiniUPnP client release source code</td> | ||
709 | <td><a href="miniupnpc-2.1.tar.gz.sig">Signature</a></td> | ||
710 | </tr> | ||
711 | <tr> | ||
712 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180503.tar.gz'>miniupnpd-2.0.20180503.tar.gz</a></td> | ||
713 | <td class="filesize">225454</td> | ||
714 | <td class="filedate">03/05/2018 08:33:10 +0000</td> | ||
715 | <td class="comment">MiniUPnP daemon source code</td> | ||
716 | <td></td> | ||
717 | </tr> | ||
718 | <tr> | ||
719 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180503.tar.gz'>miniupnpc-2.0.20180503.tar.gz</a></td> | ||
720 | <td class="filesize">88207</td> | ||
721 | <td class="filedate">03/05/2018 08:31:22 +0000</td> | ||
722 | <td class="comment">MiniUPnP client source code</td> | ||
723 | <td></td> | ||
724 | </tr> | ||
725 | <tr> | ||
726 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180422.tar.gz'>miniupnpd-2.0.20180422.tar.gz</a></td> | ||
727 | <td class="filesize">224942</td> | ||
728 | <td class="filedate">22/04/2018 19:48:54 +0000</td> | ||
729 | <td class="comment">MiniUPnP daemon source code</td> | ||
730 | <td></td> | ||
731 | </tr> | ||
732 | <tr> | ||
733 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180412.tar.gz'>miniupnpd-2.0.20180412.tar.gz</a></td> | ||
734 | <td class="filesize">224831</td> | ||
735 | <td class="filedate">12/04/2018 08:16:25 +0000</td> | ||
736 | <td class="comment">MiniUPnP daemon source code</td> | ||
737 | <td></td> | ||
738 | </tr> | ||
739 | <tr> | ||
740 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180410.tar.gz'>miniupnpd-2.0.20180410.tar.gz</a></td> | ||
741 | <td class="filesize">224736</td> | ||
742 | <td class="filedate">10/04/2018 07:58:28 +0000</td> | ||
743 | <td class="comment">MiniUPnP daemon source code</td> | ||
744 | <td></td> | ||
745 | </tr> | ||
746 | <tr> | ||
747 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180410.tar.gz'>miniupnpc-2.0.20180410.tar.gz</a></td> | ||
748 | <td class="filesize">87363</td> | ||
749 | <td class="filedate">10/04/2018 07:52:55 +0000</td> | ||
750 | <td class="comment">MiniUPnP client source code</td> | ||
751 | <td></td> | ||
752 | </tr> | ||
753 | <tr> | ||
754 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180406.tar.gz'>miniupnpc-2.0.20180406.tar.gz</a></td> | ||
755 | <td class="filesize">87374</td> | ||
756 | <td class="filedate">06/04/2018 10:55:21 +0000</td> | ||
757 | <td class="comment">MiniUPnP client source code</td> | ||
758 | <td></td> | ||
759 | </tr> | ||
760 | <tr> | ||
761 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20180223.tar.gz'>minissdpd-1.5.20180223.tar.gz</a></td> | ||
762 | <td class="filesize">36179</td> | ||
763 | <td class="filedate">23/02/2018 14:24:07 +0000</td> | ||
764 | <td class="comment">MiniSSDPd source code</td> | ||
765 | <td></td> | ||
766 | </tr> | ||
767 | <tr> | ||
768 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180222.tar.gz'>miniupnpc-2.0.20180222.tar.gz</a></td> | ||
769 | <td class="filesize">87018</td> | ||
770 | <td class="filedate">22/02/2018 15:09:24 +0000</td> | ||
771 | <td class="comment">MiniUPnP client source code</td> | ||
772 | <td></td> | ||
773 | </tr> | ||
774 | <tr> | ||
775 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180222.tar.gz'>miniupnpd-2.0.20180222.tar.gz</a></td> | ||
776 | <td class="filesize">223697</td> | ||
777 | <td class="filedate">22/02/2018 15:09:14 +0000</td> | ||
778 | <td class="comment">MiniUPnP daemon source code</td> | ||
779 | <td></td> | ||
780 | </tr> | ||
781 | <tr> | ||
782 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180203.tar.gz'>miniupnpd-2.0.20180203.tar.gz</a></td> | ||
783 | <td class="filesize">223084</td> | ||
784 | <td class="filedate">03/02/2018 22:34:46 +0000</td> | ||
785 | <td class="comment">MiniUPnP daemon source code</td> | ||
786 | <td></td> | ||
787 | </tr> | ||
788 | <tr> | ||
789 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180203.tar.gz'>miniupnpc-2.0.20180203.tar.gz</a></td> | ||
790 | <td class="filesize">86772</td> | ||
791 | <td class="filedate">03/02/2018 22:34:32 +0000</td> | ||
792 | <td class="comment">MiniUPnP client source code</td> | ||
793 | <td></td> | ||
794 | </tr> | ||
795 | <tr> | ||
796 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20180203.tar.gz'>minissdpd-1.5.20180203.tar.gz</a></td> | ||
797 | <td class="filesize">35848</td> | ||
798 | <td class="filedate">03/02/2018 22:33:08 +0000</td> | ||
799 | <td class="comment">MiniSSDPd source code</td> | ||
800 | <td></td> | ||
801 | </tr> | ||
802 | <tr> | ||
803 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171212.tar.gz'>miniupnpc-2.0.20171212.tar.gz</a></td> | ||
804 | <td class="filesize">86607</td> | ||
805 | <td class="filedate">12/12/2017 12:03:38 +0000</td> | ||
806 | <td class="comment">MiniUPnP client source code</td> | ||
807 | <td></td> | ||
808 | </tr> | ||
809 | <tr> | ||
810 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20171212.tar.gz'>miniupnpd-2.0.20171212.tar.gz</a></td> | ||
811 | <td class="filesize">222617</td> | ||
812 | <td class="filedate">12/12/2017 12:03:32 +0000</td> | ||
813 | <td class="comment">MiniUPnP daemon source code</td> | ||
814 | <td></td> | ||
815 | </tr> | ||
816 | <tr> | ||
817 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171102.tar.gz'>miniupnpc-2.0.20171102.tar.gz</a></td> | ||
818 | <td class="filesize">86363</td> | ||
819 | <td class="filedate">02/11/2017 17:58:34 +0000</td> | ||
820 | <td class="comment">MiniUPnP client source code</td> | ||
821 | <td></td> | ||
822 | </tr> | ||
823 | <tr> | ||
824 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170509.tar.gz'>miniupnpc-2.0.20170509.tar.gz</a></td> | ||
825 | <td class="filesize">86055</td> | ||
826 | <td class="filedate">09/05/2017 10:14:56 +0000</td> | ||
827 | <td class="comment">MiniUPnP client source code</td> | ||
828 | <td></td> | ||
829 | </tr> | ||
830 | <tr> | ||
831 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170421.tar.gz'>miniupnpc-2.0.20170421.tar.gz</a></td> | ||
832 | <td class="filesize">85984</td> | ||
833 | <td class="filedate">21/04/2017 12:02:26 +0000</td> | ||
834 | <td class="comment">MiniUPnP client source code</td> | ||
835 | <td></td> | ||
836 | </tr> | ||
837 | <tr> | ||
838 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20170421.tar.gz'>miniupnpd-2.0.20170421.tar.gz</a></td> | ||
839 | <td class="filesize">219191</td> | ||
840 | <td class="filedate">21/04/2017 12:02:06 +0000</td> | ||
841 | <td class="comment">MiniUPnP daemon source code</td> | ||
842 | <td></td> | ||
843 | </tr> | ||
844 | <tr> | ||
845 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.20161216.tar.gz'>miniupnpd-2.0.20161216.tar.gz</a></td> | ||
846 | <td class="filesize">218119</td> | ||
847 | <td class="filedate">16/12/2016 09:34:08 +0000</td> | ||
848 | <td class="comment">MiniUPnP daemon source code</td> | ||
849 | <td></td> | ||
850 | </tr> | ||
851 | <tr> | ||
852 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.20161216.tar.gz'>miniupnpc-2.0.20161216.tar.gz</a></td> | ||
853 | <td class="filesize">85780</td> | ||
854 | <td class="filedate">16/12/2016 09:34:03 +0000</td> | ||
855 | <td class="comment">MiniUPnP client source code</td> | ||
856 | <td></td> | ||
857 | </tr> | ||
858 | <tr> | ||
859 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20161216.tar.gz'>minissdpd-1.5.20161216.tar.gz</a></td> | ||
860 | <td class="filesize">35078</td> | ||
861 | <td class="filedate">16/12/2016 09:33:59 +0000</td> | ||
862 | <td class="comment">MiniSSDPd source code</td> | ||
863 | <td></td> | ||
864 | </tr> | ||
865 | <tr> | ||
866 | <td class="filename"><a href='download.php?file=miniupnpd-2.0.tar.gz'>miniupnpd-2.0.tar.gz</a></td> | ||
867 | <td class="filesize">217802</td> | ||
868 | <td class="filedate">19/04/2016 21:12:01 +0000</td> | ||
869 | <td class="comment">MiniUPnP daemon release source code</td> | ||
870 | <td><a href="miniupnpd-2.0.tar.gz.sig">Signature</a></td> | ||
871 | </tr> | ||
872 | <tr> | ||
873 | <td class="filename"><a href='download.php?file=miniupnpc-2.0.tar.gz'>miniupnpc-2.0.tar.gz</a></td> | ||
874 | <td class="filesize">85287</td> | ||
875 | <td class="filedate">19/04/2016 21:07:52 +0000</td> | ||
876 | <td class="comment">MiniUPnP client release source code</td> | ||
877 | <td></td> | ||
878 | </tr> | ||
879 | <tr> | ||
880 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20160301.tar.gz'>minissdpd-1.5.20160301.tar.gz</a></td> | ||
881 | <td class="filesize">34827</td> | ||
882 | <td class="filedate">01/03/2016 18:08:23 +0000</td> | ||
883 | <td class="comment">MiniSSDPd source code</td> | ||
884 | <td></td> | ||
885 | </tr> | ||
886 | <tr> | ||
887 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160222.tar.gz'>miniupnpd-1.9.20160222.tar.gz</a></td> | ||
888 | <td class="filesize">217541</td> | ||
889 | <td class="filedate">22/02/2016 10:21:40 +0000</td> | ||
890 | <td class="comment">MiniUPnP daemon source code</td> | ||
891 | <td></td> | ||
892 | </tr> | ||
893 | <tr> | ||
894 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160216.tar.gz'>miniupnpd-1.9.20160216.tar.gz</a></td> | ||
895 | <td class="filesize">217007</td> | ||
896 | <td class="filedate">16/02/2016 12:41:44 +0000</td> | ||
897 | <td class="comment">MiniUPnP daemon source code</td> | ||
898 | <td></td> | ||
899 | </tr> | ||
900 | <tr> | ||
901 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160212.tar.gz'>miniupnpd-1.9.20160212.tar.gz</a></td> | ||
902 | <td class="filesize">215866</td> | ||
903 | <td class="filedate">12/02/2016 15:22:04 +0000</td> | ||
904 | <td class="comment">MiniUPnP daemon source code</td> | ||
905 | <td></td> | ||
906 | </tr> | ||
907 | <tr> | ||
908 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160209.tar.gz'>miniupnpd-1.9.20160209.tar.gz</a></td> | ||
909 | <td class="filesize">213416</td> | ||
910 | <td class="filedate">09/02/2016 09:47:03 +0000</td> | ||
911 | <td class="comment">MiniUPnP daemon source code</td> | ||
912 | <td></td> | ||
913 | </tr> | ||
914 | <tr> | ||
915 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20160209.tar.gz'>miniupnpc-1.9.20160209.tar.gz</a></td> | ||
916 | <td class="filesize">85268</td> | ||
917 | <td class="filedate">09/02/2016 09:44:50 +0000</td> | ||
918 | <td class="comment">MiniUPnP client source code</td> | ||
919 | <td></td> | ||
920 | </tr> | ||
921 | <tr> | ||
922 | <td class="filename"><a href='download.php?file=minissdpd-1.5.20160119.tar.gz'>minissdpd-1.5.20160119.tar.gz</a></td> | ||
923 | <td class="filesize">34711</td> | ||
924 | <td class="filedate">19/01/2016 13:39:51 +0000</td> | ||
925 | <td class="comment">MiniSSDPd source code</td> | ||
926 | <td></td> | ||
927 | </tr> | ||
928 | <tr> | ||
929 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160113.tar.gz'>miniupnpd-1.9.20160113.tar.gz</a></td> | ||
930 | <td class="filesize">211437</td> | ||
931 | <td class="filedate">13/01/2016 16:03:14 +0000</td> | ||
932 | <td class="comment">MiniUPnP daemon source code</td> | ||
933 | <td></td> | ||
934 | </tr> | ||
935 | <tr> | ||
936 | <td class="filename"><a href='download.php?file=minissdpd-1.5.tar.gz'>minissdpd-1.5.tar.gz</a></td> | ||
937 | <td class="filesize">34404</td> | ||
938 | <td class="filedate">13/01/2016 15:26:53 +0000</td> | ||
939 | <td class="comment">MiniSSDPd release source code</td> | ||
940 | <td></td> | ||
941 | </tr> | ||
942 | <tr> | ||
943 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151212.tar.gz'>miniupnpd-1.9.20151212.tar.gz</a></td> | ||
944 | <td class="filesize">210912</td> | ||
945 | <td class="filedate">12/12/2015 10:06:07 +0000</td> | ||
946 | <td class="comment">MiniUPnP daemon source code</td> | ||
947 | <td></td> | ||
948 | </tr> | ||
949 | <tr> | ||
950 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151118.tar.gz'>miniupnpd-1.9.20151118.tar.gz</a></td> | ||
951 | <td class="filesize">210322</td> | ||
952 | <td class="filedate">18/11/2015 08:59:46 +0000</td> | ||
953 | <td class="comment">MiniUPnP daemon source code</td> | ||
954 | <td></td> | ||
955 | </tr> | ||
956 | <tr> | ||
957 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151026.tar.gz'>miniupnpc-1.9.20151026.tar.gz</a></td> | ||
958 | <td class="filesize">84208</td> | ||
959 | <td class="filedate">26/10/2015 17:07:34 +0000</td> | ||
960 | <td class="comment">MiniUPnP client source code</td> | ||
961 | <td></td> | ||
962 | </tr> | ||
963 | <tr> | ||
964 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151008.tar.gz'>miniupnpc-1.9.20151008.tar.gz</a></td> | ||
965 | <td class="filesize">83538</td> | ||
966 | <td class="filedate">08/10/2015 16:22:28 +0000</td> | ||
967 | <td class="comment">MiniUPnP client source code</td> | ||
968 | <td></td> | ||
969 | </tr> | ||
970 | <tr> | ||
971 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150922.tar.gz'>miniupnpd-1.9.20150922.tar.gz</a></td> | ||
972 | <td class="filesize">208700</td> | ||
973 | <td class="filedate">22/09/2015 10:21:50 +0000</td> | ||
974 | <td class="comment">MiniUPnP daemon source code</td> | ||
975 | <td></td> | ||
976 | </tr> | ||
977 | <tr> | ||
978 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150918.zip'>upnpc-exe-win32-20150918.zip</a></td> | ||
979 | <td class="filesize">100004</td> | ||
980 | <td class="filedate">18/09/2015 12:50:51 +0000</td> | ||
981 | <td class="comment">Windows executable</td> | ||
982 | <td></td> | ||
983 | </tr> | ||
984 | <tr> | ||
985 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150917.tar.gz'>miniupnpc-1.9.20150917.tar.gz</a></td> | ||
986 | <td class="filesize">82609</td> | ||
987 | <td class="filedate">17/09/2015 14:09:14 +0000</td> | ||
988 | <td class="comment">MiniUPnP client source code</td> | ||
989 | <td></td> | ||
990 | </tr> | ||
991 | <tr> | ||
992 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150824.zip'>upnpc-exe-win32-20150824.zip</a></td> | ||
993 | <td class="filesize">99520</td> | ||
994 | <td class="filedate">24/08/2015 15:25:18 +0000</td> | ||
995 | <td class="comment">Windows executable</td> | ||
996 | <td></td> | ||
997 | </tr> | ||
998 | <tr> | ||
999 | <td class="filename"><a href='download.php?file=minissdpd-1.4.tar.gz'>minissdpd-1.4.tar.gz</a></td> | ||
1000 | <td class="filesize">32017</td> | ||
1001 | <td class="filedate">06/08/2015 13:38:37 +0000</td> | ||
1002 | <td class="comment">MiniSSDPd release source code</td> | ||
1003 | <td></td> | ||
1004 | </tr> | ||
1005 | <tr> | ||
1006 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150730.tar.gz'>miniupnpc-1.9.20150730.tar.gz</a></td> | ||
1007 | <td class="filesize">81431</td> | ||
1008 | <td class="filedate">29/07/2015 22:10:00 +0000</td> | ||
1009 | <td class="comment">MiniUPnP client source code</td> | ||
1010 | <td></td> | ||
1011 | </tr> | ||
1012 | <tr> | ||
1013 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150721.tar.gz'>miniupnpd-1.9.20150721.tar.gz</a></td> | ||
1014 | <td class="filesize">207562</td> | ||
1015 | <td class="filedate">21/07/2015 13:35:51 +0000</td> | ||
1016 | <td class="comment">MiniUPnP daemon source code</td> | ||
1017 | <td></td> | ||
1018 | </tr> | ||
1019 | <tr> | ||
1020 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150721.tar.gz'>miniupnpc-1.9.20150721.tar.gz</a></td> | ||
1021 | <td class="filesize">80521</td> | ||
1022 | <td class="filedate">21/07/2015 13:27:00 +0000</td> | ||
1023 | <td class="comment">MiniUPnP client source code</td> | ||
1024 | <td></td> | ||
1025 | </tr> | ||
1026 | <tr> | ||
1027 | <td class="filename"><a href='download.php?file=libnatpmp-20150609.tar.gz'>libnatpmp-20150609.tar.gz</a></td> | ||
1028 | <td class="filesize">24392</td> | ||
1029 | <td class="filedate">09/06/2015 15:40:28 +0000</td> | ||
1030 | <td class="comment">libnatpmp source code</td> | ||
1031 | <td></td> | ||
1032 | </tr> | ||
1033 | <tr> | ||
1034 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150609.tar.gz'>miniupnpc-1.9.20150609.tar.gz</a></td> | ||
1035 | <td class="filesize">79311</td> | ||
1036 | <td class="filedate">09/06/2015 15:39:48 +0000</td> | ||
1037 | <td class="comment">MiniUPnP client source code</td> | ||
1038 | <td></td> | ||
1039 | </tr> | ||
1040 | <tr> | ||
1041 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150609.tar.gz'>miniupnpd-1.9.20150609.tar.gz</a></td> | ||
1042 | <td class="filesize">207088</td> | ||
1043 | <td class="filedate">09/06/2015 15:39:36 +0000</td> | ||
1044 | <td class="comment">MiniUPnP daemon source code</td> | ||
1045 | <td></td> | ||
1046 | </tr> | ||
1047 | <tr> | ||
1048 | <td class="filename"><a href='download.php?file=minissdpd-1.3.20150527.tar.gz'>minissdpd-1.3.20150527.tar.gz</a></td> | ||
1049 | <td class="filesize">31025</td> | ||
1050 | <td class="filedate">27/05/2015 09:17:15 +0000</td> | ||
1051 | <td class="comment">MiniSSDPd source code</td> | ||
1052 | <td></td> | ||
1053 | </tr> | ||
1054 | <tr> | ||
1055 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150522.tar.gz'>miniupnpc-1.9.20150522.tar.gz</a></td> | ||
1056 | <td class="filesize">79080</td> | ||
1057 | <td class="filedate">22/05/2015 11:02:27 +0000</td> | ||
1058 | <td class="comment">MiniUPnP client source code</td> | ||
1059 | <td></td> | ||
1060 | </tr> | ||
1061 | <tr> | ||
1062 | <td class="filename"><a href='download.php?file=minissdpd-1.3.20150522.tar.gz'>minissdpd-1.3.20150522.tar.gz</a></td> | ||
1063 | <td class="filesize">30334</td> | ||
1064 | <td class="filedate">22/05/2015 11:02:04 +0000</td> | ||
1065 | <td class="comment">MiniSSDPd source code</td> | ||
1066 | <td></td> | ||
1067 | </tr> | ||
1068 | <tr> | ||
1069 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150430.tar.gz'>miniupnpd-1.9.20150430.tar.gz</a></td> | ||
1070 | <td class="filesize">205930</td> | ||
1071 | <td class="filedate">30/04/2015 09:09:27 +0000</td> | ||
1072 | <td class="comment">MiniUPnP daemon source code</td> | ||
1073 | <td></td> | ||
1074 | </tr> | ||
1075 | <tr> | ||
1076 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150430.tar.gz'>miniupnpc-1.9.20150430.tar.gz</a></td> | ||
1077 | <td class="filesize">78459</td> | ||
1078 | <td class="filedate">30/04/2015 08:39:31 +0000</td> | ||
1079 | <td class="comment">MiniUPnP client source code</td> | ||
1080 | <td></td> | ||
1081 | </tr> | ||
1082 | <tr> | ||
1083 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150427.tar.gz'>miniupnpc-1.9.20150427.tar.gz</a></td> | ||
1084 | <td class="filesize">78424</td> | ||
1085 | <td class="filedate">27/04/2015 16:08:42 +0000</td> | ||
1086 | <td class="comment">MiniUPnP client source code</td> | ||
1087 | <td></td> | ||
1088 | </tr> | ||
1089 | <tr> | ||
1090 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150427.tar.gz'>miniupnpd-1.9.20150427.tar.gz</a></td> | ||
1091 | <td class="filesize">191157</td> | ||
1092 | <td class="filedate">27/04/2015 16:08:27 +0000</td> | ||
1093 | <td class="comment">MiniUPnP daemon source code</td> | ||
1094 | <td></td> | ||
1095 | </tr> | ||
1096 | <tr> | ||
1097 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150307.tar.gz'>miniupnpd-1.9.20150307.tar.gz</a></td> | ||
1098 | <td class="filesize">190913</td> | ||
1099 | <td class="filedate">07/03/2015 16:11:51 +0000</td> | ||
1100 | <td class="comment">MiniUPnP daemon source code</td> | ||
1101 | <td></td> | ||
1102 | </tr> | ||
1103 | <tr> | ||
1104 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150206.tar.gz'>miniupnpc-1.9.20150206.tar.gz</a></td> | ||
1105 | <td class="filesize">76864</td> | ||
1106 | <td class="filedate">06/02/2015 14:38:00 +0000</td> | ||
1107 | <td class="comment">MiniUPnP client source code</td> | ||
1108 | <td></td> | ||
1109 | </tr> | ||
1110 | <tr> | ||
1111 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141209.tar.gz'>miniupnpd-1.9.20141209.tar.gz</a></td> | ||
1112 | <td class="filesize">193183</td> | ||
1113 | <td class="filedate">09/12/2014 09:58:34 +0000</td> | ||
1114 | <td class="comment">MiniUPnP daemon source code</td> | ||
1115 | <td></td> | ||
1116 | </tr> | ||
1117 | <tr> | ||
1118 | <td class="filename"><a href='download.php?file=minissdpd-1.3.tar.gz'>minissdpd-1.3.tar.gz</a></td> | ||
1119 | <td class="filesize">30326</td> | ||
1120 | <td class="filedate">09/12/2014 09:57:30 +0000</td> | ||
1121 | <td class="comment">MiniSSDPd release source code</td> | ||
1122 | <td></td> | ||
1123 | </tr> | ||
1124 | <tr> | ||
1125 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20141204.tar.gz'>minissdpd-1.2.20141204.tar.gz</a></td> | ||
1126 | <td class="filesize">26978</td> | ||
1127 | <td class="filedate">04/12/2014 10:55:26 +0000</td> | ||
1128 | <td class="comment">MiniSSDPd source code</td> | ||
1129 | <td></td> | ||
1130 | </tr> | ||
1131 | <tr> | ||
1132 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141204.tar.gz'>miniupnpd-1.9.20141204.tar.gz</a></td> | ||
1133 | <td class="filesize">192597</td> | ||
1134 | <td class="filedate">04/12/2014 10:55:03 +0000</td> | ||
1135 | <td class="comment">MiniUPnP daemon source code</td> | ||
1136 | <td></td> | ||
1137 | </tr> | ||
1138 | <tr> | ||
1139 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20141128.tar.gz'>minissdpd-1.2.20141128.tar.gz</a></td> | ||
1140 | <td class="filesize">26795</td> | ||
1141 | <td class="filedate">28/11/2014 16:33:10 +0000</td> | ||
1142 | <td class="comment">MiniSSDPd source code</td> | ||
1143 | <td></td> | ||
1144 | </tr> | ||
1145 | <tr> | ||
1146 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141128.tar.gz'>miniupnpd-1.9.20141128.tar.gz</a></td> | ||
1147 | <td class="filesize">192558</td> | ||
1148 | <td class="filedate">28/11/2014 13:31:36 +0000</td> | ||
1149 | <td class="comment">MiniUPnP daemon source code</td> | ||
1150 | <td></td> | ||
1151 | </tr> | ||
1152 | <tr> | ||
1153 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141128.tar.gz'>miniupnpc-1.9.20141128.tar.gz</a></td> | ||
1154 | <td class="filesize">76541</td> | ||
1155 | <td class="filedate">28/11/2014 13:31:15 +0000</td> | ||
1156 | <td class="comment">MiniUPnP client source code</td> | ||
1157 | <td></td> | ||
1158 | </tr> | ||
1159 | <tr> | ||
1160 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141117.tar.gz'>miniupnpc-1.9.20141117.tar.gz</a></td> | ||
1161 | <td class="filesize">73865</td> | ||
1162 | <td class="filedate">17/11/2014 09:51:36 +0000</td> | ||
1163 | <td class="comment">MiniUPnP client source code</td> | ||
1164 | <td></td> | ||
1165 | </tr> | ||
1166 | <tr> | ||
1167 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141113.tar.gz'>miniupnpc-1.9.20141113.tar.gz</a></td> | ||
1168 | <td class="filesize">72857</td> | ||
1169 | <td class="filedate">13/11/2014 10:36:44 +0000</td> | ||
1170 | <td class="comment">MiniUPnP client source code</td> | ||
1171 | <td></td> | ||
1172 | </tr> | ||
1173 | <tr> | ||
1174 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20141108.tar.gz'>minissdpd-1.2.20141108.tar.gz</a></td> | ||
1175 | <td class="filesize">22001</td> | ||
1176 | <td class="filedate">08/11/2014 13:55:41 +0000</td> | ||
1177 | <td class="comment">MiniSSDPd source code</td> | ||
1178 | <td></td> | ||
1179 | </tr> | ||
1180 | <tr> | ||
1181 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141108.tar.gz'>miniupnpc-1.9.20141108.tar.gz</a></td> | ||
1182 | <td class="filesize">72781</td> | ||
1183 | <td class="filedate">08/11/2014 13:53:48 +0000</td> | ||
1184 | <td class="comment">MiniUPnP client source code</td> | ||
1185 | <td></td> | ||
1186 | </tr> | ||
1187 | <tr> | ||
1188 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141108.tar.gz'>miniupnpd-1.9.20141108.tar.gz</a></td> | ||
1189 | <td class="filesize">192413</td> | ||
1190 | <td class="filedate">08/11/2014 13:53:38 +0000</td> | ||
1191 | <td class="comment">MiniUPnP daemon source code</td> | ||
1192 | <td></td> | ||
1193 | </tr> | ||
1194 | <tr> | ||
1195 | <td class="filename"><a href='download.php?file=miniupnpd-1.9.tar.gz'>miniupnpd-1.9.tar.gz</a></td> | ||
1196 | <td class="filesize">192183</td> | ||
1197 | <td class="filedate">27/10/2014 16:45:34 +0000</td> | ||
1198 | <td class="comment">MiniUPnP daemon release source code</td> | ||
1199 | <td></td> | ||
1200 | </tr> | ||
1201 | <tr> | ||
1202 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141027.tar.gz'>miniupnpc-1.9.20141027.tar.gz</a></td> | ||
1203 | <td class="filesize">76763</td> | ||
1204 | <td class="filedate">27/10/2014 16:45:25 +0000</td> | ||
1205 | <td class="comment">MiniUPnP client source code</td> | ||
1206 | <td></td> | ||
1207 | </tr> | ||
1208 | <tr> | ||
1209 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141022.tar.gz'>miniupnpd-1.8.20141022.tar.gz</a></td> | ||
1210 | <td class="filesize">191630</td> | ||
1211 | <td class="filedate">22/10/2014 09:17:41 +0000</td> | ||
1212 | <td class="comment">MiniUPnP daemon source code</td> | ||
1213 | <td></td> | ||
1214 | </tr> | ||
1215 | <tr> | ||
1216 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141021.tar.gz'>miniupnpd-1.8.20141021.tar.gz</a></td> | ||
1217 | <td class="filesize">191270</td> | ||
1218 | <td class="filedate">21/10/2014 14:18:58 +0000</td> | ||
1219 | <td class="comment">MiniUPnP daemon source code</td> | ||
1220 | <td></td> | ||
1221 | </tr> | ||
1222 | <tr> | ||
1223 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140911.tar.gz'>miniupnpc-1.9.20140911.tar.gz</a></td> | ||
1224 | <td class="filesize">76855</td> | ||
1225 | <td class="filedate">11/09/2014 14:15:23 +0000</td> | ||
1226 | <td class="comment">MiniUPnP client source code</td> | ||
1227 | <td></td> | ||
1228 | </tr> | ||
1229 | <tr> | ||
1230 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20140906.tar.gz'>minissdpd-1.2.20140906.tar.gz</a></td> | ||
1231 | <td class="filesize">21956</td> | ||
1232 | <td class="filedate">06/09/2014 08:34:10 +0000</td> | ||
1233 | <td class="comment">MiniSSDPd source code</td> | ||
1234 | <td></td> | ||
1235 | </tr> | ||
1236 | <tr> | ||
1237 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140906.tar.gz'>miniupnpd-1.8.20140906.tar.gz</a></td> | ||
1238 | <td class="filesize">191183</td> | ||
1239 | <td class="filedate">06/09/2014 08:34:02 +0000</td> | ||
1240 | <td class="comment">MiniUPnP daemon source code</td> | ||
1241 | <td></td> | ||
1242 | </tr> | ||
1243 | <tr> | ||
1244 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140906.tar.gz'>miniupnpc-1.9.20140906.tar.gz</a></td> | ||
1245 | <td class="filesize">76791</td> | ||
1246 | <td class="filedate">06/09/2014 08:33:45 +0000</td> | ||
1247 | <td class="comment">MiniUPnP client source code</td> | ||
1248 | <td></td> | ||
1249 | </tr> | ||
1250 | <tr> | ||
1251 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140701.tar.gz'>miniupnpc-1.9.20140701.tar.gz</a></td> | ||
1252 | <td class="filesize">76735</td> | ||
1253 | <td class="filedate">01/07/2014 13:06:51 +0000</td> | ||
1254 | <td class="comment">MiniUPnP client source code</td> | ||
1255 | <td></td> | ||
1256 | </tr> | ||
1257 | <tr> | ||
1258 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140610.tar.gz'>miniupnpc-1.9.20140610.tar.gz</a></td> | ||
1259 | <td class="filesize">76674</td> | ||
1260 | <td class="filedate">10/06/2014 10:28:27 +0000</td> | ||
1261 | <td class="comment">MiniUPnP client source code</td> | ||
1262 | <td></td> | ||
1263 | </tr> | ||
1264 | <tr> | ||
1265 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20140610.tar.gz'>minissdpd-1.2.20140610.tar.gz</a></td> | ||
1266 | <td class="filesize">21909</td> | ||
1267 | <td class="filedate">10/06/2014 10:03:29 +0000</td> | ||
1268 | <td class="comment">MiniSSDPd source code</td> | ||
1269 | <td></td> | ||
1270 | </tr> | ||
1271 | <tr> | ||
1272 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140523.tar.gz'>miniupnpd-1.8.20140523.tar.gz</a></td> | ||
1273 | <td class="filesize">190936</td> | ||
1274 | <td class="filedate">23/05/2014 15:48:03 +0000</td> | ||
1275 | <td class="comment">MiniUPnP daemon source code</td> | ||
1276 | <td></td> | ||
1277 | </tr> | ||
1278 | <tr> | ||
1279 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140422.zip'>upnpc-exe-win32-20140422.zip</a></td> | ||
1280 | <td class="filesize">97505</td> | ||
1281 | <td class="filedate">22/04/2014 10:10:07 +0000</td> | ||
1282 | <td class="comment">Windows executable</td> | ||
1283 | <td></td> | ||
1284 | </tr> | ||
1285 | <tr> | ||
1286 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140422.tar.gz'>miniupnpd-1.8.20140422.tar.gz</a></td> | ||
1287 | <td class="filesize">187225</td> | ||
1288 | <td class="filedate">22/04/2014 08:58:56 +0000</td> | ||
1289 | <td class="comment">MiniUPnP daemon source code</td> | ||
1290 | <td></td> | ||
1291 | </tr> | ||
1292 | <tr> | ||
1293 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140401.tar.gz'>miniupnpd-1.8.20140401.tar.gz</a></td> | ||
1294 | <td class="filesize">183131</td> | ||
1295 | <td class="filedate">01/04/2014 10:07:20 +0000</td> | ||
1296 | <td class="comment">MiniUPnP daemon source code</td> | ||
1297 | <td></td> | ||
1298 | </tr> | ||
1299 | <tr> | ||
1300 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140401.tar.gz'>miniupnpc-1.9.20140401.tar.gz</a></td> | ||
1301 | <td class="filesize">74703</td> | ||
1302 | <td class="filedate">01/04/2014 09:49:46 +0000</td> | ||
1303 | <td class="comment">MiniUPnP client source code</td> | ||
1304 | <td></td> | ||
1305 | </tr> | ||
1306 | <tr> | ||
1307 | <td class="filename"><a href='download.php?file=libnatpmp-20140401.tar.gz'>libnatpmp-20140401.tar.gz</a></td> | ||
1308 | <td class="filesize">23302</td> | ||
1309 | <td class="filedate">01/04/2014 09:49:44 +0000</td> | ||
1310 | <td class="comment">libnatpmp source code</td> | ||
1311 | <td></td> | ||
1312 | </tr> | ||
1313 | <tr> | ||
1314 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140313.tar.gz'>miniupnpd-1.8.20140313.tar.gz</a></td> | ||
1315 | <td class="filesize">177120</td> | ||
1316 | <td class="filedate">13/03/2014 10:39:11 +0000</td> | ||
1317 | <td class="comment">MiniUPnP daemon source code</td> | ||
1318 | <td></td> | ||
1319 | </tr> | ||
1320 | <tr> | ||
1321 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140310.tar.gz'>miniupnpd-1.8.20140310.tar.gz</a></td> | ||
1322 | <td class="filesize">176585</td> | ||
1323 | <td class="filedate">09/03/2014 23:16:49 +0000</td> | ||
1324 | <td class="comment">MiniUPnP daemon source code</td> | ||
1325 | <td></td> | ||
1326 | </tr> | ||
1327 | <tr> | ||
1328 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140225.tar.gz'>miniupnpd-1.8.20140225.tar.gz</a></td> | ||
1329 | <td class="filesize">175183</td> | ||
1330 | <td class="filedate">25/02/2014 11:01:29 +0000</td> | ||
1331 | <td class="comment">MiniUPnP daemon source code</td> | ||
1332 | <td></td> | ||
1333 | </tr> | ||
1334 | <tr> | ||
1335 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140203.tar.gz'>miniupnpd-1.8.20140203.tar.gz</a></td> | ||
1336 | <td class="filesize">170112</td> | ||
1337 | <td class="filedate">03/02/2014 09:56:05 +0000</td> | ||
1338 | <td class="comment">MiniUPnP daemon source code</td> | ||
1339 | <td></td> | ||
1340 | </tr> | ||
1341 | <tr> | ||
1342 | <td class="filename"><a href='download.php?file=miniupnpc-1.9.tar.gz'>miniupnpc-1.9.tar.gz</a></td> | ||
1343 | <td class="filesize">74230</td> | ||
1344 | <td class="filedate">31/01/2014 13:57:40 +0000</td> | ||
1345 | <td class="comment">MiniUPnP client release source code</td> | ||
1346 | <td></td> | ||
1347 | </tr> | ||
1348 | <tr> | ||
1349 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140127.tar.gz'>miniupnpd-1.8.20140127.tar.gz</a></td> | ||
1350 | <td class="filesize">170467</td> | ||
1351 | <td class="filedate">27/01/2014 11:25:34 +0000</td> | ||
1352 | <td class="comment">MiniUPnP daemon source code</td> | ||
1353 | <td></td> | ||
1354 | </tr> | ||
1355 | <tr> | ||
1356 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140117.zip'>upnpc-exe-win32-20140117.zip</a></td> | ||
1357 | <td class="filesize">97270</td> | ||
1358 | <td class="filedate">17/01/2014 11:37:53 +0000</td> | ||
1359 | <td class="comment">Windows executable</td> | ||
1360 | <td></td> | ||
1361 | </tr> | ||
1362 | <tr> | ||
1363 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131216.tar.gz'>miniupnpd-1.8.20131216.tar.gz</a></td> | ||
1364 | <td class="filesize">170277</td> | ||
1365 | <td class="filedate">16/12/2013 16:15:40 +0000</td> | ||
1366 | <td class="comment">MiniUPnP daemon source code</td> | ||
1367 | <td></td> | ||
1368 | </tr> | ||
1369 | <tr> | ||
1370 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131213.tar.gz'>miniupnpd-1.8.20131213.tar.gz</a></td> | ||
1371 | <td class="filesize">169753</td> | ||
1372 | <td class="filedate">13/12/2013 16:18:10 +0000</td> | ||
1373 | <td class="comment">MiniUPnP daemon source code</td> | ||
1374 | <td></td> | ||
1375 | </tr> | ||
1376 | <tr> | ||
1377 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131209.tar.gz'>miniupnpc-1.8.20131209.tar.gz</a></td> | ||
1378 | <td class="filesize">73900</td> | ||
1379 | <td class="filedate">09/12/2013 20:52:54 +0000</td> | ||
1380 | <td class="comment">MiniUPnP client source code</td> | ||
1381 | <td></td> | ||
1382 | </tr> | ||
1383 | <tr> | ||
1384 | <td class="filename"><a href='download.php?file=libnatpmp-20131126.tar.gz'>libnatpmp-20131126.tar.gz</a></td> | ||
1385 | <td class="filesize">22972</td> | ||
1386 | <td class="filedate">26/11/2013 08:51:36 +0000</td> | ||
1387 | <td class="comment">libnatpmp source code</td> | ||
1388 | <td></td> | ||
1389 | </tr> | ||
1390 | <tr> | ||
1391 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131007.tar.gz'>miniupnpc-1.8.20131007.tar.gz</a></td> | ||
1392 | <td class="filesize">73750</td> | ||
1393 | <td class="filedate">07/10/2013 10:10:25 +0000</td> | ||
1394 | <td class="comment">MiniUPnP client source code</td> | ||
1395 | <td></td> | ||
1396 | </tr> | ||
1397 | <tr> | ||
1398 | <td class="filename"><a href='download.php?file=libnatpmp-20130911.tar.gz'>libnatpmp-20130911.tar.gz</a></td> | ||
1399 | <td class="filesize">18744</td> | ||
1400 | <td class="filedate">11/09/2013 07:35:51 +0000</td> | ||
1401 | <td class="comment">libnatpmp source code</td> | ||
1402 | <td></td> | ||
1403 | </tr> | ||
1404 | <tr> | ||
1405 | <td class="filename"><a href='download.php?file=libnatpmp-20130910.tar.gz'>libnatpmp-20130910.tar.gz</a></td> | ||
1406 | <td class="filesize">18734</td> | ||
1407 | <td class="filedate">10/09/2013 20:15:34 +0000</td> | ||
1408 | <td class="comment">libnatpmp source code</td> | ||
1409 | <td></td> | ||
1410 | </tr> | ||
1411 | <tr> | ||
1412 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20130907.tar.gz'>minissdpd-1.2.20130907.tar.gz</a></td> | ||
1413 | <td class="filesize">20237</td> | ||
1414 | <td class="filedate">07/09/2013 06:46:31 +0000</td> | ||
1415 | <td class="comment">MiniSSDPd source code</td> | ||
1416 | <td></td> | ||
1417 | </tr> | ||
1418 | <tr> | ||
1419 | <td class="filename"><a href='download.php?file=minissdpd-1.2.20130819.tar.gz'>minissdpd-1.2.20130819.tar.gz</a></td> | ||
1420 | <td class="filesize">20772</td> | ||
1421 | <td class="filedate">19/08/2013 16:50:29 +0000</td> | ||
1422 | <td class="comment">MiniSSDPd source code</td> | ||
1423 | <td></td> | ||
1424 | </tr> | ||
1425 | <tr> | ||
1426 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130801.tar.gz'>miniupnpc-1.8.20130801.tar.gz</a></td> | ||
1427 | <td class="filesize">73426</td> | ||
1428 | <td class="filedate">01/08/2013 21:38:05 +0000</td> | ||
1429 | <td class="comment">MiniUPnP client source code</td> | ||
1430 | <td></td> | ||
1431 | </tr> | ||
1432 | <tr> | ||
1433 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130730.tar.gz'>miniupnpd-1.8.20130730.tar.gz</a></td> | ||
1434 | <td class="filesize">149904</td> | ||
1435 | <td class="filedate">30/07/2013 11:37:48 +0000</td> | ||
1436 | <td class="comment">MiniUPnP daemon source code</td> | ||
1437 | <td></td> | ||
1438 | </tr> | ||
1439 | <tr> | ||
1440 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130607.tar.gz'>miniupnpd-1.8.20130607.tar.gz</a></td> | ||
1441 | <td class="filesize">149521</td> | ||
1442 | <td class="filedate">07/06/2013 08:46:17 +0000</td> | ||
1443 | <td class="comment">MiniUPnP daemon source code</td> | ||
1444 | <td></td> | ||
1445 | </tr> | ||
1446 | <tr> | ||
1447 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130521.tar.gz'>miniupnpd-1.8.20130521.tar.gz</a></td> | ||
1448 | <td class="filesize">149276</td> | ||
1449 | <td class="filedate">21/05/2013 09:01:33 +0000</td> | ||
1450 | <td class="comment">MiniUPnP daemon source code</td> | ||
1451 | <td></td> | ||
1452 | </tr> | ||
1453 | <tr> | ||
1454 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130503.tar.gz'>miniupnpd-1.8.20130503.tar.gz</a></td> | ||
1455 | <td class="filesize">148420</td> | ||
1456 | <td class="filedate">03/05/2013 19:27:16 +0000</td> | ||
1457 | <td class="comment">MiniUPnP daemon source code</td> | ||
1458 | <td></td> | ||
1459 | </tr> | ||
1460 | <tr> | ||
1461 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130503.tar.gz'>miniupnpc-1.8.20130503.tar.gz</a></td> | ||
1462 | <td class="filesize">71858</td> | ||
1463 | <td class="filedate">03/05/2013 19:27:07 +0000</td> | ||
1464 | <td class="comment">MiniUPnP client source code</td> | ||
1465 | <td></td> | ||
1466 | </tr> | ||
1467 | <tr> | ||
1468 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130426.tar.gz'>miniupnpd-1.8.20130426.tar.gz</a></td> | ||
1469 | <td class="filesize">147890</td> | ||
1470 | <td class="filedate">26/04/2013 16:57:20 +0000</td> | ||
1471 | <td class="comment">MiniUPnP daemon source code</td> | ||
1472 | <td></td> | ||
1473 | </tr> | ||
1474 | <tr> | ||
1475 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130211.tar.gz'>miniupnpc-1.8.20130211.tar.gz</a></td> | ||
1476 | <td class="filesize">70723</td> | ||
1477 | <td class="filedate">11/02/2013 10:32:44 +0000</td> | ||
1478 | <td class="comment">MiniUPnP client source code</td> | ||
1479 | <td></td> | ||
1480 | </tr> | ||
1481 | <tr> | ||
1482 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130207.tar.gz'>miniupnpd-1.8.20130207.tar.gz</a></td> | ||
1483 | <td class="filesize">147325</td> | ||
1484 | <td class="filedate">07/02/2013 12:29:32 +0000</td> | ||
1485 | <td class="comment">MiniUPnP daemon source code</td> | ||
1486 | <td></td> | ||
1487 | </tr> | ||
1488 | <tr> | ||
1489 | <td class="filename"><a href='download.php?file=miniupnpc-1.8.tar.gz'>miniupnpc-1.8.tar.gz</a></td> | ||
1490 | <td class="filesize">70624</td> | ||
1491 | <td class="filedate">06/02/2013 14:31:06 +0000</td> | ||
1492 | <td class="comment">MiniUPnP client release source code</td> | ||
1493 | <td></td> | ||
1494 | </tr> | ||
1495 | <tr> | ||
1496 | <td class="filename"><a href='download.php?file=miniupnpd-1.8.tar.gz'>miniupnpd-1.8.tar.gz</a></td> | ||
1497 | <td class="filesize">146679</td> | ||
1498 | <td class="filedate">06/02/2013 14:30:59 +0000</td> | ||
1499 | <td class="comment">MiniUPnP daemon release source code</td> | ||
1500 | <td></td> | ||
1501 | </tr> | ||
1502 | <tr> | ||
1503 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20121009.zip'>upnpc-exe-win32-20121009.zip</a></td> | ||
1504 | <td class="filesize">96513</td> | ||
1505 | <td class="filedate">09/10/2012 17:54:12 +0000</td> | ||
1506 | <td class="comment">Windows executable</td> | ||
1507 | <td></td> | ||
1508 | </tr> | ||
1509 | <tr> | ||
1510 | <td class="filename"><a href='download.php?file=miniupnpd-1.7.20121005.tar.gz'>miniupnpd-1.7.20121005.tar.gz</a></td> | ||
1511 | <td class="filesize">144393</td> | ||
1512 | <td class="filedate">04/10/2012 22:39:05 +0000</td> | ||
1513 | <td class="comment">MiniUPnP daemon source code</td> | ||
1514 | <td></td> | ||
1515 | </tr> | ||
1516 | <tr> | ||
1517 | <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120830.tar.gz'>miniupnpc-1.7.20120830.tar.gz</a></td> | ||
1518 | <td class="filesize">70074</td> | ||
1519 | <td class="filedate">30/08/2012 08:41:51 +0000</td> | ||
1520 | <td class="comment">MiniUPnP client source code</td> | ||
1521 | <td></td> | ||
1522 | </tr> | ||
1523 | <tr> | ||
1524 | <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120824.tar.gz'>miniupnpd-1.7.20120824.tar.gz</a></td> | ||
1525 | <td class="filesize">141960</td> | ||
1526 | <td class="filedate">24/08/2012 18:15:01 +0000</td> | ||
1527 | <td class="comment">MiniUPnP daemon source code</td> | ||
1528 | <td></td> | ||
1529 | </tr> | ||
1530 | <tr> | ||
1531 | <td class="filename"><a href='download.php?file=libnatpmp-20120821.tar.gz'>libnatpmp-20120821.tar.gz</a></td> | ||
1532 | <td class="filesize">17832</td> | ||
1533 | <td class="filedate">21/08/2012 17:24:46 +0000</td> | ||
1534 | <td class="comment">libnatpmp source code</td> | ||
1535 | <td></td> | ||
1536 | </tr> | ||
1537 | <tr> | ||
1538 | <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120714.tar.gz'>miniupnpc-1.7.20120714.tar.gz</a></td> | ||
1539 | <td class="filesize">69570</td> | ||
1540 | <td class="filedate">14/07/2012 14:40:47 +0000</td> | ||
1541 | <td class="comment">MiniUPnP client source code</td> | ||
1542 | <td></td> | ||
1543 | </tr> | ||
1544 | <tr> | ||
1545 | <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120711.tar.gz'>miniupnpc-1.7.20120711.tar.gz</a></td> | ||
1546 | <td class="filesize">69580</td> | ||
1547 | <td class="filedate">10/07/2012 22:27:05 +0000</td> | ||
1548 | <td class="comment">MiniUPnP client source code</td> | ||
1549 | <td></td> | ||
1550 | </tr> | ||
1551 | <tr> | ||
1552 | <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120711.tar.gz'>miniupnpd-1.7.20120711.tar.gz</a></td> | ||
1553 | <td class="filesize">141380</td> | ||
1554 | <td class="filedate">10/07/2012 22:26:58 +0000</td> | ||
1555 | <td class="comment">MiniUPnP daemon source code</td> | ||
1556 | <td></td> | ||
1557 | </tr> | ||
1558 | <tr> | ||
1559 | <td class="filename"><a href='download.php?file=miniupnpd-1.7.tar.gz'>miniupnpd-1.7.tar.gz</a></td> | ||
1560 | <td class="filesize">138047</td> | ||
1561 | <td class="filedate">27/05/2012 23:13:30 +0000</td> | ||
1562 | <td class="comment">MiniUPnP daemon release source code</td> | ||
1563 | <td></td> | ||
1564 | </tr> | ||
1565 | <tr> | ||
1566 | <td class="filename"><a href='download.php?file=miniupnpc-1.7.tar.gz'>miniupnpc-1.7.tar.gz</a></td> | ||
1567 | <td class="filesize">68327</td> | ||
1568 | <td class="filedate">24/05/2012 18:17:48 +0000</td> | ||
1569 | <td class="comment">MiniUPnP client release source code</td> | ||
1570 | <td></td> | ||
1571 | </tr> | ||
1572 | <tr> | ||
1573 | <td class="filename"><a href='download.php?file=minissdpd-1.2.tar.gz'>minissdpd-1.2.tar.gz</a></td> | ||
1574 | <td class="filesize">19874</td> | ||
1575 | <td class="filedate">24/05/2012 18:06:24 +0000</td> | ||
1576 | <td class="comment">MiniSSDPd release source code</td> | ||
1577 | <td></td> | ||
1578 | </tr> | ||
1579 | <tr> | ||
1580 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120509.tar.gz'>miniupnpd-1.6.20120509.tar.gz</a></td> | ||
1581 | <td class="filesize">137147</td> | ||
1582 | <td class="filedate">09/05/2012 10:45:44 +0000</td> | ||
1583 | <td class="comment">MiniUPnP daemon source code</td> | ||
1584 | <td></td> | ||
1585 | </tr> | ||
1586 | <tr> | ||
1587 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120509.tar.gz'>miniupnpc-1.6.20120509.tar.gz</a></td> | ||
1588 | <td class="filesize">68205</td> | ||
1589 | <td class="filedate">09/05/2012 10:45:41 +0000</td> | ||
1590 | <td class="comment">MiniUPnP client source code</td> | ||
1591 | <td></td> | ||
1592 | </tr> | ||
1593 | <tr> | ||
1594 | <td class="filename"><a href='download.php?file=minissdpd-1.1.20120509.tar.gz'>minissdpd-1.1.20120509.tar.gz</a></td> | ||
1595 | <td class="filesize">18123</td> | ||
1596 | <td class="filedate">09/05/2012 10:45:39 +0000</td> | ||
1597 | <td class="comment">MiniSSDPd source code</td> | ||
1598 | <td></td> | ||
1599 | </tr> | ||
1600 | <tr> | ||
1601 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120502.tar.gz'>miniupnpd-1.6.20120502.tar.gz</a></td> | ||
1602 | <td class="filesize">136688</td> | ||
1603 | <td class="filedate">01/05/2012 22:51:18 +0000</td> | ||
1604 | <td class="comment">MiniUPnP daemon source code</td> | ||
1605 | <td></td> | ||
1606 | </tr> | ||
1607 | <tr> | ||
1608 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120502.tar.gz'>miniupnpc-1.6.20120502.tar.gz</a></td> | ||
1609 | <td class="filesize">68170</td> | ||
1610 | <td class="filedate">01/05/2012 22:51:11 +0000</td> | ||
1611 | <td class="comment">MiniUPnP client source code</td> | ||
1612 | <td></td> | ||
1613 | </tr> | ||
1614 | <tr> | ||
1615 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120426.tar.gz'>miniupnpd-1.6.20120426.tar.gz</a></td> | ||
1616 | <td class="filesize">134764</td> | ||
1617 | <td class="filedate">26/04/2012 16:24:29 +0000</td> | ||
1618 | <td class="comment">MiniUPnP daemon source code</td> | ||
1619 | <td></td> | ||
1620 | </tr> | ||
1621 | <tr> | ||
1622 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120424.tar.gz'>miniupnpd-1.6.20120424.tar.gz</a></td> | ||
1623 | <td class="filesize">132522</td> | ||
1624 | <td class="filedate">23/04/2012 22:43:17 +0000</td> | ||
1625 | <td class="comment">MiniUPnP daemon source code</td> | ||
1626 | <td></td> | ||
1627 | </tr> | ||
1628 | <tr> | ||
1629 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120424.tar.gz'>miniupnpc-1.6.20120424.tar.gz</a></td> | ||
1630 | <td class="filesize">68067</td> | ||
1631 | <td class="filedate">23/04/2012 22:43:10 +0000</td> | ||
1632 | <td class="comment">MiniUPnP client source code</td> | ||
1633 | <td></td> | ||
1634 | </tr> | ||
1635 | <tr> | ||
1636 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120420.tar.gz'>miniupnpd-1.6.20120420.tar.gz</a></td> | ||
1637 | <td class="filesize">131972</td> | ||
1638 | <td class="filedate">20/04/2012 14:58:57 +0000</td> | ||
1639 | <td class="comment">MiniUPnP daemon source code</td> | ||
1640 | <td></td> | ||
1641 | </tr> | ||
1642 | <tr> | ||
1643 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120420.tar.gz'>miniupnpc-1.6.20120420.tar.gz</a></td> | ||
1644 | <td class="filesize">68068</td> | ||
1645 | <td class="filedate">20/04/2012 14:58:39 +0000</td> | ||
1646 | <td class="comment">MiniUPnP client source code</td> | ||
1647 | <td></td> | ||
1648 | </tr> | ||
1649 | <tr> | ||
1650 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120419.tar.gz'>miniupnpd-1.6.20120419.tar.gz</a></td> | ||
1651 | <td class="filesize">131088</td> | ||
1652 | <td class="filedate">18/04/2012 23:41:36 +0000</td> | ||
1653 | <td class="comment">MiniUPnP daemon source code</td> | ||
1654 | <td></td> | ||
1655 | </tr> | ||
1656 | <tr> | ||
1657 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120418.tar.gz'>miniupnpd-1.6.20120418.tar.gz</a></td> | ||
1658 | <td class="filesize">130879</td> | ||
1659 | <td class="filedate">18/04/2012 21:01:10 +0000</td> | ||
1660 | <td class="comment">MiniUPnP daemon source code</td> | ||
1661 | <td></td> | ||
1662 | </tr> | ||
1663 | <tr> | ||
1664 | <td class="filename"><a href='download.php?file=minissdpd-1.1.20120410.tar.gz'>minissdpd-1.1.20120410.tar.gz</a></td> | ||
1665 | <td class="filesize">18059</td> | ||
1666 | <td class="filedate">09/04/2012 22:45:38 +0000</td> | ||
1667 | <td class="comment">MiniSSDPd source code</td> | ||
1668 | <td></td> | ||
1669 | </tr> | ||
1670 | <tr> | ||
1671 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120410.tar.gz'>miniupnpc-1.6.20120410.tar.gz</a></td> | ||
1672 | <td class="filesize">67934</td> | ||
1673 | <td class="filedate">09/04/2012 22:45:10 +0000</td> | ||
1674 | <td class="comment">MiniUPnP client source code</td> | ||
1675 | <td></td> | ||
1676 | </tr> | ||
1677 | <tr> | ||
1678 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120406.tar.gz'>miniupnpd-1.6.20120406.tar.gz</a></td> | ||
1679 | <td class="filesize">128992</td> | ||
1680 | <td class="filedate">06/04/2012 17:52:57 +0000</td> | ||
1681 | <td class="comment">MiniUPnP daemon source code</td> | ||
1682 | <td></td> | ||
1683 | </tr> | ||
1684 | <tr> | ||
1685 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120320.tar.gz'>miniupnpc-1.6.20120320.tar.gz</a></td> | ||
1686 | <td class="filesize">67374</td> | ||
1687 | <td class="filedate">20/03/2012 16:55:48 +0000</td> | ||
1688 | <td class="comment">MiniUPnP client source code</td> | ||
1689 | <td></td> | ||
1690 | </tr> | ||
1691 | <tr> | ||
1692 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120320.tar.gz'>miniupnpd-1.6.20120320.tar.gz</a></td> | ||
1693 | <td class="filesize">127968</td> | ||
1694 | <td class="filedate">20/03/2012 16:46:07 +0000</td> | ||
1695 | <td class="comment">MiniUPnP daemon source code</td> | ||
1696 | <td></td> | ||
1697 | </tr> | ||
1698 | <tr> | ||
1699 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120305.tar.gz'>miniupnpd-1.6.20120305.tar.gz</a></td> | ||
1700 | <td class="filesize">126985</td> | ||
1701 | <td class="filedate">05/03/2012 20:42:01 +0000</td> | ||
1702 | <td class="comment">MiniUPnP daemon source code</td> | ||
1703 | <td></td> | ||
1704 | </tr> | ||
1705 | <tr> | ||
1706 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120207.tar.gz'>miniupnpd-1.6.20120207.tar.gz</a></td> | ||
1707 | <td class="filesize">127425</td> | ||
1708 | <td class="filedate">07/02/2012 10:21:16 +0000</td> | ||
1709 | <td class="comment">MiniUPnP daemon source code</td> | ||
1710 | <td></td> | ||
1711 | </tr> | ||
1712 | <tr> | ||
1713 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120203.tar.gz'>miniupnpd-1.6.20120203.tar.gz</a></td> | ||
1714 | <td class="filesize">126599</td> | ||
1715 | <td class="filedate">03/02/2012 15:14:13 +0000</td> | ||
1716 | <td class="comment">MiniUPnP daemon source code</td> | ||
1717 | <td></td> | ||
1718 | </tr> | ||
1719 | <tr> | ||
1720 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120125.tar.gz'>miniupnpc-1.6.20120125.tar.gz</a></td> | ||
1721 | <td class="filesize">67354</td> | ||
1722 | <td class="filedate">25/01/2012 21:12:28 +0000</td> | ||
1723 | <td class="comment">MiniUPnP client source code</td> | ||
1724 | <td></td> | ||
1725 | </tr> | ||
1726 | <tr> | ||
1727 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120121.tar.gz'>miniupnpc-1.6.20120121.tar.gz</a></td> | ||
1728 | <td class="filesize">67347</td> | ||
1729 | <td class="filedate">21/01/2012 14:07:41 +0000</td> | ||
1730 | <td class="comment">MiniUPnP client source code</td> | ||
1731 | <td></td> | ||
1732 | </tr> | ||
1733 | <tr> | ||
1734 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120121.tar.gz'>miniupnpd-1.6.20120121.tar.gz</a></td> | ||
1735 | <td class="filesize">126021</td> | ||
1736 | <td class="filedate">21/01/2012 14:07:33 +0000</td> | ||
1737 | <td class="comment">MiniUPnP daemon source code</td> | ||
1738 | <td></td> | ||
1739 | </tr> | ||
1740 | <tr> | ||
1741 | <td class="filename"><a href='download.php?file=minissdpd-1.1.20120121.tar.gz'>minissdpd-1.1.20120121.tar.gz</a></td> | ||
1742 | <td class="filesize">17762</td> | ||
1743 | <td class="filedate">21/01/2012 14:07:16 +0000</td> | ||
1744 | <td class="comment">MiniSSDPd source code</td> | ||
1745 | <td></td> | ||
1746 | </tr> | ||
1747 | <tr> | ||
1748 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20120121.zip'>upnpc-exe-win32-20120121.zip</a></td> | ||
1749 | <td class="filesize">94575</td> | ||
1750 | <td class="filedate">21/01/2012 13:59:11 +0000</td> | ||
1751 | <td class="comment">Windows executable</td> | ||
1752 | <td></td> | ||
1753 | </tr> | ||
1754 | <tr> | ||
1755 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20111212.zip'>upnpc-exe-win32-20111212.zip</a></td> | ||
1756 | <td class="filesize">94507</td> | ||
1757 | <td class="filedate">12/12/2011 12:33:48 +0000</td> | ||
1758 | <td class="comment">Windows executable</td> | ||
1759 | <td></td> | ||
1760 | </tr> | ||
1761 | <tr> | ||
1762 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20111118.tar.gz'>miniupnpd-1.6.20111118.tar.gz</a></td> | ||
1763 | <td class="filesize">125683</td> | ||
1764 | <td class="filedate">18/11/2011 11:26:12 +0000</td> | ||
1765 | <td class="comment">MiniUPnP daemon source code</td> | ||
1766 | <td></td> | ||
1767 | </tr> | ||
1768 | <tr> | ||
1769 | <td class="filename"><a href='download.php?file=minissdpd-1.1.20111007.tar.gz'>minissdpd-1.1.20111007.tar.gz</a></td> | ||
1770 | <td class="filesize">17611</td> | ||
1771 | <td class="filedate">07/10/2011 09:47:51 +0000</td> | ||
1772 | <td class="comment">MiniSSDPd source code</td> | ||
1773 | <td></td> | ||
1774 | </tr> | ||
1775 | <tr> | ||
1776 | <td class="filename"><a href='download.php?file=xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td> | ||
1777 | <td class="filesize">10329</td> | ||
1778 | <td class="filedate">11/08/2011 15:18:25 +0000</td> | ||
1779 | <td class="comment">Patch to add UPnP capabilities to xchat</td> | ||
1780 | <td></td> | ||
1781 | </tr> | ||
1782 | <tr> | ||
1783 | <td class="filename"><a href='download.php?file=xchat-upnp20110811-2.8.8.patch'>xchat-upnp20110811-2.8.8.patch</a></td> | ||
1784 | <td class="filesize">11529</td> | ||
1785 | <td class="filedate">11/08/2011 15:18:23 +0000</td> | ||
1786 | <td class="comment">Patch to add UPnP capabilities to xchat</td> | ||
1787 | <td></td> | ||
1788 | </tr> | ||
1789 | <tr> | ||
1790 | <td class="filename"><a href='download.php?file=libnatpmp-20110808.tar.gz'>libnatpmp-20110808.tar.gz</a></td> | ||
1791 | <td class="filesize">17762</td> | ||
1792 | <td class="filedate">08/08/2011 21:21:34 +0000</td> | ||
1793 | <td class="comment">libnatpmp source code</td> | ||
1794 | <td></td> | ||
1795 | </tr> | ||
1796 | <tr> | ||
1797 | <td class="filename"><a href='download.php?file=libnatpmp-20110730.tar.gz'>libnatpmp-20110730.tar.gz</a></td> | ||
1798 | <td class="filesize">17687</td> | ||
1799 | <td class="filedate">30/07/2011 13:19:31 +0000</td> | ||
1800 | <td class="comment">libnatpmp source code</td> | ||
1801 | <td></td> | ||
1802 | </tr> | ||
1803 | <tr> | ||
1804 | <td class="filename"><a href='download.php?file=minissdpd-1.1.tar.gz'>minissdpd-1.1.tar.gz</a></td> | ||
1805 | <td class="filesize">17481</td> | ||
1806 | <td class="filedate">30/07/2011 13:17:30 +0000</td> | ||
1807 | <td class="comment">MiniSSDPd release source code</td> | ||
1808 | <td></td> | ||
1809 | </tr> | ||
1810 | <tr> | ||
1811 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.20110730.tar.gz'>miniupnpd-1.6.20110730.tar.gz</a></td> | ||
1812 | <td class="filesize">125583</td> | ||
1813 | <td class="filedate">30/07/2011 13:17:09 +0000</td> | ||
1814 | <td class="comment">MiniUPnP daemon source code</td> | ||
1815 | <td></td> | ||
1816 | </tr> | ||
1817 | <tr> | ||
1818 | <td class="filename"><a href='download.php?file=minissdpd-1.0.20110729.tar.gz'>minissdpd-1.0.20110729.tar.gz</a></td> | ||
1819 | <td class="filesize">15898</td> | ||
1820 | <td class="filedate">29/07/2011 08:47:26 +0000</td> | ||
1821 | <td class="comment">MiniSSDPd source code</td> | ||
1822 | <td></td> | ||
1823 | </tr> | ||
1824 | <tr> | ||
1825 | <td class="filename"><a href='download.php?file=miniupnpc-1.6.tar.gz'>miniupnpc-1.6.tar.gz</a></td> | ||
1826 | <td class="filesize">66454</td> | ||
1827 | <td class="filedate">25/07/2011 18:03:09 +0000</td> | ||
1828 | <td class="comment">MiniUPnP client release source code</td> | ||
1829 | <td></td> | ||
1830 | </tr> | ||
1831 | <tr> | ||
1832 | <td class="filename"><a href='download.php?file=miniupnpd-1.6.tar.gz'>miniupnpd-1.6.tar.gz</a></td> | ||
1833 | <td class="filesize">124917</td> | ||
1834 | <td class="filedate">25/07/2011 16:37:57 +0000</td> | ||
1835 | <td class="comment">MiniUPnP daemon release source code</td> | ||
1836 | <td></td> | ||
1837 | </tr> | ||
1838 | <tr> | ||
1839 | <td class="filename"><a href='download.php?file=minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td> | ||
1840 | <td class="filesize">7598</td> | ||
1841 | <td class="filedate">25/07/2011 14:57:50 +0000</td> | ||
1842 | <td class="comment">Patch for MiniDLNA to use miniSSDPD</td> | ||
1843 | <td></td> | ||
1844 | </tr> | ||
1845 | <tr> | ||
1846 | <td class="filename"><a href='download.php?file=libnatpmp-20110715.tar.gz'>libnatpmp-20110715.tar.gz</a></td> | ||
1847 | <td class="filesize">17943</td> | ||
1848 | <td class="filedate">15/07/2011 08:31:40 +0000</td> | ||
1849 | <td class="comment">libnatpmp source code</td> | ||
1850 | <td></td> | ||
1851 | </tr> | ||
1852 | <tr> | ||
1853 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110715.tar.gz'>miniupnpd-1.5.20110715.tar.gz</a></td> | ||
1854 | <td class="filesize">124519</td> | ||
1855 | <td class="filedate">15/07/2011 07:55:17 +0000</td> | ||
1856 | <td class="comment">MiniUPnP daemon source code</td> | ||
1857 | <td></td> | ||
1858 | </tr> | ||
1859 | <tr> | ||
1860 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110714.zip'>upnpc-exe-win32-20110714.zip</a></td> | ||
1861 | <td class="filesize">94236</td> | ||
1862 | <td class="filedate">13/07/2011 23:16:01 +0000</td> | ||
1863 | <td class="comment">Windows executable</td> | ||
1864 | <td></td> | ||
1865 | </tr> | ||
1866 | <tr> | ||
1867 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110623.tar.gz'>miniupnpd-1.5.20110623.tar.gz</a></td> | ||
1868 | <td class="filesize">123529</td> | ||
1869 | <td class="filedate">22/06/2011 22:29:15 +0000</td> | ||
1870 | <td class="comment">MiniUPnP daemon source code</td> | ||
1871 | <td></td> | ||
1872 | </tr> | ||
1873 | <tr> | ||
1874 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110620.tar.gz'>miniupnpd-1.5.20110620.tar.gz</a></td> | ||
1875 | <td class="filesize">123221</td> | ||
1876 | <td class="filedate">20/06/2011 14:11:11 +0000</td> | ||
1877 | <td class="comment">MiniUPnP daemon source code</td> | ||
1878 | <td></td> | ||
1879 | </tr> | ||
1880 | <tr> | ||
1881 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110618.tar.gz'>miniupnpd-1.5.20110618.tar.gz</a></td> | ||
1882 | <td class="filesize">123176</td> | ||
1883 | <td class="filedate">17/06/2011 23:29:18 +0000</td> | ||
1884 | <td class="comment">MiniUPnP daemon source code</td> | ||
1885 | <td></td> | ||
1886 | </tr> | ||
1887 | <tr> | ||
1888 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110618.tar.gz'>miniupnpc-1.5.20110618.tar.gz</a></td> | ||
1889 | <td class="filesize">66401</td> | ||
1890 | <td class="filedate">17/06/2011 23:29:17 +0000</td> | ||
1891 | <td class="comment">MiniUPnP client source code</td> | ||
1892 | <td></td> | ||
1893 | </tr> | ||
1894 | <tr> | ||
1895 | <td class="filename"><a href='download.php?file=libnatpmp-20110618.tar.gz'>libnatpmp-20110618.tar.gz</a></td> | ||
1896 | <td class="filesize">17901</td> | ||
1897 | <td class="filedate">17/06/2011 23:29:16 +0000</td> | ||
1898 | <td class="comment">libnatpmp source code</td> | ||
1899 | <td></td> | ||
1900 | </tr> | ||
1901 | <tr> | ||
1902 | <td class="filename"><a href='download.php?file=minissdpd-1.0.20110618.tar.gz'>minissdpd-1.0.20110618.tar.gz</a></td> | ||
1903 | <td class="filesize">15193</td> | ||
1904 | <td class="filedate">17/06/2011 23:29:16 +0000</td> | ||
1905 | <td class="comment">MiniSSDPd source code</td> | ||
1906 | <td></td> | ||
1907 | </tr> | ||
1908 | <tr> | ||
1909 | <td class="filename" colspan="2"><a href='download.php?file=minidlna_cvs20110529_minissdp1.patch'>minidlna_cvs20110529_minissdp1.patch</a></td> | ||
1910 | <td class="filedate">29/05/2011 21:19:09 +0000</td> | ||
1911 | <td class="comment">Patch for MiniDLNA to use miniSSDPD</td> | ||
1912 | <td></td> | ||
1913 | </tr> | ||
1914 | <tr> | ||
1915 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110528.tar.gz'>miniupnpd-1.5.20110528.tar.gz</a></td> | ||
1916 | <td class="filesize">121985</td> | ||
1917 | <td class="filedate">28/05/2011 09:39:04 +0000</td> | ||
1918 | <td class="comment">MiniUPnP daemon source code</td> | ||
1919 | <td></td> | ||
1920 | </tr> | ||
1921 | <tr> | ||
1922 | <td class="filename"><a href='download.php?file=minidlna_1.0.19_minissdp1.patch'>minidlna_1.0.19_minissdp1.patch</a></td> | ||
1923 | <td class="filesize">9080</td> | ||
1924 | <td class="filedate">27/05/2011 09:55:04 +0000</td> | ||
1925 | <td class="comment">Patch for MiniDLNA to use miniSSDPD</td> | ||
1926 | <td></td> | ||
1927 | </tr> | ||
1928 | <tr> | ||
1929 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110527.tar.gz'>miniupnpd-1.5.20110527.tar.gz</a></td> | ||
1930 | <td class="filesize">120896</td> | ||
1931 | <td class="filedate">27/05/2011 08:28:35 +0000</td> | ||
1932 | <td class="comment">MiniUPnP daemon source code</td> | ||
1933 | <td></td> | ||
1934 | </tr> | ||
1935 | <tr> | ||
1936 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110527.tar.gz'>miniupnpc-1.5.20110527.tar.gz</a></td> | ||
1937 | <td class="filesize">66279</td> | ||
1938 | <td class="filedate">27/05/2011 08:28:34 +0000</td> | ||
1939 | <td class="comment">MiniUPnP client source code</td> | ||
1940 | <td></td> | ||
1941 | </tr> | ||
1942 | <tr> | ||
1943 | <td class="filename"><a href='download.php?file=libnatpmp-20110527.tar.gz'>libnatpmp-20110527.tar.gz</a></td> | ||
1944 | <td class="filesize">17627</td> | ||
1945 | <td class="filedate">27/05/2011 08:28:33 +0000</td> | ||
1946 | <td class="comment">libnatpmp source code</td> | ||
1947 | <td></td> | ||
1948 | </tr> | ||
1949 | <tr> | ||
1950 | <td class="filename"><a href='download.php?file=minissdpd-1.0.20110523.tar.gz'>minissdpd-1.0.20110523.tar.gz</a></td> | ||
1951 | <td class="filesize">15024</td> | ||
1952 | <td class="filedate">23/05/2011 12:55:31 +0000</td> | ||
1953 | <td class="comment">MiniSSDPd source code</td> | ||
1954 | <td></td> | ||
1955 | </tr> | ||
1956 | <tr> | ||
1957 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110520.tar.gz'>miniupnpd-1.5.20110520.tar.gz</a></td> | ||
1958 | <td class="filesize">119227</td> | ||
1959 | <td class="filedate">20/05/2011 18:00:41 +0000</td> | ||
1960 | <td class="comment">MiniUPnP daemon source code</td> | ||
1961 | <td></td> | ||
1962 | </tr> | ||
1963 | <tr> | ||
1964 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110519.tar.gz'>miniupnpd-1.5.20110519.tar.gz</a></td> | ||
1965 | <td class="filesize">114735</td> | ||
1966 | <td class="filedate">18/05/2011 22:29:06 +0000</td> | ||
1967 | <td class="comment">MiniUPnP daemon source code</td> | ||
1968 | <td></td> | ||
1969 | </tr> | ||
1970 | <tr> | ||
1971 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110516.tar.gz'>miniupnpd-1.5.20110516.tar.gz</a></td> | ||
1972 | <td class="filesize">113348</td> | ||
1973 | <td class="filedate">16/05/2011 09:32:51 +0000</td> | ||
1974 | <td class="comment">MiniUPnP daemon source code</td> | ||
1975 | <td></td> | ||
1976 | </tr> | ||
1977 | <tr> | ||
1978 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110515.tar.gz'>miniupnpd-1.5.20110515.tar.gz</a></td> | ||
1979 | <td class="filesize">113135</td> | ||
1980 | <td class="filedate">15/05/2011 21:51:29 +0000</td> | ||
1981 | <td class="comment">MiniUPnP daemon source code</td> | ||
1982 | <td></td> | ||
1983 | </tr> | ||
1984 | <tr> | ||
1985 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110515.tar.gz'>miniupnpc-1.5.20110515.tar.gz</a></td> | ||
1986 | <td class="filesize">66112</td> | ||
1987 | <td class="filedate">15/05/2011 21:51:28 +0000</td> | ||
1988 | <td class="comment">MiniUPnP client source code</td> | ||
1989 | <td></td> | ||
1990 | </tr> | ||
1991 | <tr> | ||
1992 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110513.tar.gz'>miniupnpd-1.5.20110513.tar.gz</a></td> | ||
1993 | <td class="filesize">111029</td> | ||
1994 | <td class="filedate">13/05/2011 14:03:12 +0000</td> | ||
1995 | <td class="comment">MiniUPnP daemon source code</td> | ||
1996 | <td></td> | ||
1997 | </tr> | ||
1998 | <tr> | ||
1999 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110506.tar.gz'>miniupnpc-1.5.20110506.tar.gz</a></td> | ||
2000 | <td class="filesize">65536</td> | ||
2001 | <td class="filedate">06/05/2011 16:35:38 +0000</td> | ||
2002 | <td class="comment">MiniUPnP client source code</td> | ||
2003 | <td></td> | ||
2004 | </tr> | ||
2005 | <tr> | ||
2006 | <td class="filename"><a href='download.php?file=miniupnpc-1.4-v6.20100505.zip'>miniupnpc-1.4-v6.20100505.zip</a></td> | ||
2007 | <td class="filesize">91833</td> | ||
2008 | <td class="filedate">18/04/2011 20:14:11 +0000</td> | ||
2009 | <td class="comment"></td> | ||
2010 | <td></td> | ||
2011 | </tr> | ||
2012 | <tr> | ||
2013 | <td class="filename"><a href='download.php?file=miniupnpd-1.4-v6.20100823.zip'>miniupnpd-1.4-v6.20100823.zip</a></td> | ||
2014 | <td class="filesize">222235</td> | ||
2015 | <td class="filedate">18/04/2011 20:14:07 +0000</td> | ||
2016 | <td class="comment"></td> | ||
2017 | <td></td> | ||
2018 | </tr> | ||
2019 | <tr> | ||
2020 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110418.tar.gz'>miniupnpc-1.5.20110418.tar.gz</a></td> | ||
2021 | <td class="filesize">61820</td> | ||
2022 | <td class="filedate">18/04/2011 20:09:22 +0000</td> | ||
2023 | <td class="comment">MiniUPnP client source code</td> | ||
2024 | <td></td> | ||
2025 | </tr> | ||
2026 | <tr> | ||
2027 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110418.zip'>upnpc-exe-win32-20110418.zip</a></td> | ||
2028 | <td class="filesize">94183</td> | ||
2029 | <td class="filedate">18/04/2011 17:53:26 +0000</td> | ||
2030 | <td class="comment">Windows executable</td> | ||
2031 | <td></td> | ||
2032 | </tr> | ||
2033 | <tr> | ||
2034 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110314.tar.gz'>miniupnpc-1.5.20110314.tar.gz</a></td> | ||
2035 | <td class="filesize">57210</td> | ||
2036 | <td class="filedate">14/03/2011 14:27:29 +0000</td> | ||
2037 | <td class="comment">MiniUPnP client source code</td> | ||
2038 | <td></td> | ||
2039 | </tr> | ||
2040 | <tr> | ||
2041 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110309.tar.gz'>miniupnpd-1.5.20110309.tar.gz</a></td> | ||
2042 | <td class="filesize">100073</td> | ||
2043 | <td class="filedate">09/03/2011 15:36:12 +0000</td> | ||
2044 | <td class="comment">MiniUPnP daemon source code</td> | ||
2045 | <td></td> | ||
2046 | </tr> | ||
2047 | <tr> | ||
2048 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110302.tar.gz'>miniupnpd-1.5.20110302.tar.gz</a></td> | ||
2049 | <td class="filesize">100756</td> | ||
2050 | <td class="filedate">02/03/2011 16:17:44 +0000</td> | ||
2051 | <td class="comment">MiniUPnP daemon source code</td> | ||
2052 | <td></td> | ||
2053 | </tr> | ||
2054 | <tr> | ||
2055 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110221.tar.gz'>miniupnpd-1.5.20110221.tar.gz</a></td> | ||
2056 | <td class="filesize">100092</td> | ||
2057 | <td class="filedate">20/02/2011 23:48:17 +0000</td> | ||
2058 | <td class="comment">MiniUPnP daemon source code</td> | ||
2059 | <td></td> | ||
2060 | </tr> | ||
2061 | <tr> | ||
2062 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110215.zip'>upnpc-exe-win32-20110215.zip</a></td> | ||
2063 | <td class="filesize">55409</td> | ||
2064 | <td class="filedate">15/02/2011 23:05:00 +0000</td> | ||
2065 | <td class="comment">Windows executable</td> | ||
2066 | <td></td> | ||
2067 | </tr> | ||
2068 | <tr> | ||
2069 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110215.tar.gz'>miniupnpc-1.5.20110215.tar.gz</a></td> | ||
2070 | <td class="filesize">54880</td> | ||
2071 | <td class="filedate">15/02/2011 11:16:04 +0000</td> | ||
2072 | <td class="comment">MiniUPnP client source code</td> | ||
2073 | <td></td> | ||
2074 | </tr> | ||
2075 | <tr> | ||
2076 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110214.tar.gz'>miniupnpd-1.5.20110214.tar.gz</a></td> | ||
2077 | <td class="filesize">99629</td> | ||
2078 | <td class="filedate">14/02/2011 18:00:43 +0000</td> | ||
2079 | <td class="comment">MiniUPnP daemon source code</td> | ||
2080 | <td></td> | ||
2081 | </tr> | ||
2082 | <tr> | ||
2083 | <td class="filename"><a href='download.php?file=minidlna_1.0.18_minissdp1.patch'>minidlna_1.0.18_minissdp1.patch</a></td> | ||
2084 | <td class="filesize">9747</td> | ||
2085 | <td class="filedate">02/02/2011 15:12:19 +0000</td> | ||
2086 | <td class="comment">Patch for MiniDLNA to use miniSSDPD</td> | ||
2087 | <td></td> | ||
2088 | </tr> | ||
2089 | <tr> | ||
2090 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110127.tar.gz'>miniupnpd-1.5.20110127.tar.gz</a></td> | ||
2091 | <td class="filesize">97421</td> | ||
2092 | <td class="filedate">27/01/2011 17:51:25 +0000</td> | ||
2093 | <td class="comment">MiniUPnP daemon source code</td> | ||
2094 | <td></td> | ||
2095 | </tr> | ||
2096 | <tr> | ||
2097 | <td class="filename"><a href='download.php?file=miniupnpd-1.5.tar.gz'>miniupnpd-1.5.tar.gz</a></td> | ||
2098 | <td class="filesize">98993</td> | ||
2099 | <td class="filedate">04/01/2011 09:45:10 +0000</td> | ||
2100 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2101 | <td></td> | ||
2102 | </tr> | ||
2103 | <tr> | ||
2104 | <td class="filename"><a href='download.php?file=miniupnpc-1.5.tar.gz'>miniupnpc-1.5.tar.gz</a></td> | ||
2105 | <td class="filesize">53309</td> | ||
2106 | <td class="filedate">04/01/2011 09:45:06 +0000</td> | ||
2107 | <td class="comment">MiniUPnP client release source code</td> | ||
2108 | <td></td> | ||
2109 | </tr> | ||
2110 | <tr> | ||
2111 | <td class="filename"><a href='download.php?file=libnatpmp-20110103.tar.gz'>libnatpmp-20110103.tar.gz</a></td> | ||
2112 | <td class="filesize">17529</td> | ||
2113 | <td class="filedate">03/01/2011 17:33:16 +0000</td> | ||
2114 | <td class="comment">libnatpmp source code</td> | ||
2115 | <td></td> | ||
2116 | </tr> | ||
2117 | <tr> | ||
2118 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101221.tar.gz'>miniupnpc-1.4.20101221.tar.gz</a></td> | ||
2119 | <td class="filesize">52342</td> | ||
2120 | <td class="filedate">21/12/2010 16:15:38 +0000</td> | ||
2121 | <td class="comment">MiniUPnP client source code</td> | ||
2122 | <td></td> | ||
2123 | </tr> | ||
2124 | <tr> | ||
2125 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20101213.zip'>upnpc-exe-win32-20101213.zip</a></td> | ||
2126 | <td class="filesize">52359</td> | ||
2127 | <td class="filedate">12/12/2010 23:44:01 +0000</td> | ||
2128 | <td class="comment">Windows executable</td> | ||
2129 | <td></td> | ||
2130 | </tr> | ||
2131 | <tr> | ||
2132 | <td class="filename"><a href='download.php?file=libnatpmp-20101211.tar.gz'>libnatpmp-20101211.tar.gz</a></td> | ||
2133 | <td class="filesize">17324</td> | ||
2134 | <td class="filedate">11/12/2010 17:20:36 +0000</td> | ||
2135 | <td class="comment">libnatpmp source code</td> | ||
2136 | <td></td> | ||
2137 | </tr> | ||
2138 | <tr> | ||
2139 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101209.tar.gz'>miniupnpc-1.4.20101209.tar.gz</a></td> | ||
2140 | <td class="filesize">51900</td> | ||
2141 | <td class="filedate">09/12/2010 16:17:30 +0000</td> | ||
2142 | <td class="comment">MiniUPnP client source code</td> | ||
2143 | <td></td> | ||
2144 | </tr> | ||
2145 | <tr> | ||
2146 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100921.tar.gz'>miniupnpd-1.4.20100921.tar.gz</a></td> | ||
2147 | <td class="filesize">95483</td> | ||
2148 | <td class="filedate">21/09/2010 15:50:00 +0000</td> | ||
2149 | <td class="comment">MiniUPnP daemon source code</td> | ||
2150 | <td></td> | ||
2151 | </tr> | ||
2152 | <tr> | ||
2153 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100825.zip'>upnpc-exe-win32-20100825.zip</a></td> | ||
2154 | <td class="filesize">50636</td> | ||
2155 | <td class="filedate">25/08/2010 08:42:59 +0000</td> | ||
2156 | <td class="comment">Windows executable</td> | ||
2157 | <td></td> | ||
2158 | </tr> | ||
2159 | <tr> | ||
2160 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100609.tar.gz'>miniupnpc-1.4.20100609.tar.gz</a></td> | ||
2161 | <td class="filesize">50390</td> | ||
2162 | <td class="filedate">09/06/2010 11:03:11 +0000</td> | ||
2163 | <td class="comment">MiniUPnP client source code</td> | ||
2164 | <td></td> | ||
2165 | </tr> | ||
2166 | <tr> | ||
2167 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100513.zip'>upnpc-exe-win32-20100513.zip</a></td> | ||
2168 | <td class="filesize">50950</td> | ||
2169 | <td class="filedate">13/05/2010 16:54:33 +0000</td> | ||
2170 | <td class="comment">Windows executable</td> | ||
2171 | <td></td> | ||
2172 | </tr> | ||
2173 | <tr> | ||
2174 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100511.tar.gz'>miniupnpd-1.4.20100511.tar.gz</a></td> | ||
2175 | <td class="filesize">93281</td> | ||
2176 | <td class="filedate">11/05/2010 16:22:33 +0000</td> | ||
2177 | <td class="comment">MiniUPnP daemon source code</td> | ||
2178 | <td></td> | ||
2179 | </tr> | ||
2180 | <tr> | ||
2181 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100418.zip'>upnpc-exe-win32-20100418.zip</a></td> | ||
2182 | <td class="filesize">40758</td> | ||
2183 | <td class="filedate">17/04/2010 23:00:37 +0000</td> | ||
2184 | <td class="comment">Windows executable</td> | ||
2185 | <td></td> | ||
2186 | </tr> | ||
2187 | <tr> | ||
2188 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100418.tar.gz'>miniupnpc-1.4.20100418.tar.gz</a></td> | ||
2189 | <td class="filesize">50245</td> | ||
2190 | <td class="filedate">17/04/2010 22:18:31 +0000</td> | ||
2191 | <td class="comment">MiniUPnP client source code</td> | ||
2192 | <td></td> | ||
2193 | </tr> | ||
2194 | <tr> | ||
2195 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100412.tar.gz'>miniupnpc-1.4.20100412.tar.gz</a></td> | ||
2196 | <td class="filesize">50145</td> | ||
2197 | <td class="filedate">12/04/2010 20:42:53 +0000</td> | ||
2198 | <td class="comment">MiniUPnP client source code</td> | ||
2199 | <td></td> | ||
2200 | </tr> | ||
2201 | <tr> | ||
2202 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100407.tar.gz'>miniupnpc-1.4.20100407.tar.gz</a></td> | ||
2203 | <td class="filesize">49756</td> | ||
2204 | <td class="filedate">07/04/2010 10:05:08 +0000</td> | ||
2205 | <td class="comment">MiniUPnP client source code</td> | ||
2206 | <td></td> | ||
2207 | </tr> | ||
2208 | <tr> | ||
2209 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100405.tar.gz'>miniupnpc-1.4.20100405.tar.gz</a></td> | ||
2210 | <td class="filesize">49549</td> | ||
2211 | <td class="filedate">05/04/2010 14:34:38 +0000</td> | ||
2212 | <td class="comment">MiniUPnP client source code</td> | ||
2213 | <td></td> | ||
2214 | </tr> | ||
2215 | <tr> | ||
2216 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100308.tar.gz'>miniupnpd-1.4.20100308.tar.gz</a></td> | ||
2217 | <td class="filesize">92889</td> | ||
2218 | <td class="filedate">08/03/2010 17:18:00 +0000</td> | ||
2219 | <td class="comment">MiniUPnP daemon source code</td> | ||
2220 | <td></td> | ||
2221 | </tr> | ||
2222 | <tr> | ||
2223 | <td class="filename"><a href='download.php?file=libnatpmp-20100202.tar.gz'>libnatpmp-20100202.tar.gz</a></td> | ||
2224 | <td class="filesize">17231</td> | ||
2225 | <td class="filedate">02/02/2010 18:41:13 +0000</td> | ||
2226 | <td class="comment">libnatpmp source code</td> | ||
2227 | <td></td> | ||
2228 | </tr> | ||
2229 | <tr> | ||
2230 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100202.tar.gz'>miniupnpc-1.4.20100202.tar.gz</a></td> | ||
2231 | <td class="filesize">46710</td> | ||
2232 | <td class="filedate">02/02/2010 18:41:13 +0000</td> | ||
2233 | <td class="comment">MiniUPnP client source code</td> | ||
2234 | <td></td> | ||
2235 | </tr> | ||
2236 | <tr> | ||
2237 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100106.tar.gz'>miniupnpc-1.4.20100106.tar.gz</a></td> | ||
2238 | <td class="filesize">46659</td> | ||
2239 | <td class="filedate">06/01/2010 10:08:21 +0000</td> | ||
2240 | <td class="comment">MiniUPnP client source code</td> | ||
2241 | <td></td> | ||
2242 | </tr> | ||
2243 | <tr> | ||
2244 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091222.tar.gz'>miniupnpd-1.4.20091222.tar.gz</a></td> | ||
2245 | <td class="filesize">90993</td> | ||
2246 | <td class="filedate">22/12/2009 17:23:48 +0000</td> | ||
2247 | <td class="comment">MiniUPnP daemon source code</td> | ||
2248 | <td></td> | ||
2249 | </tr> | ||
2250 | <tr> | ||
2251 | <td class="filename"><a href='download.php?file=libnatpmp-20091219.tar.gz'>libnatpmp-20091219.tar.gz</a></td> | ||
2252 | <td class="filesize">16839</td> | ||
2253 | <td class="filedate">19/12/2009 14:35:22 +0000</td> | ||
2254 | <td class="comment">libnatpmp source code</td> | ||
2255 | <td></td> | ||
2256 | </tr> | ||
2257 | <tr> | ||
2258 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091213.tar.gz'>miniupnpc-1.4.20091213.tar.gz</a></td> | ||
2259 | <td class="filesize">46510</td> | ||
2260 | <td class="filedate">12/12/2009 23:05:40 +0000</td> | ||
2261 | <td class="comment">MiniUPnP client source code</td> | ||
2262 | <td></td> | ||
2263 | </tr> | ||
2264 | <tr> | ||
2265 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091211.tar.gz'>miniupnpc-1.4.20091211.tar.gz</a></td> | ||
2266 | <td class="filesize">45852</td> | ||
2267 | <td class="filedate">11/12/2009 16:43:01 +0000</td> | ||
2268 | <td class="comment">MiniUPnP client source code</td> | ||
2269 | <td></td> | ||
2270 | </tr> | ||
2271 | <tr> | ||
2272 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20091210.zip'>upnpc-exe-win32-20091210.zip</a></td> | ||
2273 | <td class="filesize">38666</td> | ||
2274 | <td class="filedate">10/12/2009 18:50:27 +0000</td> | ||
2275 | <td class="comment">Windows executable</td> | ||
2276 | <td></td> | ||
2277 | </tr> | ||
2278 | <tr> | ||
2279 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091208.tar.gz'>miniupnpc-1.4.20091208.tar.gz</a></td> | ||
2280 | <td class="filesize">43392</td> | ||
2281 | <td class="filedate">08/12/2009 10:58:26 +0000</td> | ||
2282 | <td class="comment">MiniUPnP client source code</td> | ||
2283 | <td></td> | ||
2284 | </tr> | ||
2285 | <tr> | ||
2286 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091203.tar.gz'>miniupnpc-1.4.20091203.tar.gz</a></td> | ||
2287 | <td class="filesize">42040</td> | ||
2288 | <td class="filedate">03/12/2009 13:56:28 +0000</td> | ||
2289 | <td class="comment">MiniUPnP client source code</td> | ||
2290 | <td></td> | ||
2291 | </tr> | ||
2292 | <tr> | ||
2293 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091106.tar.gz'>miniupnpd-1.4.20091106.tar.gz</a></td> | ||
2294 | <td class="filesize">90787</td> | ||
2295 | <td class="filedate">06/11/2009 21:18:50 +0000</td> | ||
2296 | <td class="comment">MiniUPnP daemon source code</td> | ||
2297 | <td></td> | ||
2298 | </tr> | ||
2299 | <tr> | ||
2300 | <td class="filename"><a href='download.php?file=miniupnpd-1.4.tar.gz'>miniupnpd-1.4.tar.gz</a></td> | ||
2301 | <td class="filesize">90071</td> | ||
2302 | <td class="filedate">30/10/2009 09:20:05 +0000</td> | ||
2303 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2304 | <td></td> | ||
2305 | </tr> | ||
2306 | <tr> | ||
2307 | <td class="filename"><a href='download.php?file=miniupnpc-1.4.tar.gz'>miniupnpc-1.4.tar.gz</a></td> | ||
2308 | <td class="filesize">41790</td> | ||
2309 | <td class="filedate">30/10/2009 09:20:04 +0000</td> | ||
2310 | <td class="comment">MiniUPnP client release source code</td> | ||
2311 | <td></td> | ||
2312 | </tr> | ||
2313 | <tr> | ||
2314 | <td class="filename"><a href='download.php?file=miniupnpc-20091016.tar.gz'>miniupnpc-20091016.tar.gz</a></td> | ||
2315 | <td class="filesize">41792</td> | ||
2316 | <td class="filedate">16/10/2009 09:04:35 +0000</td> | ||
2317 | <td class="comment">MiniUPnP client source code</td> | ||
2318 | <td></td> | ||
2319 | </tr> | ||
2320 | <tr> | ||
2321 | <td class="filename"><a href='download.php?file=miniupnpd-20091010.tar.gz'>miniupnpd-20091010.tar.gz</a></td> | ||
2322 | <td class="filesize">90043</td> | ||
2323 | <td class="filedate">10/10/2009 19:21:30 +0000</td> | ||
2324 | <td class="comment">MiniUPnP daemon source code</td> | ||
2325 | <td></td> | ||
2326 | </tr> | ||
2327 | <tr> | ||
2328 | <td class="filename"><a href='download.php?file=miniupnpc-20091010.tar.gz'>miniupnpc-20091010.tar.gz</a></td> | ||
2329 | <td class="filesize">41671</td> | ||
2330 | <td class="filedate">10/10/2009 19:21:28 +0000</td> | ||
2331 | <td class="comment">MiniUPnP client source code</td> | ||
2332 | <td></td> | ||
2333 | </tr> | ||
2334 | <tr> | ||
2335 | <td class="filename"><a href='download.php?file=miniupnpd-20090921.tar.gz'>miniupnpd-20090921.tar.gz</a></td> | ||
2336 | <td class="filesize">89476</td> | ||
2337 | <td class="filedate">21/09/2009 13:00:04 +0000</td> | ||
2338 | <td class="comment">MiniUPnP daemon source code</td> | ||
2339 | <td></td> | ||
2340 | </tr> | ||
2341 | <tr> | ||
2342 | <td class="filename"><a href='download.php?file=miniupnpc-20090921.tar.gz'>miniupnpc-20090921.tar.gz</a></td> | ||
2343 | <td class="filesize">41630</td> | ||
2344 | <td class="filedate">21/09/2009 13:00:03 +0000</td> | ||
2345 | <td class="comment">MiniUPnP client source code</td> | ||
2346 | <td></td> | ||
2347 | </tr> | ||
2348 | <tr> | ||
2349 | <td class="filename"><a href='download.php?file=miniupnpd-20090904.tar.gz'>miniupnpd-20090904.tar.gz</a></td> | ||
2350 | <td class="filesize">89344</td> | ||
2351 | <td class="filedate">04/09/2009 16:24:26 +0000</td> | ||
2352 | <td class="comment">MiniUPnP daemon source code</td> | ||
2353 | <td></td> | ||
2354 | </tr> | ||
2355 | <tr> | ||
2356 | <td class="filename"><a href='download.php?file=miniupnpd-20090820.tar.gz'>miniupnpd-20090820.tar.gz</a></td> | ||
2357 | <td class="filesize">89149</td> | ||
2358 | <td class="filedate">20/08/2009 09:35:58 +0000</td> | ||
2359 | <td class="comment">MiniUPnP daemon source code</td> | ||
2360 | <td></td> | ||
2361 | </tr> | ||
2362 | <tr> | ||
2363 | <td class="filename"><a href='download.php?file=miniupnpc-20090807.tar.gz'>miniupnpc-20090807.tar.gz</a></td> | ||
2364 | <td class="filesize">41288</td> | ||
2365 | <td class="filedate">07/08/2009 14:46:11 +0000</td> | ||
2366 | <td class="comment">MiniUPnP client source code</td> | ||
2367 | <td></td> | ||
2368 | </tr> | ||
2369 | <tr> | ||
2370 | <td class="filename"><a href='download.php?file=miniupnpc-20090729.tar.gz'>miniupnpc-20090729.tar.gz</a></td> | ||
2371 | <td class="filesize">40588</td> | ||
2372 | <td class="filedate">29/07/2009 08:47:43 +0000</td> | ||
2373 | <td class="comment">MiniUPnP client source code</td> | ||
2374 | <td></td> | ||
2375 | </tr> | ||
2376 | <tr> | ||
2377 | <td class="filename"><a href='download.php?file=xchat-upnp20061022.patch'>xchat-upnp20061022.patch</a></td> | ||
2378 | <td class="filesize">10258</td> | ||
2379 | <td class="filedate">17/07/2009 15:49:46 +0000</td> | ||
2380 | <td class="comment">Patch to add UPnP capabilities to xchat</td> | ||
2381 | <td></td> | ||
2382 | </tr> | ||
2383 | <tr> | ||
2384 | <td class="filename"><a href='download.php?file=miniupnpc-20090713.tar.gz'>miniupnpc-20090713.tar.gz</a></td> | ||
2385 | <td class="filesize">40206</td> | ||
2386 | <td class="filedate">13/07/2009 08:53:49 +0000</td> | ||
2387 | <td class="comment">MiniUPnP client source code</td> | ||
2388 | <td></td> | ||
2389 | </tr> | ||
2390 | <tr> | ||
2391 | <td class="filename"><a href='download.php?file=libnatpmp-20090713.tar.gz'>libnatpmp-20090713.tar.gz</a></td> | ||
2392 | <td class="filesize">14262</td> | ||
2393 | <td class="filedate">13/07/2009 08:53:49 +0000</td> | ||
2394 | <td class="comment">libnatpmp source code</td> | ||
2395 | <td></td> | ||
2396 | </tr> | ||
2397 | <tr> | ||
2398 | <td class="filename"><a href='download.php?file=miniupnpd-20090605.tar.gz'>miniupnpd-20090605.tar.gz</a></td> | ||
2399 | <td class="filesize">83774</td> | ||
2400 | <td class="filedate">04/06/2009 23:32:20 +0000</td> | ||
2401 | <td class="comment">MiniUPnP daemon source code</td> | ||
2402 | <td></td> | ||
2403 | </tr> | ||
2404 | <tr> | ||
2405 | <td class="filename"><a href='download.php?file=miniupnpc-20090605.tar.gz'>miniupnpc-20090605.tar.gz</a></td> | ||
2406 | <td class="filesize">40077</td> | ||
2407 | <td class="filedate">04/06/2009 23:32:16 +0000</td> | ||
2408 | <td class="comment">MiniUPnP client source code</td> | ||
2409 | <td></td> | ||
2410 | </tr> | ||
2411 | <tr> | ||
2412 | <td class="filename"><a href='download.php?file=libnatpmp-20090605.tar.gz'>libnatpmp-20090605.tar.gz</a></td> | ||
2413 | <td class="filesize">13817</td> | ||
2414 | <td class="filedate">04/06/2009 23:32:15 +0000</td> | ||
2415 | <td class="comment">libnatpmp source code</td> | ||
2416 | <td></td> | ||
2417 | </tr> | ||
2418 | <tr> | ||
2419 | <td class="filename"><a href='download.php?file=miniupnpd-20090516.tar.gz'>miniupnpd-20090516.tar.gz</a></td> | ||
2420 | <td class="filesize">83689</td> | ||
2421 | <td class="filedate">16/05/2009 08:47:31 +0000</td> | ||
2422 | <td class="comment">MiniUPnP daemon source code</td> | ||
2423 | <td></td> | ||
2424 | </tr> | ||
2425 | <tr> | ||
2426 | <td class="filename"><a href='download.php?file=miniupnpc-1.3.tar.gz'>miniupnpc-1.3.tar.gz</a></td> | ||
2427 | <td class="filesize">40058</td> | ||
2428 | <td class="filedate">17/04/2009 21:27:55 +0000</td> | ||
2429 | <td class="comment">MiniUPnP client release source code</td> | ||
2430 | <td></td> | ||
2431 | </tr> | ||
2432 | <tr> | ||
2433 | <td class="filename"><a href='download.php?file=miniupnpd-1.3.tar.gz'>miniupnpd-1.3.tar.gz</a></td> | ||
2434 | <td class="filesize">83464</td> | ||
2435 | <td class="filedate">17/04/2009 20:11:21 +0000</td> | ||
2436 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2437 | <td></td> | ||
2438 | </tr> | ||
2439 | <tr> | ||
2440 | <td class="filename"><a href='download.php?file=libnatpmp-20090310.tar.gz'>libnatpmp-20090310.tar.gz</a></td> | ||
2441 | <td class="filesize">11847</td> | ||
2442 | <td class="filedate">10/03/2009 10:19:45 +0000</td> | ||
2443 | <td class="comment">libnatpmp source code</td> | ||
2444 | <td></td> | ||
2445 | </tr> | ||
2446 | <tr> | ||
2447 | <td class="filename"><a href='download.php?file=miniupnpd-20090214.tar.gz'>miniupnpd-20090214.tar.gz</a></td> | ||
2448 | <td class="filesize">82921</td> | ||
2449 | <td class="filedate">14/02/2009 11:27:03 +0000</td> | ||
2450 | <td class="comment">MiniUPnP daemon source code</td> | ||
2451 | <td></td> | ||
2452 | </tr> | ||
2453 | <tr> | ||
2454 | <td class="filename"><a href='download.php?file=miniupnpd-20090213.tar.gz'>miniupnpd-20090213.tar.gz</a></td> | ||
2455 | <td class="filesize">82594</td> | ||
2456 | <td class="filedate">13/02/2009 19:48:01 +0000</td> | ||
2457 | <td class="comment">MiniUPnP daemon source code</td> | ||
2458 | <td></td> | ||
2459 | </tr> | ||
2460 | <tr> | ||
2461 | <td class="filename"><a href='download.php?file=libnatpmp-20090129.tar.gz'>libnatpmp-20090129.tar.gz</a></td> | ||
2462 | <td class="filesize">11748</td> | ||
2463 | <td class="filedate">29/01/2009 21:50:31 +0000</td> | ||
2464 | <td class="comment">libnatpmp source code</td> | ||
2465 | <td></td> | ||
2466 | </tr> | ||
2467 | <tr> | ||
2468 | <td class="filename"><a href='download.php?file=miniupnpc-20090129.tar.gz'>miniupnpc-20090129.tar.gz</a></td> | ||
2469 | <td class="filesize">39976</td> | ||
2470 | <td class="filedate">29/01/2009 21:50:30 +0000</td> | ||
2471 | <td class="comment">MiniUPnP client source code</td> | ||
2472 | <td></td> | ||
2473 | </tr> | ||
2474 | <tr> | ||
2475 | <td class="filename"><a href='download.php?file=miniupnpd-20090129.tar.gz'>miniupnpd-20090129.tar.gz</a></td> | ||
2476 | <td class="filesize">82487</td> | ||
2477 | <td class="filedate">29/01/2009 21:50:27 +0000</td> | ||
2478 | <td class="comment">MiniUPnP daemon source code</td> | ||
2479 | <td></td> | ||
2480 | </tr> | ||
2481 | <tr> | ||
2482 | <td class="filename"><a href='download.php?file=miniupnpd-20081009.tar.gz'>miniupnpd-20081009.tar.gz</a></td> | ||
2483 | <td class="filesize">81732</td> | ||
2484 | <td class="filedate">09/10/2008 12:53:02 +0000</td> | ||
2485 | <td class="comment">MiniUPnP daemon source code</td> | ||
2486 | <td></td> | ||
2487 | </tr> | ||
2488 | <tr> | ||
2489 | <td class="filename"><a href='download.php?file=minissdpd-1.0.tar.gz'>minissdpd-1.0.tar.gz</a></td> | ||
2490 | <td class="filesize">12996</td> | ||
2491 | <td class="filedate">07/10/2008 14:03:49 +0000</td> | ||
2492 | <td class="comment">MiniSSDPd release source code</td> | ||
2493 | <td></td> | ||
2494 | </tr> | ||
2495 | <tr> | ||
2496 | <td class="filename"><a href='download.php?file=miniupnpc-1.2.tar.gz'>miniupnpc-1.2.tar.gz</a></td> | ||
2497 | <td class="filesize">38787</td> | ||
2498 | <td class="filedate">07/10/2008 14:03:47 +0000</td> | ||
2499 | <td class="comment">MiniUPnP client release source code</td> | ||
2500 | <td></td> | ||
2501 | </tr> | ||
2502 | <tr> | ||
2503 | <td class="filename"><a href='download.php?file=miniupnpd-1.2.tar.gz'>miniupnpd-1.2.tar.gz</a></td> | ||
2504 | <td class="filesize">81025</td> | ||
2505 | <td class="filedate">07/10/2008 14:03:45 +0000</td> | ||
2506 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2507 | <td></td> | ||
2508 | </tr> | ||
2509 | <tr> | ||
2510 | <td class="filename"><a href='download.php?file=miniupnpd-20081006.tar.gz'>miniupnpd-20081006.tar.gz</a></td> | ||
2511 | <td class="filesize">80510</td> | ||
2512 | <td class="filedate">06/10/2008 15:50:34 +0000</td> | ||
2513 | <td class="comment">MiniUPnP daemon source code</td> | ||
2514 | <td></td> | ||
2515 | </tr> | ||
2516 | <tr> | ||
2517 | <td class="filename"><a href='download.php?file=minissdpd-20081006.tar.gz'>minissdpd-20081006.tar.gz</a></td> | ||
2518 | <td class="filesize">12230</td> | ||
2519 | <td class="filedate">06/10/2008 15:50:33 +0000</td> | ||
2520 | <td class="comment">MiniSSDPd source code</td> | ||
2521 | <td></td> | ||
2522 | </tr> | ||
2523 | <tr> | ||
2524 | <td class="filename"><a href='download.php?file=libnatpmp-20081006.tar.gz'>libnatpmp-20081006.tar.gz</a></td> | ||
2525 | <td class="filesize">11710</td> | ||
2526 | <td class="filedate">06/10/2008 15:50:31 +0000</td> | ||
2527 | <td class="comment">libnatpmp source code</td> | ||
2528 | <td></td> | ||
2529 | </tr> | ||
2530 | <tr> | ||
2531 | <td class="filename" colspan="2"><a href='download.php?file=mediatomb_minissdp-20081006.patch'>mediatomb_minissdp-20081006.patch</a></td> | ||
2532 | <td class="filedate">06/10/2008 15:48:18 +0000</td> | ||
2533 | <td class="comment"></td> | ||
2534 | <td></td> | ||
2535 | </tr> | ||
2536 | <tr> | ||
2537 | <td class="filename"><a href='download.php?file=miniupnpc-20081002.tar.gz'>miniupnpc-20081002.tar.gz</a></td> | ||
2538 | <td class="filesize">38291</td> | ||
2539 | <td class="filedate">02/10/2008 09:20:18 +0000</td> | ||
2540 | <td class="comment">MiniUPnP client source code</td> | ||
2541 | <td></td> | ||
2542 | </tr> | ||
2543 | <tr> | ||
2544 | <td class="filename"><a href='download.php?file=miniupnpd-20081001.tar.gz'>miniupnpd-20081001.tar.gz</a></td> | ||
2545 | <td class="filesize">79696</td> | ||
2546 | <td class="filedate">01/10/2008 13:11:20 +0000</td> | ||
2547 | <td class="comment">MiniUPnP daemon source code</td> | ||
2548 | <td></td> | ||
2549 | </tr> | ||
2550 | <tr> | ||
2551 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080925.zip'>upnpc-exe-win32-20080925.zip</a></td> | ||
2552 | <td class="filesize">36602</td> | ||
2553 | <td class="filedate">25/09/2008 06:59:33 +0000</td> | ||
2554 | <td class="comment">Windows executable</td> | ||
2555 | <td></td> | ||
2556 | </tr> | ||
2557 | <tr> | ||
2558 | <td class="filename"><a href='download.php?file=miniupnpd-20080710.tar.gz'>miniupnpd-20080710.tar.gz</a></td> | ||
2559 | <td class="filesize">78898</td> | ||
2560 | <td class="filedate">10/07/2008 09:38:41 +0000</td> | ||
2561 | <td class="comment">MiniUPnP daemon source code</td> | ||
2562 | <td></td> | ||
2563 | </tr> | ||
2564 | <tr> | ||
2565 | <td class="filename"><a href='download.php?file=libnatpmp-20080707.tar.gz'>libnatpmp-20080707.tar.gz</a></td> | ||
2566 | <td class="filesize">11679</td> | ||
2567 | <td class="filedate">06/07/2008 22:05:23 +0000</td> | ||
2568 | <td class="comment">libnatpmp source code</td> | ||
2569 | <td></td> | ||
2570 | </tr> | ||
2571 | <tr> | ||
2572 | <td class="filename"><a href='download.php?file=miniupnpc-1.1.tar.gz'>miniupnpc-1.1.tar.gz</a></td> | ||
2573 | <td class="filesize">38235</td> | ||
2574 | <td class="filedate">04/07/2008 16:45:24 +0000</td> | ||
2575 | <td class="comment">MiniUPnP client release source code</td> | ||
2576 | <td></td> | ||
2577 | </tr> | ||
2578 | <tr> | ||
2579 | <td class="filename"><a href='download.php?file=miniupnpc-20080703.tar.gz'>miniupnpc-20080703.tar.gz</a></td> | ||
2580 | <td class="filesize">38204</td> | ||
2581 | <td class="filedate">03/07/2008 15:47:37 +0000</td> | ||
2582 | <td class="comment">MiniUPnP client source code</td> | ||
2583 | <td></td> | ||
2584 | </tr> | ||
2585 | <tr> | ||
2586 | <td class="filename"><a href='download.php?file=libnatpmp-20080703.tar.gz'>libnatpmp-20080703.tar.gz</a></td> | ||
2587 | <td class="filesize">11570</td> | ||
2588 | <td class="filedate">03/07/2008 15:47:25 +0000</td> | ||
2589 | <td class="comment">libnatpmp source code</td> | ||
2590 | <td></td> | ||
2591 | </tr> | ||
2592 | <tr> | ||
2593 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080703.zip'>upnpc-exe-win32-20080703.zip</a></td> | ||
2594 | <td class="filesize">36137</td> | ||
2595 | <td class="filedate">02/07/2008 23:35:14 +0000</td> | ||
2596 | <td class="comment">Windows executable</td> | ||
2597 | <td></td> | ||
2598 | </tr> | ||
2599 | <tr> | ||
2600 | <td class="filename"><a href='download.php?file=libnatpmp-20080702.tar.gz'>libnatpmp-20080702.tar.gz</a></td> | ||
2601 | <td class="filesize">8873</td> | ||
2602 | <td class="filedate">02/07/2008 17:32:35 +0000</td> | ||
2603 | <td class="comment">libnatpmp source code</td> | ||
2604 | <td></td> | ||
2605 | </tr> | ||
2606 | <tr> | ||
2607 | <td class="filename"><a href='download.php?file=libnatpmp-20080630.tar.gz'>libnatpmp-20080630.tar.gz</a></td> | ||
2608 | <td class="filesize">8864</td> | ||
2609 | <td class="filedate">30/06/2008 14:20:16 +0000</td> | ||
2610 | <td class="comment">libnatpmp source code</td> | ||
2611 | <td></td> | ||
2612 | </tr> | ||
2613 | <tr> | ||
2614 | <td class="filename"><a href='download.php?file=libnatpmp-20080529.tar.gz'>libnatpmp-20080529.tar.gz</a></td> | ||
2615 | <td class="filesize">7397</td> | ||
2616 | <td class="filedate">29/05/2008 09:06:25 +0000</td> | ||
2617 | <td class="comment">libnatpmp source code</td> | ||
2618 | <td></td> | ||
2619 | </tr> | ||
2620 | <tr> | ||
2621 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080514.zip'>upnpc-exe-win32-20080514.zip</a></td> | ||
2622 | <td class="filesize">14227</td> | ||
2623 | <td class="filedate">14/05/2008 20:23:19 +0000</td> | ||
2624 | <td class="comment">Windows executable</td> | ||
2625 | <td></td> | ||
2626 | </tr> | ||
2627 | <tr> | ||
2628 | <td class="filename"><a href='download.php?file=libnatpmp-20080428.tar.gz'>libnatpmp-20080428.tar.gz</a></td> | ||
2629 | <td class="filesize">7295</td> | ||
2630 | <td class="filedate">28/04/2008 03:09:14 +0000</td> | ||
2631 | <td class="comment">libnatpmp source code</td> | ||
2632 | <td></td> | ||
2633 | </tr> | ||
2634 | <tr> | ||
2635 | <td class="filename"><a href='download.php?file=miniupnpd-20080427.tar.gz'>miniupnpd-20080427.tar.gz</a></td> | ||
2636 | <td class="filesize">78765</td> | ||
2637 | <td class="filedate">27/04/2008 18:16:36 +0000</td> | ||
2638 | <td class="comment">MiniUPnP daemon source code</td> | ||
2639 | <td></td> | ||
2640 | </tr> | ||
2641 | <tr> | ||
2642 | <td class="filename"><a href='download.php?file=miniupnpc-20080427.tar.gz'>miniupnpc-20080427.tar.gz</a></td> | ||
2643 | <td class="filesize">37610</td> | ||
2644 | <td class="filedate">27/04/2008 18:16:35 +0000</td> | ||
2645 | <td class="comment">MiniUPnP client source code</td> | ||
2646 | <td></td> | ||
2647 | </tr> | ||
2648 | <tr> | ||
2649 | <td class="filename"><a href='download.php?file=miniupnpd-1.1.tar.gz'>miniupnpd-1.1.tar.gz</a></td> | ||
2650 | <td class="filesize">78594</td> | ||
2651 | <td class="filedate">25/04/2008 17:38:05 +0000</td> | ||
2652 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2653 | <td></td> | ||
2654 | </tr> | ||
2655 | <tr> | ||
2656 | <td class="filename"><a href='download.php?file=miniupnpc-20080423.tar.gz'>miniupnpc-20080423.tar.gz</a></td> | ||
2657 | <td class="filesize">36818</td> | ||
2658 | <td class="filedate">23/04/2008 11:57:36 +0000</td> | ||
2659 | <td class="comment">MiniUPnP client source code</td> | ||
2660 | <td></td> | ||
2661 | </tr> | ||
2662 | <tr> | ||
2663 | <td class="filename"><a href='download.php?file=miniupnpd-20080308.tar.gz'>miniupnpd-20080308.tar.gz</a></td> | ||
2664 | <td class="filesize">75679</td> | ||
2665 | <td class="filedate">08/03/2008 11:13:29 +0000</td> | ||
2666 | <td class="comment">MiniUPnP daemon source code</td> | ||
2667 | <td></td> | ||
2668 | </tr> | ||
2669 | <tr> | ||
2670 | <td class="filename"><a href='download.php?file=miniupnpd-20080303.tar.gz'>miniupnpd-20080303.tar.gz</a></td> | ||
2671 | <td class="filesize">74202</td> | ||
2672 | <td class="filedate">03/03/2008 01:43:16 +0000</td> | ||
2673 | <td class="comment">MiniUPnP daemon source code</td> | ||
2674 | <td></td> | ||
2675 | </tr> | ||
2676 | <tr> | ||
2677 | <td class="filename"><a href='download.php?file=miniupnpd-20080224.tar.gz'>miniupnpd-20080224.tar.gz</a></td> | ||
2678 | <td class="filesize">72773</td> | ||
2679 | <td class="filedate">24/02/2008 11:23:17 +0000</td> | ||
2680 | <td class="comment">MiniUPnP daemon source code</td> | ||
2681 | <td></td> | ||
2682 | </tr> | ||
2683 | <tr> | ||
2684 | <td class="filename"><a href='download.php?file=miniupnpc-1.0.tar.gz'>miniupnpc-1.0.tar.gz</a></td> | ||
2685 | <td class="filesize">36223</td> | ||
2686 | <td class="filedate">21/02/2008 13:26:46 +0000</td> | ||
2687 | <td class="comment">MiniUPnP client release source code</td> | ||
2688 | <td></td> | ||
2689 | </tr> | ||
2690 | <tr> | ||
2691 | <td class="filename"><a href='download.php?file=miniupnpd-20080221.tar.gz'>miniupnpd-20080221.tar.gz</a></td> | ||
2692 | <td class="filesize">70823</td> | ||
2693 | <td class="filedate">21/02/2008 10:23:46 +0000</td> | ||
2694 | <td class="comment">MiniUPnP daemon source code</td> | ||
2695 | <td></td> | ||
2696 | </tr> | ||
2697 | <tr> | ||
2698 | <td class="filename"><a href='download.php?file=miniupnpc-20080217.tar.gz'>miniupnpc-20080217.tar.gz</a></td> | ||
2699 | <td class="filesize">35243</td> | ||
2700 | <td class="filedate">16/02/2008 23:47:59 +0000</td> | ||
2701 | <td class="comment">MiniUPnP client source code</td> | ||
2702 | <td></td> | ||
2703 | </tr> | ||
2704 | <tr> | ||
2705 | <td class="filename"><a href='download.php?file=miniupnpd-20080207.tar.gz'>miniupnpd-20080207.tar.gz</a></td> | ||
2706 | <td class="filesize">70647</td> | ||
2707 | <td class="filedate">07/02/2008 21:21:00 +0000</td> | ||
2708 | <td class="comment">MiniUPnP daemon source code</td> | ||
2709 | <td></td> | ||
2710 | </tr> | ||
2711 | <tr> | ||
2712 | <td class="filename"><a href='download.php?file=miniupnpc-20080203.tar.gz'>miniupnpc-20080203.tar.gz</a></td> | ||
2713 | <td class="filesize">34921</td> | ||
2714 | <td class="filedate">03/02/2008 22:28:11 +0000</td> | ||
2715 | <td class="comment">MiniUPnP client source code</td> | ||
2716 | <td></td> | ||
2717 | </tr> | ||
2718 | <tr> | ||
2719 | <td class="filename"><a href='download.php?file=miniupnpd-1.0.tar.gz'>miniupnpd-1.0.tar.gz</a></td> | ||
2720 | <td class="filesize">69427</td> | ||
2721 | <td class="filedate">27/01/2008 22:41:25 +0000</td> | ||
2722 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2723 | <td></td> | ||
2724 | </tr> | ||
2725 | <tr> | ||
2726 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080118.zip'>upnpc-exe-win32-20080118.zip</a></td> | ||
2727 | <td class="filesize">13582</td> | ||
2728 | <td class="filedate">18/01/2008 11:42:16 +0000</td> | ||
2729 | <td class="comment">Windows executable</td> | ||
2730 | <td></td> | ||
2731 | </tr> | ||
2732 | <tr> | ||
2733 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC13.tar.gz'>miniupnpd-1.0-RC13.tar.gz</a></td> | ||
2734 | <td class="filesize">67892</td> | ||
2735 | <td class="filedate">03/01/2008 16:50:21 +0000</td> | ||
2736 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2737 | <td></td> | ||
2738 | </tr> | ||
2739 | <tr> | ||
2740 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC13.tar.gz'>miniupnpc-1.0-RC13.tar.gz</a></td> | ||
2741 | <td class="filesize">34820</td> | ||
2742 | <td class="filedate">03/01/2008 16:50:20 +0000</td> | ||
2743 | <td class="comment">MiniUPnP client release source code</td> | ||
2744 | <td></td> | ||
2745 | </tr> | ||
2746 | <tr> | ||
2747 | <td class="filename"><a href='download.php?file=miniupnpd-20071220.tar.gz'>miniupnpd-20071220.tar.gz</a></td> | ||
2748 | <td class="filesize">67211</td> | ||
2749 | <td class="filedate">20/12/2007 12:08:34 +0000</td> | ||
2750 | <td class="comment">MiniUPnP daemon source code</td> | ||
2751 | <td></td> | ||
2752 | </tr> | ||
2753 | <tr> | ||
2754 | <td class="filename"><a href='download.php?file=miniupnpc-20071219.tar.gz'>miniupnpc-20071219.tar.gz</a></td> | ||
2755 | <td class="filesize">34290</td> | ||
2756 | <td class="filedate">19/12/2007 18:31:47 +0000</td> | ||
2757 | <td class="comment">MiniUPnP client source code</td> | ||
2758 | <td></td> | ||
2759 | </tr> | ||
2760 | <tr> | ||
2761 | <td class="filename"><a href='download.php?file=minissdpd-1.0-RC12.tar.gz'>minissdpd-1.0-RC12.tar.gz</a></td> | ||
2762 | <td class="filesize">9956</td> | ||
2763 | <td class="filedate">19/12/2007 18:30:12 +0000</td> | ||
2764 | <td class="comment">MiniSSDPd release source code</td> | ||
2765 | <td></td> | ||
2766 | </tr> | ||
2767 | <tr> | ||
2768 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC12.tar.gz'>miniupnpd-1.0-RC12.tar.gz</a></td> | ||
2769 | <td class="filesize">66911</td> | ||
2770 | <td class="filedate">14/12/2007 17:39:20 +0000</td> | ||
2771 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2772 | <td></td> | ||
2773 | </tr> | ||
2774 | <tr> | ||
2775 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC12.tar.gz'>miniupnpc-1.0-RC12.tar.gz</a></td> | ||
2776 | <td class="filesize">32543</td> | ||
2777 | <td class="filedate">14/12/2007 17:39:19 +0000</td> | ||
2778 | <td class="comment">MiniUPnP client release source code</td> | ||
2779 | <td></td> | ||
2780 | </tr> | ||
2781 | <tr> | ||
2782 | <td class="filename"><a href='download.php?file=miniupnpc-20071213.tar.gz'>miniupnpc-20071213.tar.gz</a></td> | ||
2783 | <td class="filesize">32541</td> | ||
2784 | <td class="filedate">13/12/2007 17:09:51 +0000</td> | ||
2785 | <td class="comment">MiniUPnP client source code</td> | ||
2786 | <td></td> | ||
2787 | </tr> | ||
2788 | <tr> | ||
2789 | <td class="filename"><a href='download.php?file=miniupnpd-20071213.tar.gz'>miniupnpd-20071213.tar.gz</a></td> | ||
2790 | <td class="filesize">66826</td> | ||
2791 | <td class="filedate">13/12/2007 16:42:50 +0000</td> | ||
2792 | <td class="comment">MiniUPnP daemon source code</td> | ||
2793 | <td></td> | ||
2794 | </tr> | ||
2795 | <tr> | ||
2796 | <td class="filename"><a href='download.php?file=libnatpmp-20071213.tar.gz'>libnatpmp-20071213.tar.gz</a></td> | ||
2797 | <td class="filesize">5997</td> | ||
2798 | <td class="filedate">13/12/2007 14:56:30 +0000</td> | ||
2799 | <td class="comment">libnatpmp source code</td> | ||
2800 | <td></td> | ||
2801 | </tr> | ||
2802 | <tr> | ||
2803 | <td class="filename"><a href='download.php?file=libnatpmp-20071202.tar.gz'>libnatpmp-20071202.tar.gz</a></td> | ||
2804 | <td class="filesize">5664</td> | ||
2805 | <td class="filedate">02/12/2007 00:15:28 +0000</td> | ||
2806 | <td class="comment">libnatpmp source code</td> | ||
2807 | <td></td> | ||
2808 | </tr> | ||
2809 | <tr> | ||
2810 | <td class="filename"><a href='download.php?file=miniupnpd-20071103.tar.gz'>miniupnpd-20071103.tar.gz</a></td> | ||
2811 | <td class="filesize">65740</td> | ||
2812 | <td class="filedate">02/11/2007 23:58:38 +0000</td> | ||
2813 | <td class="comment">MiniUPnP daemon source code</td> | ||
2814 | <td></td> | ||
2815 | </tr> | ||
2816 | <tr> | ||
2817 | <td class="filename"><a href='download.php?file=miniupnpd-20071102.tar.gz'>miniupnpd-20071102.tar.gz</a></td> | ||
2818 | <td class="filesize">65733</td> | ||
2819 | <td class="filedate">02/11/2007 23:05:44 +0000</td> | ||
2820 | <td class="comment">MiniUPnP daemon source code</td> | ||
2821 | <td></td> | ||
2822 | </tr> | ||
2823 | <tr> | ||
2824 | <td class="filename"><a href='download.php?file=miniupnpc-20071103.tar.gz'>miniupnpc-20071103.tar.gz</a></td> | ||
2825 | <td class="filesize">32239</td> | ||
2826 | <td class="filedate">02/11/2007 23:05:34 +0000</td> | ||
2827 | <td class="comment">MiniUPnP client source code</td> | ||
2828 | <td></td> | ||
2829 | </tr> | ||
2830 | <tr> | ||
2831 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC11.tar.gz'>miniupnpd-1.0-RC11.tar.gz</a></td> | ||
2832 | <td class="filesize">64828</td> | ||
2833 | <td class="filedate">25/10/2007 13:27:18 +0000</td> | ||
2834 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2835 | <td></td> | ||
2836 | </tr> | ||
2837 | <tr> | ||
2838 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC11.tar.gz'>miniupnpc-1.0-RC11.tar.gz</a></td> | ||
2839 | <td class="filesize">32161</td> | ||
2840 | <td class="filedate">25/10/2007 13:27:17 +0000</td> | ||
2841 | <td class="comment">MiniUPnP client release source code</td> | ||
2842 | <td></td> | ||
2843 | </tr> | ||
2844 | <tr> | ||
2845 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071025.zip'>upnpc-exe-win32-20071025.zip</a></td> | ||
2846 | <td class="filesize">12809</td> | ||
2847 | <td class="filedate">24/10/2007 23:15:55 +0000</td> | ||
2848 | <td class="comment">Windows executable</td> | ||
2849 | <td></td> | ||
2850 | </tr> | ||
2851 | <tr> | ||
2852 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC10.tar.gz'>miniupnpd-1.0-RC10.tar.gz</a></td> | ||
2853 | <td class="filesize">62674</td> | ||
2854 | <td class="filedate">12/10/2007 08:38:33 +0000</td> | ||
2855 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2856 | <td></td> | ||
2857 | </tr> | ||
2858 | <tr> | ||
2859 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC10.tar.gz'>miniupnpc-1.0-RC10.tar.gz</a></td> | ||
2860 | <td class="filesize">31962</td> | ||
2861 | <td class="filedate">12/10/2007 08:38:31 +0000</td> | ||
2862 | <td class="comment">MiniUPnP client release source code</td> | ||
2863 | <td></td> | ||
2864 | </tr> | ||
2865 | <tr> | ||
2866 | <td class="filename"><a href='download.php?file=minissdpd-1.0-RC10.tar.gz'>minissdpd-1.0-RC10.tar.gz</a></td> | ||
2867 | <td class="filesize">9517</td> | ||
2868 | <td class="filedate">12/10/2007 08:38:30 +0000</td> | ||
2869 | <td class="comment">MiniSSDPd release source code</td> | ||
2870 | <td></td> | ||
2871 | </tr> | ||
2872 | <tr> | ||
2873 | <td class="filename"><a href='download.php?file=miniupnpc-20071003.tar.gz'>miniupnpc-20071003.tar.gz</a></td> | ||
2874 | <td class="filesize">31199</td> | ||
2875 | <td class="filedate">03/10/2007 15:30:13 +0000</td> | ||
2876 | <td class="comment">MiniUPnP client source code</td> | ||
2877 | <td></td> | ||
2878 | </tr> | ||
2879 | <tr> | ||
2880 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071001.zip'>upnpc-exe-win32-20071001.zip</a></td> | ||
2881 | <td class="filesize">12604</td> | ||
2882 | <td class="filedate">01/10/2007 17:09:22 +0000</td> | ||
2883 | <td class="comment">Windows executable</td> | ||
2884 | <td></td> | ||
2885 | </tr> | ||
2886 | <tr> | ||
2887 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC9.tar.gz'>miniupnpd-1.0-RC9.tar.gz</a></td> | ||
2888 | <td class="filesize">54778</td> | ||
2889 | <td class="filedate">27/09/2007 19:38:36 +0000</td> | ||
2890 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2891 | <td></td> | ||
2892 | </tr> | ||
2893 | <tr> | ||
2894 | <td class="filename"><a href='download.php?file=minissdpd-1.0-RC9.tar.gz'>minissdpd-1.0-RC9.tar.gz</a></td> | ||
2895 | <td class="filesize">9163</td> | ||
2896 | <td class="filedate">27/09/2007 17:00:03 +0000</td> | ||
2897 | <td class="comment">MiniSSDPd release source code</td> | ||
2898 | <td></td> | ||
2899 | </tr> | ||
2900 | <tr> | ||
2901 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC9.tar.gz'>miniupnpc-1.0-RC9.tar.gz</a></td> | ||
2902 | <td class="filesize">30538</td> | ||
2903 | <td class="filedate">27/09/2007 17:00:03 +0000</td> | ||
2904 | <td class="comment">MiniUPnP client release source code</td> | ||
2905 | <td></td> | ||
2906 | </tr> | ||
2907 | <tr> | ||
2908 | <td class="filename"><a href='download.php?file=miniupnpd-20070924.tar.gz'>miniupnpd-20070924.tar.gz</a></td> | ||
2909 | <td class="filesize">52338</td> | ||
2910 | <td class="filedate">24/09/2007 20:26:05 +0000</td> | ||
2911 | <td class="comment">MiniUPnP daemon source code</td> | ||
2912 | <td></td> | ||
2913 | </tr> | ||
2914 | <tr> | ||
2915 | <td class="filename"><a href='download.php?file=miniupnpd-20070923.tar.gz'>miniupnpd-20070923.tar.gz</a></td> | ||
2916 | <td class="filesize">51060</td> | ||
2917 | <td class="filedate">23/09/2007 21:13:34 +0000</td> | ||
2918 | <td class="comment">MiniUPnP daemon source code</td> | ||
2919 | <td></td> | ||
2920 | </tr> | ||
2921 | <tr> | ||
2922 | <td class="filename"><a href='download.php?file=miniupnpc-20070923.tar.gz'>miniupnpc-20070923.tar.gz</a></td> | ||
2923 | <td class="filesize">30246</td> | ||
2924 | <td class="filedate">23/09/2007 21:13:33 +0000</td> | ||
2925 | <td class="comment">MiniUPnP client source code</td> | ||
2926 | <td></td> | ||
2927 | </tr> | ||
2928 | <tr> | ||
2929 | <td class="filename"><a href='download.php?file=minissdpd-20070923.tar.gz'>minissdpd-20070923.tar.gz</a></td> | ||
2930 | <td class="filesize">8978</td> | ||
2931 | <td class="filedate">23/09/2007 21:13:32 +0000</td> | ||
2932 | <td class="comment">MiniSSDPd source code</td> | ||
2933 | <td></td> | ||
2934 | </tr> | ||
2935 | <tr> | ||
2936 | <td class="filename"><a href='download.php?file=miniupnpc-20070902.tar.gz'>miniupnpc-20070902.tar.gz</a></td> | ||
2937 | <td class="filesize">30205</td> | ||
2938 | <td class="filedate">01/09/2007 23:47:23 +0000</td> | ||
2939 | <td class="comment">MiniUPnP client source code</td> | ||
2940 | <td></td> | ||
2941 | </tr> | ||
2942 | <tr> | ||
2943 | <td class="filename"><a href='download.php?file=minissdpd-20070902.tar.gz'>minissdpd-20070902.tar.gz</a></td> | ||
2944 | <td class="filesize">6539</td> | ||
2945 | <td class="filedate">01/09/2007 23:47:20 +0000</td> | ||
2946 | <td class="comment">MiniSSDPd source code</td> | ||
2947 | <td></td> | ||
2948 | </tr> | ||
2949 | <tr> | ||
2950 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC8.tar.gz'>miniupnpd-1.0-RC8.tar.gz</a></td> | ||
2951 | <td class="filesize">50952</td> | ||
2952 | <td class="filedate">29/08/2007 10:56:09 +0000</td> | ||
2953 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2954 | <td></td> | ||
2955 | </tr> | ||
2956 | <tr> | ||
2957 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC8.tar.gz'>miniupnpc-1.0-RC8.tar.gz</a></td> | ||
2958 | <td class="filesize">29312</td> | ||
2959 | <td class="filedate">29/08/2007 10:56:08 +0000</td> | ||
2960 | <td class="comment">MiniUPnP client release source code</td> | ||
2961 | <td></td> | ||
2962 | </tr> | ||
2963 | <tr> | ||
2964 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC7.tar.gz'>miniupnpd-1.0-RC7.tar.gz</a></td> | ||
2965 | <td class="filesize">50613</td> | ||
2966 | <td class="filedate">20/07/2007 00:15:45 +0000</td> | ||
2967 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2968 | <td></td> | ||
2969 | </tr> | ||
2970 | <tr> | ||
2971 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC6.tar.gz'>miniupnpd-1.0-RC6.tar.gz</a></td> | ||
2972 | <td class="filesize">49986</td> | ||
2973 | <td class="filedate">12/06/2007 17:12:07 +0000</td> | ||
2974 | <td class="comment">MiniUPnP daemon release source code</td> | ||
2975 | <td></td> | ||
2976 | </tr> | ||
2977 | <tr> | ||
2978 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC6.tar.gz'>miniupnpc-1.0-RC6.tar.gz</a></td> | ||
2979 | <td class="filesize">29032</td> | ||
2980 | <td class="filedate">12/06/2007 17:12:06 +0000</td> | ||
2981 | <td class="comment">MiniUPnP client release source code</td> | ||
2982 | <td></td> | ||
2983 | </tr> | ||
2984 | <tr> | ||
2985 | <td class="filename"><a href='download.php?file=miniupnpd-20070607.tar.gz'>miniupnpd-20070607.tar.gz</a></td> | ||
2986 | <td class="filesize">49768</td> | ||
2987 | <td class="filedate">06/06/2007 23:12:00 +0000</td> | ||
2988 | <td class="comment">MiniUPnP daemon source code</td> | ||
2989 | <td></td> | ||
2990 | </tr> | ||
2991 | <tr> | ||
2992 | <td class="filename"><a href='download.php?file=miniupnpd-20070605.tar.gz'>miniupnpd-20070605.tar.gz</a></td> | ||
2993 | <td class="filesize">49710</td> | ||
2994 | <td class="filedate">05/06/2007 21:01:53 +0000</td> | ||
2995 | <td class="comment">MiniUPnP daemon source code</td> | ||
2996 | <td></td> | ||
2997 | </tr> | ||
2998 | <tr> | ||
2999 | <td class="filename"><a href='download.php?file=miniupnpd-20070521.tar.gz'>miniupnpd-20070521.tar.gz</a></td> | ||
3000 | <td class="filesize">48374</td> | ||
3001 | <td class="filedate">21/05/2007 13:07:43 +0000</td> | ||
3002 | <td class="comment">MiniUPnP daemon source code</td> | ||
3003 | <td></td> | ||
3004 | </tr> | ||
3005 | <tr> | ||
3006 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20070519.zip'>upnpc-exe-win32-20070519.zip</a></td> | ||
3007 | <td class="filesize">10836</td> | ||
3008 | <td class="filedate">19/05/2007 13:14:15 +0000</td> | ||
3009 | <td class="comment">Windows executable</td> | ||
3010 | <td></td> | ||
3011 | </tr> | ||
3012 | <tr> | ||
3013 | <td class="filename"><a href='download.php?file=miniupnpc-20070515.tar.gz'>miniupnpc-20070515.tar.gz</a></td> | ||
3014 | <td class="filesize">25802</td> | ||
3015 | <td class="filedate">15/05/2007 18:15:25 +0000</td> | ||
3016 | <td class="comment">MiniUPnP client source code</td> | ||
3017 | <td></td> | ||
3018 | </tr> | ||
3019 | <tr> | ||
3020 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC5.tar.gz'>miniupnpd-1.0-RC5.tar.gz</a></td> | ||
3021 | <td class="filesize">48064</td> | ||
3022 | <td class="filedate">10/05/2007 20:22:48 +0000</td> | ||
3023 | <td class="comment">MiniUPnP daemon release source code</td> | ||
3024 | <td></td> | ||
3025 | </tr> | ||
3026 | <tr> | ||
3027 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC5.tar.gz'>miniupnpc-1.0-RC5.tar.gz</a></td> | ||
3028 | <td class="filesize">25242</td> | ||
3029 | <td class="filedate">10/05/2007 20:22:46 +0000</td> | ||
3030 | <td class="comment">MiniUPnP client release source code</td> | ||
3031 | <td></td> | ||
3032 | </tr> | ||
3033 | <tr> | ||
3034 | <td class="filename"><a href='download.php?file=miniupnpd-20070412.tar.gz'>miniupnpd-20070412.tar.gz</a></td> | ||
3035 | <td class="filesize">47807</td> | ||
3036 | <td class="filedate">12/04/2007 20:21:48 +0000</td> | ||
3037 | <td class="comment">MiniUPnP daemon source code</td> | ||
3038 | <td></td> | ||
3039 | </tr> | ||
3040 | <tr> | ||
3041 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC4.tar.gz'>miniupnpd-1.0-RC4.tar.gz</a></td> | ||
3042 | <td class="filesize">47687</td> | ||
3043 | <td class="filedate">17/03/2007 11:43:13 +0000</td> | ||
3044 | <td class="comment">MiniUPnP daemon release source code</td> | ||
3045 | <td></td> | ||
3046 | </tr> | ||
3047 | <tr> | ||
3048 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC4.tar.gz'>miniupnpc-1.0-RC4.tar.gz</a></td> | ||
3049 | <td class="filesize">25085</td> | ||
3050 | <td class="filedate">17/03/2007 11:43:10 +0000</td> | ||
3051 | <td class="comment">MiniUPnP client release source code</td> | ||
3052 | <td></td> | ||
3053 | </tr> | ||
3054 | <tr> | ||
3055 | <td class="filename"><a href='download.php?file=miniupnpd-20070311.tar.gz'>miniupnpd-20070311.tar.gz</a></td> | ||
3056 | <td class="filesize">47599</td> | ||
3057 | <td class="filedate">11/03/2007 00:25:26 +0000</td> | ||
3058 | <td class="comment">MiniUPnP daemon source code</td> | ||
3059 | <td></td> | ||
3060 | </tr> | ||
3061 | <tr> | ||
3062 | <td class="filename"><a href='download.php?file=miniupnpd-20070208.tar.gz'>miniupnpd-20070208.tar.gz</a></td> | ||
3063 | <td class="filesize">45084</td> | ||
3064 | <td class="filedate">07/02/2007 23:04:06 +0000</td> | ||
3065 | <td class="comment">MiniUPnP daemon source code</td> | ||
3066 | <td></td> | ||
3067 | </tr> | ||
3068 | <tr> | ||
3069 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC3.tar.gz'>miniupnpd-1.0-RC3.tar.gz</a></td> | ||
3070 | <td class="filesize">44683</td> | ||
3071 | <td class="filedate">30/01/2007 23:00:44 +0000</td> | ||
3072 | <td class="comment">MiniUPnP daemon release source code</td> | ||
3073 | <td></td> | ||
3074 | </tr> | ||
3075 | <tr> | ||
3076 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC3.tar.gz'>miniupnpc-1.0-RC3.tar.gz</a></td> | ||
3077 | <td class="filesize">25055</td> | ||
3078 | <td class="filedate">30/01/2007 23:00:42 +0000</td> | ||
3079 | <td class="comment">MiniUPnP client release source code</td> | ||
3080 | <td></td> | ||
3081 | </tr> | ||
3082 | <tr> | ||
3083 | <td class="filename"><a href='download.php?file=miniupnpd-20070130.tar.gz'>miniupnpd-20070130.tar.gz</a></td> | ||
3084 | <td class="filesize">43735</td> | ||
3085 | <td class="filedate">29/01/2007 23:26:16 +0000</td> | ||
3086 | <td class="comment">MiniUPnP daemon source code</td> | ||
3087 | <td></td> | ||
3088 | </tr> | ||
3089 | <tr> | ||
3090 | <td class="filename"><a href='download.php?file=miniupnpc-20070130.tar.gz'>miniupnpc-20070130.tar.gz</a></td> | ||
3091 | <td class="filesize">24466</td> | ||
3092 | <td class="filedate">29/01/2007 23:26:13 +0000</td> | ||
3093 | <td class="comment">MiniUPnP client source code</td> | ||
3094 | <td></td> | ||
3095 | </tr> | ||
3096 | <tr> | ||
3097 | <td class="filename"><a href='download.php?file=miniupnpd-20070127.tar.gz'>miniupnpd-20070127.tar.gz</a></td> | ||
3098 | <td class="filesize">42643</td> | ||
3099 | <td class="filedate">27/01/2007 16:02:35 +0000</td> | ||
3100 | <td class="comment">MiniUPnP daemon source code</td> | ||
3101 | <td></td> | ||
3102 | </tr> | ||
3103 | <tr> | ||
3104 | <td class="filename"><a href='download.php?file=miniupnpc-20070127.tar.gz'>miniupnpc-20070127.tar.gz</a></td> | ||
3105 | <td class="filesize">24241</td> | ||
3106 | <td class="filedate">27/01/2007 16:02:33 +0000</td> | ||
3107 | <td class="comment">MiniUPnP client source code</td> | ||
3108 | <td></td> | ||
3109 | </tr> | ||
3110 | <tr> | ||
3111 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC2.tar.gz'>miniupnpd-1.0-RC2.tar.gz</a></td> | ||
3112 | <td class="filesize">40424</td> | ||
3113 | <td class="filedate">17/01/2007 16:13:05 +0000</td> | ||
3114 | <td class="comment">MiniUPnP daemon release source code</td> | ||
3115 | <td></td> | ||
3116 | </tr> | ||
3117 | <tr> | ||
3118 | <td class="filename"><a href='download.php?file=miniupnpd-20070112.tar.gz'>miniupnpd-20070112.tar.gz</a></td> | ||
3119 | <td class="filesize">40708</td> | ||
3120 | <td class="filedate">12/01/2007 13:40:50 +0000</td> | ||
3121 | <td class="comment">MiniUPnP daemon source code</td> | ||
3122 | <td></td> | ||
3123 | </tr> | ||
3124 | <tr> | ||
3125 | <td class="filename"><a href='download.php?file=miniupnpd-20070111.tar.gz'>miniupnpd-20070111.tar.gz</a></td> | ||
3126 | <td class="filesize">40651</td> | ||
3127 | <td class="filedate">11/01/2007 18:50:21 +0000</td> | ||
3128 | <td class="comment">MiniUPnP daemon source code</td> | ||
3129 | <td></td> | ||
3130 | </tr> | ||
3131 | <tr> | ||
3132 | <td class="filename"><a href='download.php?file=miniupnpd-20070108.tar.gz'>miniupnpd-20070108.tar.gz</a></td> | ||
3133 | <td class="filesize">40025</td> | ||
3134 | <td class="filedate">08/01/2007 10:02:14 +0000</td> | ||
3135 | <td class="comment">MiniUPnP daemon source code</td> | ||
3136 | <td></td> | ||
3137 | </tr> | ||
3138 | <tr> | ||
3139 | <td class="filename"><a href='download.php?file=miniupnpd-20070103.tar.gz'>miniupnpd-20070103.tar.gz</a></td> | ||
3140 | <td class="filesize">40065</td> | ||
3141 | <td class="filedate">03/01/2007 14:39:11 +0000</td> | ||
3142 | <td class="comment">MiniUPnP daemon source code</td> | ||
3143 | <td></td> | ||
3144 | </tr> | ||
3145 | <tr> | ||
3146 | <td class="filename"><a href='download.php?file=miniupnpc-20061214.tar.gz'>miniupnpc-20061214.tar.gz</a></td> | ||
3147 | <td class="filesize">24106</td> | ||
3148 | <td class="filedate">14/12/2006 15:43:54 +0000</td> | ||
3149 | <td class="comment">MiniUPnP client source code</td> | ||
3150 | <td></td> | ||
3151 | </tr> | ||
3152 | <tr> | ||
3153 | <td class="filename"><a href='download.php?file=miniupnpd-20061214.tar.gz'>miniupnpd-20061214.tar.gz</a></td> | ||
3154 | <td class="filesize">39750</td> | ||
3155 | <td class="filedate">14/12/2006 13:44:51 +0000</td> | ||
3156 | <td class="comment">MiniUPnP daemon source code</td> | ||
3157 | <td></td> | ||
3158 | </tr> | ||
3159 | <tr> | ||
3160 | <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC1.tar.gz'>miniupnpd-1.0-RC1.tar.gz</a></td> | ||
3161 | <td class="filesize">39572</td> | ||
3162 | <td class="filedate">07/12/2006 10:55:31 +0000</td> | ||
3163 | <td class="comment">MiniUPnP daemon release source code</td> | ||
3164 | <td></td> | ||
3165 | </tr> | ||
3166 | <tr> | ||
3167 | <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC1.tar.gz'>miniupnpc-1.0-RC1.tar.gz</a></td> | ||
3168 | <td class="filesize">23582</td> | ||
3169 | <td class="filedate">07/12/2006 10:55:30 +0000</td> | ||
3170 | <td class="comment">MiniUPnP client release source code</td> | ||
3171 | <td></td> | ||
3172 | </tr> | ||
3173 | <tr> | ||
3174 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061201.zip'>upnpc-exe-win32-20061201.zip</a></td> | ||
3175 | <td class="filesize">10378</td> | ||
3176 | <td class="filedate">01/12/2006 00:33:08 +0000</td> | ||
3177 | <td class="comment">Windows executable</td> | ||
3178 | <td></td> | ||
3179 | </tr> | ||
3180 | <tr> | ||
3181 | <td class="filename"><a href='download.php?file=miniupnpd20061130.tar.gz'>miniupnpd20061130.tar.gz</a></td> | ||
3182 | <td class="filesize">37184</td> | ||
3183 | <td class="filedate">30/11/2006 12:25:25 +0000</td> | ||
3184 | <td class="comment">MiniUPnP daemon source code</td> | ||
3185 | <td></td> | ||
3186 | </tr> | ||
3187 | <tr> | ||
3188 | <td class="filename"><a href='download.php?file=miniupnpd20061129.tar.gz'>miniupnpd20061129.tar.gz</a></td> | ||
3189 | <td class="filesize">36045</td> | ||
3190 | <td class="filedate">29/11/2006 00:10:49 +0000</td> | ||
3191 | <td class="comment">MiniUPnP daemon source code</td> | ||
3192 | <td></td> | ||
3193 | </tr> | ||
3194 | <tr> | ||
3195 | <td class="filename"><a href='download.php?file=miniupnpd20061127.tar.gz'>miniupnpd20061127.tar.gz</a></td> | ||
3196 | <td class="filesize">34155</td> | ||
3197 | <td class="filedate">26/11/2006 23:15:28 +0000</td> | ||
3198 | <td class="comment">MiniUPnP daemon source code</td> | ||
3199 | <td></td> | ||
3200 | </tr> | ||
3201 | <tr> | ||
3202 | <td class="filename"><a href='download.php?file=miniupnpc20061123.tar.gz'>miniupnpc20061123.tar.gz</a></td> | ||
3203 | <td class="filesize">21004</td> | ||
3204 | <td class="filedate">23/11/2006 22:41:46 +0000</td> | ||
3205 | <td class="comment">MiniUPnP client source code</td> | ||
3206 | <td></td> | ||
3207 | </tr> | ||
3208 | <tr> | ||
3209 | <td class="filename" colspan="2"><a href='download.php?file=miniupnpd-bin-openwrt20061123.tar.gz'>miniupnpd-bin-openwrt20061123.tar.gz</a></td> | ||
3210 | <td class="filedate">23/11/2006 22:41:44 +0000</td> | ||
3211 | <td class="comment">Precompiled binaries for openwrt</td> | ||
3212 | <td></td> | ||
3213 | </tr> | ||
3214 | <tr> | ||
3215 | <td class="filename"><a href='download.php?file=miniupnpd20061123.tar.gz'>miniupnpd20061123.tar.gz</a></td> | ||
3216 | <td class="filesize">33809</td> | ||
3217 | <td class="filedate">23/11/2006 22:28:29 +0000</td> | ||
3218 | <td class="comment">MiniUPnP daemon source code</td> | ||
3219 | <td></td> | ||
3220 | </tr> | ||
3221 | <tr> | ||
3222 | <td class="filename"><a href='download.php?file=miniupnpc20061119.tar.gz'>miniupnpc20061119.tar.gz</a></td> | ||
3223 | <td class="filesize">20897</td> | ||
3224 | <td class="filedate">19/11/2006 22:50:37 +0000</td> | ||
3225 | <td class="comment">MiniUPnP client source code</td> | ||
3226 | <td></td> | ||
3227 | </tr> | ||
3228 | <tr> | ||
3229 | <td class="filename"><a href='download.php?file=miniupnpd20061119.tar.gz'>miniupnpd20061119.tar.gz</a></td> | ||
3230 | <td class="filesize">32580</td> | ||
3231 | <td class="filedate">19/11/2006 22:50:36 +0000</td> | ||
3232 | <td class="comment">MiniUPnP daemon source code</td> | ||
3233 | <td></td> | ||
3234 | </tr> | ||
3235 | <tr> | ||
3236 | <td class="filename"><a href='download.php?file=miniupnpd20061117.tar.gz'>miniupnpd20061117.tar.gz</a></td> | ||
3237 | <td class="filesize">32646</td> | ||
3238 | <td class="filedate">17/11/2006 13:29:33 +0000</td> | ||
3239 | <td class="comment">MiniUPnP daemon source code</td> | ||
3240 | <td></td> | ||
3241 | </tr> | ||
3242 | <tr> | ||
3243 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061112.zip'>upnpc-exe-win32-20061112.zip</a></td> | ||
3244 | <td class="filesize">10262</td> | ||
3245 | <td class="filedate">12/11/2006 22:41:25 +0000</td> | ||
3246 | <td class="comment">Windows executable</td> | ||
3247 | <td></td> | ||
3248 | </tr> | ||
3249 | <tr> | ||
3250 | <td class="filename"><a href='download.php?file=miniupnpd20061112.tar.gz'>miniupnpd20061112.tar.gz</a></td> | ||
3251 | <td class="filesize">32023</td> | ||
3252 | <td class="filedate">12/11/2006 21:30:32 +0000</td> | ||
3253 | <td class="comment">MiniUPnP daemon source code</td> | ||
3254 | <td></td> | ||
3255 | </tr> | ||
3256 | <tr> | ||
3257 | <td class="filename"><a href='download.php?file=miniupnpc20061112.tar.gz'>miniupnpc20061112.tar.gz</a></td> | ||
3258 | <td class="filesize">21047</td> | ||
3259 | <td class="filedate">12/11/2006 21:30:31 +0000</td> | ||
3260 | <td class="comment">MiniUPnP client source code</td> | ||
3261 | <td></td> | ||
3262 | </tr> | ||
3263 | <tr> | ||
3264 | <td class="filename"><a href='download.php?file=miniupnpd20061110.tar.gz'>miniupnpd20061110.tar.gz</a></td> | ||
3265 | <td class="filesize">27926</td> | ||
3266 | <td class="filedate">09/11/2006 23:35:02 +0000</td> | ||
3267 | <td class="comment">MiniUPnP daemon source code</td> | ||
3268 | <td></td> | ||
3269 | </tr> | ||
3270 | <tr> | ||
3271 | <td class="filename"><a href='download.php?file=miniupnpc20061110.tar.gz'>miniupnpc20061110.tar.gz</a></td> | ||
3272 | <td class="filesize">21009</td> | ||
3273 | <td class="filedate">09/11/2006 23:32:19 +0000</td> | ||
3274 | <td class="comment">MiniUPnP client source code</td> | ||
3275 | <td></td> | ||
3276 | </tr> | ||
3277 | <tr> | ||
3278 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061101.zip'>upnpc-exe-win32-20061101.zip</a></td> | ||
3279 | <td class="filesize">10089</td> | ||
3280 | <td class="filedate">08/11/2006 20:35:09 +0000</td> | ||
3281 | <td class="comment">Windows executable</td> | ||
3282 | <td></td> | ||
3283 | </tr> | ||
3284 | <tr> | ||
3285 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061020.zip'>upnpc-exe-win32-20061020.zip</a></td> | ||
3286 | <td class="filesize">9183</td> | ||
3287 | <td class="filedate">08/11/2006 20:35:08 +0000</td> | ||
3288 | <td class="comment">Windows executable</td> | ||
3289 | <td></td> | ||
3290 | </tr> | ||
3291 | <tr> | ||
3292 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060909.zip'>upnpc-exe-win32-20060909.zip</a></td> | ||
3293 | <td class="filesize">9994</td> | ||
3294 | <td class="filedate">08/11/2006 20:35:07 +0000</td> | ||
3295 | <td class="comment">Windows executable</td> | ||
3296 | <td></td> | ||
3297 | </tr> | ||
3298 | <tr> | ||
3299 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060801.zip'>upnpc-exe-win32-20060801.zip</a></td> | ||
3300 | <td class="filesize">10002</td> | ||
3301 | <td class="filedate">08/11/2006 20:35:06 +0000</td> | ||
3302 | <td class="comment">Windows executable</td> | ||
3303 | <td></td> | ||
3304 | </tr> | ||
3305 | <tr> | ||
3306 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060711.zip'>upnpc-exe-win32-20060711.zip</a></td> | ||
3307 | <td class="filesize">13733</td> | ||
3308 | <td class="filedate">08/11/2006 20:35:05 +0000</td> | ||
3309 | <td class="comment">Windows executable</td> | ||
3310 | <td></td> | ||
3311 | </tr> | ||
3312 | <tr> | ||
3313 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060709.zip'>upnpc-exe-win32-20060709.zip</a></td> | ||
3314 | <td class="filesize">13713</td> | ||
3315 | <td class="filedate">08/11/2006 20:35:04 +0000</td> | ||
3316 | <td class="comment">Windows executable</td> | ||
3317 | <td></td> | ||
3318 | </tr> | ||
3319 | <tr> | ||
3320 | <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060704.zip'>upnpc-exe-win32-20060704.zip</a></td> | ||
3321 | <td class="filesize">13297</td> | ||
3322 | <td class="filedate">08/11/2006 20:35:03 +0000</td> | ||
3323 | <td class="comment">Windows executable</td> | ||
3324 | <td></td> | ||
3325 | </tr> | ||
3326 | <tr> | ||
3327 | <td class="filename"><a href='download.php?file=miniupnpc20061107.tar.gz'>miniupnpc20061107.tar.gz</a></td> | ||
3328 | <td class="filesize">20708</td> | ||
3329 | <td class="filedate">06/11/2006 23:36:57 +0000</td> | ||
3330 | <td class="comment">MiniUPnP client source code</td> | ||
3331 | <td></td> | ||
3332 | </tr> | ||
3333 | <tr> | ||
3334 | <td class="filename"><a href='download.php?file=miniupnpd20061107.tar.gz'>miniupnpd20061107.tar.gz</a></td> | ||
3335 | <td class="filesize">26992</td> | ||
3336 | <td class="filedate">06/11/2006 23:35:06 +0000</td> | ||
3337 | <td class="comment">MiniUPnP daemon source code</td> | ||
3338 | <td></td> | ||
3339 | </tr> | ||
3340 | <tr> | ||
3341 | <td class="filename"><a href='download.php?file=miniupnpc20061106.tar.gz'>miniupnpc20061106.tar.gz</a></td> | ||
3342 | <td class="filesize">20575</td> | ||
3343 | <td class="filedate">06/11/2006 17:02:15 +0000</td> | ||
3344 | <td class="comment">MiniUPnP client source code</td> | ||
3345 | <td></td> | ||
3346 | </tr> | ||
3347 | <tr> | ||
3348 | <td class="filename"><a href='download.php?file=miniupnpd20061106.tar.gz'>miniupnpd20061106.tar.gz</a></td> | ||
3349 | <td class="filesize">26597</td> | ||
3350 | <td class="filedate">06/11/2006 15:39:10 +0000</td> | ||
3351 | <td class="comment">MiniUPnP daemon source code</td> | ||
3352 | <td></td> | ||
3353 | </tr> | ||
3354 | <tr> | ||
3355 | <td class="filename"><a href='download.php?file=miniupnpc20061101.tar.gz'>miniupnpc20061101.tar.gz</a></td> | ||
3356 | <td class="filesize">20395</td> | ||
3357 | <td class="filedate">04/11/2006 18:16:15 +0000</td> | ||
3358 | <td class="comment">MiniUPnP client source code</td> | ||
3359 | <td></td> | ||
3360 | </tr> | ||
3361 | <tr> | ||
3362 | <td class="filename"><a href='download.php?file=miniupnpc20061031.tar.gz'>miniupnpc20061031.tar.gz</a></td> | ||
3363 | <td class="filesize">20396</td> | ||
3364 | <td class="filedate">04/11/2006 18:16:13 +0000</td> | ||
3365 | <td class="comment">MiniUPnP client source code</td> | ||
3366 | <td></td> | ||
3367 | </tr> | ||
3368 | <tr> | ||
3369 | <td class="filename"><a href='download.php?file=miniupnpc20061023.tar.gz'>miniupnpc20061023.tar.gz</a></td> | ||
3370 | <td class="filesize">20109</td> | ||
3371 | <td class="filedate">04/11/2006 18:16:12 +0000</td> | ||
3372 | <td class="comment">MiniUPnP client source code</td> | ||
3373 | <td></td> | ||
3374 | </tr> | ||
3375 | <tr> | ||
3376 | <td class="filename"><a href='download.php?file=miniupnpc20061020.tar.gz'>miniupnpc20061020.tar.gz</a></td> | ||
3377 | <td class="filesize">19739</td> | ||
3378 | <td class="filedate">04/11/2006 18:16:10 +0000</td> | ||
3379 | <td class="comment">MiniUPnP client source code</td> | ||
3380 | <td></td> | ||
3381 | </tr> | ||
3382 | <tr> | ||
3383 | <td class="filename"><a href='download.php?file=miniupnpc20060909.tar.gz'>miniupnpc20060909.tar.gz</a></td> | ||
3384 | <td class="filesize">19285</td> | ||
3385 | <td class="filedate">04/11/2006 18:16:09 +0000</td> | ||
3386 | <td class="comment">MiniUPnP client source code</td> | ||
3387 | <td></td> | ||
3388 | </tr> | ||
3389 | <tr> | ||
3390 | <td class="filename"><a href='download.php?file=miniupnpc20060731.tar.gz'>miniupnpc20060731.tar.gz</a></td> | ||
3391 | <td class="filesize">19032</td> | ||
3392 | <td class="filedate">04/11/2006 18:16:07 +0000</td> | ||
3393 | <td class="comment">MiniUPnP client source code</td> | ||
3394 | <td></td> | ||
3395 | </tr> | ||
3396 | <tr> | ||
3397 | <td class="filename"><a href='download.php?file=miniupnpc20060711.tar.gz'>miniupnpc20060711.tar.gz</a></td> | ||
3398 | <td class="filesize">19151</td> | ||
3399 | <td class="filedate">04/11/2006 18:16:06 +0000</td> | ||
3400 | <td class="comment">MiniUPnP client source code</td> | ||
3401 | <td></td> | ||
3402 | </tr> | ||
3403 | <tr> | ||
3404 | <td class="filename"><a href='download.php?file=miniupnpc20060709.tar.gz'>miniupnpc20060709.tar.gz</a></td> | ||
3405 | <td class="filesize">19080</td> | ||
3406 | <td class="filedate">04/11/2006 18:16:04 +0000</td> | ||
3407 | <td class="comment">MiniUPnP client source code</td> | ||
3408 | <td></td> | ||
3409 | </tr> | ||
3410 | <tr> | ||
3411 | <td class="filename"><a href='download.php?file=miniupnpc20060703.tar.gz'>miniupnpc20060703.tar.gz</a></td> | ||
3412 | <td class="filesize">17906</td> | ||
3413 | <td class="filedate">04/11/2006 18:16:03 +0000</td> | ||
3414 | <td class="comment">MiniUPnP client source code</td> | ||
3415 | <td></td> | ||
3416 | </tr> | ||
3417 | <tr> | ||
3418 | <td class="filename"><a href='download.php?file=miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td> | ||
3419 | <td class="filesize">14840</td> | ||
3420 | <td class="filedate">04/11/2006 18:16:01 +0000</td> | ||
3421 | <td class="comment">João Paulo Barraca version of the upnp client</td> | ||
3422 | <td></td> | ||
3423 | </tr> | ||
3424 | <tr> | ||
3425 | <td class="filename"><a href='download.php?file=miniupnpd20061029.tar.gz'>miniupnpd20061029.tar.gz</a></td> | ||
3426 | <td class="filesize">24197</td> | ||
3427 | <td class="filedate">03/11/2006 13:40:30 +0000</td> | ||
3428 | <td class="comment">MiniUPnP daemon source code</td> | ||
3429 | <td></td> | ||
3430 | </tr> | ||
3431 | <tr> | ||
3432 | <td class="filename"><a href='download.php?file=miniupnpd20061027.tar.gz'>miniupnpd20061027.tar.gz</a></td> | ||
3433 | <td class="filesize">23904</td> | ||
3434 | <td class="filedate">03/11/2006 13:40:29 +0000</td> | ||
3435 | <td class="comment">MiniUPnP daemon source code</td> | ||
3436 | <td></td> | ||
3437 | </tr> | ||
3438 | <tr> | ||
3439 | <td class="filename"><a href='download.php?file=miniupnpd20061028.tar.gz'>miniupnpd20061028.tar.gz</a></td> | ||
3440 | <td class="filesize">24383</td> | ||
3441 | <td class="filedate">03/11/2006 13:40:29 +0000</td> | ||
3442 | <td class="comment">MiniUPnP daemon source code</td> | ||
3443 | <td></td> | ||
3444 | </tr> | ||
3445 | <tr> | ||
3446 | <td class="filename"><a href='download.php?file=miniupnpd20061018.tar.gz'>miniupnpd20061018.tar.gz</a></td> | ||
3447 | <td class="filesize">23051</td> | ||
3448 | <td class="filedate">03/11/2006 13:40:28 +0000</td> | ||
3449 | <td class="comment">MiniUPnP daemon source code</td> | ||
3450 | <td></td> | ||
3451 | </tr> | ||
3452 | <tr> | ||
3453 | <td class="filename"><a href='download.php?file=miniupnpd20061023.tar.gz'>miniupnpd20061023.tar.gz</a></td> | ||
3454 | <td class="filesize">23478</td> | ||
3455 | <td class="filedate">03/11/2006 13:40:28 +0000</td> | ||
3456 | <td class="comment">MiniUPnP daemon source code</td> | ||
3457 | <td></td> | ||
3458 | </tr> | ||
3459 | <tr> | ||
3460 | <td class="filename"><a href='download.php?file=miniupnpd20060930.tar.gz'>miniupnpd20060930.tar.gz</a></td> | ||
3461 | <td class="filesize">22832</td> | ||
3462 | <td class="filedate">03/11/2006 13:40:28 +0000</td> | ||
3463 | <td class="comment">MiniUPnP daemon source code</td> | ||
3464 | <td></td> | ||
3465 | </tr> | ||
3466 | <tr> | ||
3467 | <td class="filename"><a href='download.php?file=miniupnpd20060924.tar.gz'>miniupnpd20060924.tar.gz</a></td> | ||
3468 | <td class="filesize">22038</td> | ||
3469 | <td class="filedate">03/11/2006 13:40:27 +0000</td> | ||
3470 | <td class="comment">MiniUPnP daemon source code</td> | ||
3471 | <td></td> | ||
3472 | </tr> | ||
3473 | <tr> | ||
3474 | <td class="filename"><a href='download.php?file=miniupnpd20060919.tar.gz'>miniupnpd20060919.tar.gz</a></td> | ||
3475 | <td class="filesize">21566</td> | ||
3476 | <td class="filedate">03/11/2006 13:40:27 +0000</td> | ||
3477 | <td class="comment">MiniUPnP daemon source code</td> | ||
3478 | <td></td> | ||
3479 | </tr> | ||
3480 | <tr> | ||
3481 | <td class="filename"><a href='download.php?file=miniupnpd20060729.tar.gz'>miniupnpd20060729.tar.gz</a></td> | ||
3482 | <td class="filesize">19202</td> | ||
3483 | <td class="filedate">03/11/2006 13:40:26 +0000</td> | ||
3484 | <td class="comment">MiniUPnP daemon source code</td> | ||
3485 | <td></td> | ||
3486 | </tr> | ||
3487 | <tr> | ||
3488 | <td class="filename"><a href='download.php?file=miniupnpd20060909.tar.gz'>miniupnpd20060909.tar.gz</a></td> | ||
3489 | <td class="filesize">19952</td> | ||
3490 | <td class="filedate">03/11/2006 13:40:26 +0000</td> | ||
3491 | <td class="comment">MiniUPnP daemon source code</td> | ||
3492 | <td></td> | ||
3493 | </tr> | ||
3494 | </table> | ||
3495 | |||
3496 | <p><a href="..">Home</a></p> | ||
3497 | <p>Contact: miniupnp _AT_ free _DOT_ fr</p> | ||
3498 | <p align="center"> | ||
3499 | <a href="https://validator.w3.org/check?uri=referer"><img src="https://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Transitional" height="31" width="88" /></a> | ||
3500 | <a href="https://jigsaw.w3.org/css-validator/check/referer"><img style="border:0;width:88px;height:31px" src="https://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!" /></a> | ||
3501 | <!-- | ||
3502 | <a href="https://freshmeat.net/projects/miniupnp"><img src="https://s3.amazonaws.com/entp-tender-production/assets/bc5be96f147ec8db3c10fc017f1f53889904ef5b/fm_logo_white_150_normal.png" border="0" alt="freshmeat.net" /></a> | ||
3503 | --> | ||
3504 | <!-- https://futuresimple.github.com/images/github_logo.png --> | ||
3505 | <!-- <a href="https://github.com/miniupnp/miniupnp"><img src="https://assets-cdn.github.com/images/modules/logos_page/GitHub-Logo.png" alt="github.com" height="31" /></a> --> | ||
3506 | <a href="https://github.com/miniupnp/miniupnp"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://github.blog/wp-content/uploads/2008/12/forkme_left_green_007200.png" alt="Fork me on GitHub" /></a> | ||
3507 | </p> | ||
3508 | |||
3509 | <script type="text/javascript"> | ||
3510 | var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www."); | ||
3511 | document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E")); | ||
3512 | </script> | ||
3513 | <script type="text/javascript"> | ||
3514 | try { | ||
3515 | var ua = 'UA-10295521'; | ||
3516 | if(window.location.hostname == 'miniupnp.free.fr') | ||
3517 | ua += '-1'; | ||
3518 | else if(window.location.hostname == 'miniupnp.tuxfamily.org') | ||
3519 | ua += '-2'; | ||
3520 | else ua = ''; | ||
3521 | if(ua != '') { | ||
3522 | var pageTracker = _gat._getTracker(ua); | ||
3523 | pageTracker._trackPageview(); | ||
3524 | } | ||
3525 | } catch(err) {}</script> | ||
3526 | </body> | ||
3527 | </html> | ||
3528 | |||
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py index 85c1f79ff3..077472b8b3 100644 --- a/bitbake/lib/bb/tests/fetch.py +++ b/bitbake/lib/bb/tests/fetch.py | |||
@@ -7,7 +7,10 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | import contextlib | 9 | import contextlib |
10 | import shutil | ||
10 | import unittest | 11 | import unittest |
12 | import unittest.mock | ||
13 | import urllib.parse | ||
11 | import hashlib | 14 | import hashlib |
12 | import tempfile | 15 | import tempfile |
13 | import collections | 16 | import collections |
@@ -17,6 +20,7 @@ import tarfile | |||
17 | from bb.fetch2 import URI | 20 | from bb.fetch2 import URI |
18 | from bb.fetch2 import FetchMethod | 21 | from bb.fetch2 import FetchMethod |
19 | import bb | 22 | import bb |
23 | import bb.utils | ||
20 | from bb.tests.support.httpserver import HTTPService | 24 | from bb.tests.support.httpserver import HTTPService |
21 | 25 | ||
22 | def skipIfNoNetwork(): | 26 | def skipIfNoNetwork(): |
@@ -24,6 +28,18 @@ def skipIfNoNetwork(): | |||
24 | return unittest.skip("network test") | 28 | return unittest.skip("network test") |
25 | return lambda f: f | 29 | return lambda f: f |
26 | 30 | ||
31 | |||
32 | @contextlib.contextmanager | ||
33 | def hide_directory(directory): | ||
34 | """Hide the given directory and restore it after the context is left""" | ||
35 | temp_name = directory + ".bak" | ||
36 | os.rename(directory, temp_name) | ||
37 | try: | ||
38 | yield | ||
39 | finally: | ||
40 | os.rename(temp_name, directory) | ||
41 | |||
42 | |||
27 | class TestTimeout(Exception): | 43 | class TestTimeout(Exception): |
28 | # Indicate to pytest that this is not a test suite | 44 | # Indicate to pytest that this is not a test suite |
29 | __test__ = False | 45 | __test__ = False |
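The hide_directory() helper added in the hunk above is small and self-contained, so a usage sketch is easy to give. A minimal example, assuming only the Python standard library (the "dldir" path is hypothetical):

import contextlib
import os

@contextlib.contextmanager
def hide_directory(directory):
    """Hide the given directory and restore it after the context is left"""
    temp_name = directory + ".bak"
    os.rename(directory, temp_name)
    try:
        yield
    finally:
        os.rename(temp_name, directory)

os.makedirs("dldir", exist_ok=True)
with hide_directory("dldir"):
    # Renamed away: a fetch test can now exercise its "cache missing" path.
    assert not os.path.exists("dldir")
# The finally clause restores the directory even if the body raised.
assert os.path.exists("dldir")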
@@ -323,6 +339,21 @@ class URITest(unittest.TestCase): | |||
323 | 'params': {"downloadfilename" : "EGPL-T101.zip"}, | 339 | 'params': {"downloadfilename" : "EGPL-T101.zip"}, |
324 | 'query': {"9BE0BF6657": None}, | 340 | 'query': {"9BE0BF6657": None}, |
325 | 'relative': False | 341 | 'relative': False |
342 | }, | ||
343 | "file://example@.service": { | ||
344 | 'uri': 'file:example%40.service', | ||
345 | 'scheme': 'file', | ||
346 | 'hostname': '', | ||
347 | 'port': None, | ||
348 | 'hostport': '', | ||
349 | 'path': 'example@.service', | ||
350 | 'userinfo': '', | ||
352 | 'username': '', | ||
353 | 'password': '', | ||
354 | 'params': {}, | ||
355 | 'query': {}, | ||
356 | 'relative': True | ||
326 | } | 357 | } |
327 | 358 | ||
328 | } | 359 | } |
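The new "file://example@.service" case above pins down how a "@" in a relative file: URI is handled. A rough sketch of what the test asserts, assuming a bitbake checkout on sys.path (the expected values are taken directly from the table above):

from bb.fetch2 import URI

uri = URI("file://example@.service")
print(uri.path)      # example@.service -- the parsed path keeps the literal "@"
print(uri.relative)  # True -- no leading slash, so the path is relative
print(str(uri))      # file:example%40.service -- re-serializing percent-encodes "@"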
@@ -459,16 +490,16 @@ class FetcherTest(unittest.TestCase): | |||
459 | class MirrorUriTest(FetcherTest): | 490 | class MirrorUriTest(FetcherTest): |
460 | 491 | ||
461 | replaceuris = { | 492 | replaceuris = { |
462 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") | 493 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "http://somewhere.org/somedir/") |
463 | : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", | 494 | : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", |
464 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") | 495 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") |
465 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 496 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
466 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") | 497 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") |
467 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 498 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
468 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") | 499 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") |
469 | : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 500 | : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
470 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") | 501 | ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") |
471 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", | 502 | : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", |
472 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") | 503 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") |
473 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", | 504 | : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", |
474 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") | 505 | ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") |
@@ -481,12 +512,12 @@ class MirrorUriTest(FetcherTest): | |||
481 | : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", | 512 | : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", |
482 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") | 513 | ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") |
483 | : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", | 514 | : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", |
484 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") | 515 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") |
485 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 516 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
486 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") | 517 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") |
487 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 518 | : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
488 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") | 519 | ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") |
489 | : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", | 520 | : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", |
490 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") | 521 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") |
491 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", | 522 | : "http://somewhere2.org/somefile_1.2.3.tar.gz", |
492 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") | 523 | ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") |
@@ -502,6 +533,10 @@ class MirrorUriTest(FetcherTest): | |||
502 | : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", | 533 | : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", |
503 | ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz") | 534 | ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz") |
504 | : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", | 535 | : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", |
536 | ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/") | ||
537 | : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", | ||
538 | ("git://internal.git.server.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/") | ||
539 | : None, | ||
505 | 540 | ||
506 | #Renaming files doesn't work | 541 | #Renaming files doesn't work |
507 | #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" | 542 | #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" |
@@ -510,8 +545,8 @@ class MirrorUriTest(FetcherTest): | |||
510 | 545 | ||
511 | mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ | 546 | mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ |
512 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ | 547 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ |
513 | "https://.*/.* file:///someotherpath/downloads/ " \ | 548 | "https?://.*/.* file:///someotherpath/downloads/ " \ |
514 | "http://.*/.* file:///someotherpath/downloads/" | 549 | "svn://svn.server1.com/ svn://svn.server2.com/" |
515 | 550 | ||
516 | def test_urireplace(self): | 551 | def test_urireplace(self): |
517 | self.d.setVar("FILESPATH", ".") | 552 | self.d.setVar("FILESPATH", ".") |
@@ -520,7 +555,7 @@ class MirrorUriTest(FetcherTest): | |||
520 | ud.setup_localpath(self.d) | 555 | ud.setup_localpath(self.d) |
521 | mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2])) | 556 | mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2])) |
522 | newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d) | 557 | newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d) |
523 | self.assertEqual([v], newuris) | 558 | self.assertEqual([v] if v else [], newuris) |
524 | 559 | ||
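Each replaceuris key above is a (source URI, match regex, replacement) triple, and build_mirroruris() returns the rewritten candidates, which can now be empty when a negative-lookahead pattern such as git://(?!internal\.git\.server).*/.* rejects the source; hence the "[v] if v else []" comparison. A rough sketch of the regex core only (the real code additionally handles URI parameters and BASENAME/MIRRORNAME substitution):

    import re

    # Illustrative approximation, not bitbake's implementation: a mirror
    # replacement ending in '/' keeps the original basename.
    uri = "http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2"
    pattern, mirror = r"http://.*/.*", "file:///somepath/downloads/"
    candidates = []
    if re.match(pattern, uri):
        candidates.append(mirror + uri.rsplit("/", 1)[-1])
    assert candidates == ["file:///somepath/downloads/subversion-1.7.1.tar.bz2"]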
525 | def test_urilist1(self): | 560 | def test_urilist1(self): |
526 | fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) | 561 | fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) |
@@ -535,6 +570,13 @@ class MirrorUriTest(FetcherTest): | |||
535 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | 570 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) |
536 | self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) | 571 | self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) |
537 | 572 | ||
573 | def test_urilistsvn(self): | ||
574 | # Catch svn:// -> svn:// bug | ||
575 | fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d) | ||
576 | mirrors = bb.fetch2.mirror_from_string(self.mirrorvar) | ||
577 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | ||
578 | self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2']) | ||
579 | |||
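mirror_from_string() splits the whitespace-separated variable into (pattern, replacement) pairs, which is why mirrorvar above reads as alternating columns. A short sketch of the expected pairing, assuming mirror_from_string() still returns two-element lists:

    import bb.fetch2

    mirrorvar = ("http://.*/.* file:///somepath/downloads/ "
                 "svn://svn.server1.com/ svn://svn.server2.com/")
    mirrors = bb.fetch2.mirror_from_string(mirrorvar)
    assert mirrors == [
        ["http://.*/.*", "file:///somepath/downloads/"],
        ["svn://svn.server1.com/", "svn://svn.server2.com/"],
    ]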
538 | def test_mirror_of_mirror(self): | 580 | def test_mirror_of_mirror(self): |
539 | # Test if mirror of a mirror works | 581 | # Test if mirror of a mirror works |
540 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" | 582 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" |
@@ -547,16 +589,16 @@ class MirrorUriTest(FetcherTest): | |||
547 | 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', | 589 | 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', |
548 | 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) | 590 | 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) |
549 | 591 | ||
550 | recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \ | 592 | recmirrorvar = "https://.*/[^/]* http://aaaa/A/A/A/ " \ |
551 | "https://.*/[^/]* https://BBBB/B/B/B/" | 593 | "https://.*/[^/]* https://bbbb/B/B/B/" |
552 | 594 | ||
553 | def test_recursive(self): | 595 | def test_recursive(self): |
554 | fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) | 596 | fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) |
555 | mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar) | 597 | mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar) |
556 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | 598 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) |
557 | self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz', | 599 | self.assertEqual(uris, ['http://aaaa/A/A/A/bitbake/bitbake-1.0.tar.gz', |
558 | 'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz', | 600 | 'https://bbbb/B/B/B/bitbake/bitbake-1.0.tar.gz', |
559 | 'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz']) | 601 | 'http://aaaa/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz']) |
560 | 602 | ||
561 | 603 | ||
562 | class GitDownloadDirectoryNamingTest(FetcherTest): | 604 | class GitDownloadDirectoryNamingTest(FetcherTest): |
@@ -679,7 +721,7 @@ class GitShallowTarballNamingTest(FetcherTest): | |||
679 | class CleanTarballTest(FetcherTest): | 721 | class CleanTarballTest(FetcherTest): |
680 | def setUp(self): | 722 | def setUp(self): |
681 | super(CleanTarballTest, self).setUp() | 723 | super(CleanTarballTest, self).setUp() |
682 | self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https" | 724 | self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https;branch=master" |
683 | self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" | 725 | self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" |
684 | 726 | ||
685 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') | 727 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') |
@@ -720,6 +762,7 @@ class FetcherLocalTest(FetcherTest): | |||
720 | os.makedirs(self.localsrcdir) | 762 | os.makedirs(self.localsrcdir) |
721 | touch(os.path.join(self.localsrcdir, 'a')) | 763 | touch(os.path.join(self.localsrcdir, 'a')) |
722 | touch(os.path.join(self.localsrcdir, 'b')) | 764 | touch(os.path.join(self.localsrcdir, 'b')) |
765 | touch(os.path.join(self.localsrcdir, 'c@d')) | ||
723 | os.makedirs(os.path.join(self.localsrcdir, 'dir')) | 766 | os.makedirs(os.path.join(self.localsrcdir, 'dir')) |
724 | touch(os.path.join(self.localsrcdir, 'dir', 'c')) | 767 | touch(os.path.join(self.localsrcdir, 'dir', 'c')) |
725 | touch(os.path.join(self.localsrcdir, 'dir', 'd')) | 768 | touch(os.path.join(self.localsrcdir, 'dir', 'd')) |
@@ -751,6 +794,10 @@ class FetcherLocalTest(FetcherTest): | |||
751 | tree = self.fetchUnpack(['file://a', 'file://dir/c']) | 794 | tree = self.fetchUnpack(['file://a', 'file://dir/c']) |
752 | self.assertEqual(tree, ['a', 'dir/c']) | 795 | self.assertEqual(tree, ['a', 'dir/c']) |
753 | 796 | ||
797 | def test_local_at(self): | ||
798 | tree = self.fetchUnpack(['file://c@d']) | ||
799 | self.assertEqual(tree, ['c@d']) | ||
800 | |||
754 | def test_local_backslash(self): | 801 | def test_local_backslash(self): |
755 | tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device']) | 802 | tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device']) |
756 | self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device']) | 803 | self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device']) |
@@ -1056,12 +1103,6 @@ class FetcherNetworkTest(FetcherTest): | |||
1056 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) | 1103 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) |
1057 | 1104 | ||
1058 | @skipIfNoNetwork() | 1105 | @skipIfNoNetwork() |
1059 | def test_gitfetch_tagandrev(self): | ||
1060 | # SRCREV is set but does not match rev= parameter | ||
1061 | url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https" | ||
1062 | self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) | ||
1063 | |||
1064 | @skipIfNoNetwork() | ||
1065 | def test_gitfetch_usehead(self): | 1106 | def test_gitfetch_usehead(self): |
1066 | # Since self.gitfetcher() sets SRCREV we expect this to override | 1107 | # Since self.gitfetcher() sets SRCREV we expect this to override |
1067 | # `usehead=1' and instead fetch the specified SRCREV. See | 1108 | # `usehead=1' and instead fetch the specified SRCREV. See |
@@ -1095,7 +1136,7 @@ class FetcherNetworkTest(FetcherTest): | |||
1095 | @skipIfNoNetwork() | 1136 | @skipIfNoNetwork() |
1096 | def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): | 1137 | def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): |
1097 | realurl = "https://git.openembedded.org/bitbake" | 1138 | realurl = "https://git.openembedded.org/bitbake" |
1098 | recipeurl = "git://someserver.org/bitbake;protocol=https" | 1139 | recipeurl = "git://someserver.org/bitbake;protocol=https;branch=master" |
1099 | self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") | 1140 | self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") |
1100 | os.chdir(self.tempdir) | 1141 | os.chdir(self.tempdir) |
1101 | self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir) | 1142 | self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir) |
@@ -1250,7 +1291,6 @@ class FetcherNetworkTest(FetcherTest): | |||
1250 | 1291 | ||
1251 | class SVNTest(FetcherTest): | 1292 | class SVNTest(FetcherTest): |
1252 | def skipIfNoSvn(): | 1293 | def skipIfNoSvn(): |
1253 | import shutil | ||
1254 | if not shutil.which("svn"): | 1294 | if not shutil.which("svn"): |
1255 | return unittest.skip("svn not installed, tests being skipped") | 1295 | return unittest.skip("svn not installed, tests being skipped") |
1256 | 1296 | ||
@@ -1373,15 +1413,17 @@ class TrustedNetworksTest(FetcherTest): | |||
1373 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) | 1413 | self.assertFalse(bb.fetch.trusted_network(self.d, url)) |
1374 | 1414 | ||
1375 | class URLHandle(unittest.TestCase): | 1415 | class URLHandle(unittest.TestCase): |
1376 | 1416 | # Quote password as per RFC3986 | |
1417 | password = urllib.parse.quote(r"!#$%^&*()-_={}[]\|:?,.<>~`", r"!$&'/()*+,;=") | ||
1377 | datatable = { | 1418 | datatable = { |
1378 | "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), | 1419 | "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), |
1379 | "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), | 1420 | "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), |
1380 | "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), | 1421 | "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), |
1381 | "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}), | 1422 | "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}), |
1382 | "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), | 1423 | "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), |
1424 | "file://example@.service": ('file', '', 'example@.service', '', '', {}), | ||
1383 | "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}), | 1425 | "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}), |
1384 | r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}), | 1426 | 'git://s.o-me_ONE:%s@git.openembedded.org/bitbake;branch=main;protocol=https' % password: ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', password, {'branch': 'main', 'protocol' : 'https'}), |
1385 | } | 1427 | } |
1386 | # we require a pathname to encodeurl but users can still pass such urls to | 1428 | # we require a pathname to encodeurl but users can still pass such urls to |
1387 | # decodeurl and we need to handle them | 1429 | # decodeurl and we need to handle them |
@@ -1399,6 +1441,8 @@ class URLHandle(unittest.TestCase): | |||
1399 | def test_encodeurl(self): | 1441 | def test_encodeurl(self): |
1400 | for k, v in self.datatable.items(): | 1442 | for k, v in self.datatable.items(): |
1401 | result = bb.fetch.encodeurl(v) | 1443 | result = bb.fetch.encodeurl(v) |
1444 | if result.startswith("file:"): | ||
1445 | result = urllib.parse.unquote(result) | ||
1402 | self.assertEqual(result, k) | 1446 | self.assertEqual(result, k) |
1403 | 1447 | ||
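test_encodeurl now unquotes file: results before comparing because encodeurl() percent-encodes characters such as '@' in local paths. A round-trip sketch under that assumption:

    import urllib.parse
    import bb.fetch2

    # decodeurl() yields (scheme, host, path, user, password, params);
    # encodeurl() reassembles it, possibly percent-encoded for file: URLs.
    parts = bb.fetch2.decodeurl("file://example@.service")
    assert parts[:3] == ("file", "", "example@.service")
    result = bb.fetch2.encodeurl(parts)
    assert urllib.parse.unquote(result) == "file://example@.service"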
1404 | class FetchLatestVersionTest(FetcherTest): | 1448 | class FetchLatestVersionTest(FetcherTest): |
@@ -1419,12 +1463,12 @@ class FetchLatestVersionTest(FetcherTest): | |||
1419 | ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "") | 1463 | ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "") |
1420 | : "1.4.0", | 1464 | : "1.4.0", |
1421 | # combination version pattern | 1465 | # combination version pattern |
1422 | ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") | 1466 | ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") |
1423 | : "1.2.0", | 1467 | : "1.2.0", |
1424 | ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") | 1468 | ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") |
1425 | : "2014.01", | 1469 | : "2014.01", |
1426 | # version pattern "yyyymmdd" | 1470 | # version pattern "yyyymmdd" |
1427 | ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") | 1471 | ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") |
1428 | : "20120614", | 1472 | : "20120614", |
1429 | # packages with a valid UPSTREAM_CHECK_GITTAGREGEX | 1473 | # packages with a valid UPSTREAM_CHECK_GITTAGREGEX |
1430 | # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing | 1474 | # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing |
@@ -1440,98 +1484,126 @@ class FetchLatestVersionTest(FetcherTest): | |||
1440 | : "0.28.0", | 1484 | : "0.28.0", |
1441 | } | 1485 | } |
1442 | 1486 | ||
1487 | WgetTestData = collections.namedtuple("WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"], defaults=[None, None, None]) | ||
1443 | test_wget_uris = { | 1488 | test_wget_uris = { |
1444 | # | 1489 | # |
1445 | # packages with versions inside directory name | 1490 | # packages with versions inside directory name |
1446 | # | 1491 | # |
1447 | # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 | 1492 | # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 |
1448 | ("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "") | 1493 | WgetTestData("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2") |
1449 | : "2.24.2", | 1494 | : "2.24.2", |
1450 | # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz | 1495 | # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz |
1451 | ("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "") | 1496 | WgetTestData("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz") |
1452 | : "1.6.0", | 1497 | : "1.6.0", |
1453 | # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz | 1498 | # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz |
1454 | ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") | 1499 | WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz") |
1455 | : "2.8.12.1", | 1500 | : "2.8.12.1", |
1456 | # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz | 1501 | # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz |
1457 | ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "") | 1502 | WgetTestData("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz") |
1458 | : "2.10.3", | 1503 | : "2.10.3", |
1459 | # | 1504 | # |
1460 | # packages with versions only in current directory | 1505 | # packages with versions only in current directory |
1461 | # | 1506 | # |
1462 | # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 | 1507 | # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 |
1463 | ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") | 1508 | WgetTestData("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2") |
1464 | : "2.19", | 1509 | : "2.19", |
1465 | # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 | 1510 | # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 |
1466 | ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") | 1511 | WgetTestData("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2") |
1467 | : "20120814", | 1512 | : "20120814", |
1468 | # | 1513 | # |
1469 | # packages with "99" in the name of possible version | 1514 | # packages with "99" in the name of possible version |
1470 | # | 1515 | # |
1471 | # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz | 1516 | # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz |
1472 | ("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "") | 1517 | WgetTestData("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz") |
1473 | : "5.0", | 1518 | : "5.0", |
1474 | # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 | 1519 | # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 |
1475 | ("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "") | 1520 | WgetTestData("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2") |
1476 | : "1.15.1", | 1521 | : "1.15.1", |
1477 | # | 1522 | # |
1478 | # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX | 1523 | # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX |
1479 | # | 1524 | # |
1480 | # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 | 1525 | # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 |
1481 | # https://github.com/apple/cups/releases | 1526 | # https://github.com/apple/cups/releases |
1482 | ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") | 1527 | WgetTestData("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", check_uri="/apple/cups/releases", check_regex=r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") |
1483 | : "2.0.0", | 1528 | : "2.0.0", |
1484 | # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz | 1529 | # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz |
1485 | # http://ftp.debian.org/debian/pool/main/d/db5.3/ | 1530 | # http://ftp.debian.org/debian/pool/main/d/db5.3/ |
1486 | ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") | 1531 | WgetTestData("db", "/berkeley-db/db-5.3.21.tar.gz", check_uri="/debian/pool/main/d/db5.3/", check_regex=r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") |
1487 | : "5.3.10", | 1532 | : "5.3.10", |
1488 | # | 1533 | # |
1489 | # packages where the tarball compression changed in the new version | 1534 | # packages where the tarball compression changed in the new version |
1490 | # | 1535 | # |
1491 | # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz | 1536 | # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz |
1492 | ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "") | 1537 | WgetTestData("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz") |
1493 | : "2.8", | 1538 | : "2.8", |
1539 | |||
1540 | # | ||
1541 | # packages where the path doesn't actually contain the filename, so downloadfilename should be respected | ||
1542 | # | ||
1543 | WgetTestData("miniupnpd", "/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz;downloadfilename=miniupnpd_2.1.20191006.tar.gz", pv="2.1.20191006", check_uri="/software/miniupnp/download.php", check_regex=r"miniupnpd-(?P<pver>\d+(\.\d+)+)\.tar") | ||
1544 | : "2.3.7", | ||
1494 | } | 1545 | } |
1495 | 1546 | ||
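The switch to WgetTestData leans on the defaults argument of collections.namedtuple (Python 3.7 onwards): defaults bind to the rightmost fields, so the short two-argument entries above leave pv, check_uri and check_regex as None. In isolation:

    import collections

    WgetTestData = collections.namedtuple(
        "WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"],
        defaults=[None, None, None])
    # Only the fields without defaults (pn, path) are required.
    entry = WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
    assert entry.pv is None and entry.check_uri is None and entry.check_regex is None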
1547 | test_crate_uris = { | ||
1548 | # basic example; version pattern "A.B.C+cargo-D.E.F" | ||
1549 | ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69") | ||
1550 | : "0.9.29" | ||
1551 | } | ||
1552 | |||
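The crate entry's version carries SemVer-style build metadata ("+cargo-0.69"); the part of interest for upstream comparison is presumably the leading A.B.C. A plain-Python sketch of that split:

    version = "0.9.18+cargo-0.69"
    base, _, build_metadata = version.partition("+")
    assert base == "0.9.18" and build_metadata == "cargo-0.69"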
1496 | @skipIfNoNetwork() | 1553 | @skipIfNoNetwork() |
1497 | def test_git_latest_versionstring(self): | 1554 | def test_git_latest_versionstring(self): |
1498 | for k, v in self.test_git_uris.items(): | 1555 | for k, v in self.test_git_uris.items(): |
1499 | self.d.setVar("PN", k[0]) | 1556 | with self.subTest(pn=k[0]): |
1500 | self.d.setVar("SRCREV", k[2]) | 1557 | self.d.setVar("PN", k[0]) |
1501 | self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) | 1558 | self.d.setVar("SRCREV", k[2]) |
1502 | ud = bb.fetch2.FetchData(k[1], self.d) | 1559 | self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) |
1503 | pupver = ud.method.latest_versionstring(ud, self.d) | 1560 | ud = bb.fetch2.FetchData(k[1], self.d) | |
1504 | verstring = pupver[0] | 1561 | pupver = ud.method.latest_versionstring(ud, self.d) | |
1505 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) | 1562 | verstring = pupver[0] |
1506 | r = bb.utils.vercmp_string(v, verstring) | 1563 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) |
1507 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) | 1564 | r = bb.utils.vercmp_string(v, verstring) |
1508 | if k[4]: | 1565 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) |
1509 | r = bb.utils.vercmp_string(verstring, k[4]) | 1566 | if k[4]: |
1510 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4])) | 1567 | r = bb.utils.vercmp_string(verstring, k[4]) |
1568 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4])) | ||
1511 | 1569 | ||
1512 | def test_wget_latest_versionstring(self): | 1570 | def test_wget_latest_versionstring(self): |
1513 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" | 1571 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" |
1514 | server = HTTPService(testdata) | 1572 | server = HTTPService(testdata, host="127.0.0.1") |
1515 | server.start() | 1573 | server.start() |
1516 | port = server.port | 1574 | port = server.port |
1517 | try: | 1575 | try: |
1518 | for k, v in self.test_wget_uris.items(): | 1576 | for data, v in self.test_wget_uris.items(): |
1577 | with self.subTest(pn=data.pn): | ||
1578 | self.d.setVar("PN", data.pn) | ||
1579 | self.d.setVar("PV", data.pv) | ||
1580 | if data.check_uri: | ||
1581 | checkuri = "http://127.0.0.1:%s/%s" % (port, data.check_uri) | ||
1582 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) | ||
1583 | if data.check_regex: | ||
1584 | self.d.setVar("UPSTREAM_CHECK_REGEX", data.check_regex) | ||
1585 | |||
1586 | url = "http://127.0.0.1:%s/%s" % (port, data.path) | ||
1587 | ud = bb.fetch2.FetchData(url, self.d) | ||
1588 | pupver = ud.method.latest_versionstring(ud, self.d) | ||
1589 | verstring = pupver[0] | ||
1590 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % data.pn) | ||
1591 | r = bb.utils.vercmp_string(v, verstring) | ||
1592 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (data.pn, v, verstring)) | ||
1593 | finally: | ||
1594 | server.stop() | ||
1595 | |||
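Both version-check loops are now wrapped in self.subTest() so one bad upstream entry no longer masks the remaining packages; each failing iteration is reported individually and the loop keeps going. The pattern in isolation:

    import unittest

    class SubTestExample(unittest.TestCase):
        def test_entries(self):
            for pn, pv in {"util-linux": "2.24.2", "cmake": "2.8.12.1"}.items():
                with self.subTest(pn=pn):
                    # A failure here is recorded against this pn and the
                    # loop continues with the next entry.
                    self.assertTrue(pv)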
1596 | @skipIfNoNetwork() | ||
1597 | def test_crate_latest_versionstring(self): | ||
1598 | for k, v in self.test_crate_uris.items(): | ||
1599 | with self.subTest(pn=k[0]): | ||
1519 | self.d.setVar("PN", k[0]) | 1600 | self.d.setVar("PN", k[0]) |
1520 | checkuri = "" | 1601 | ud = bb.fetch2.FetchData(k[1], self.d) |
1521 | if k[2]: | ||
1522 | checkuri = "http://localhost:%s/" % port + k[2] | ||
1523 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) | ||
1524 | self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) | ||
1525 | url = "http://localhost:%s/" % port + k[1] | ||
1526 | ud = bb.fetch2.FetchData(url, self.d) | ||
1527 | pupver = ud.method.latest_versionstring(ud, self.d) | 1602 | pupver = ud.method.latest_versionstring(ud, self.d) |
1528 | verstring = pupver[0] | 1603 | verstring = pupver[0] |
1529 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) | 1604 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) |
1530 | r = bb.utils.vercmp_string(v, verstring) | 1605 | r = bb.utils.vercmp_string(v, verstring) |
1531 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) | 1606 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) |
1532 | finally: | ||
1533 | server.stop() | ||
1534 | |||
1535 | 1607 | ||
1536 | class FetchCheckStatusTest(FetcherTest): | 1608 | class FetchCheckStatusTest(FetcherTest): |
1537 | test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", | 1609 | test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", |
@@ -1715,6 +1787,8 @@ class GitShallowTest(FetcherTest): | |||
1715 | if cwd is None: | 1787 | if cwd is None: |
1716 | cwd = self.gitdir | 1788 | cwd = self.gitdir |
1717 | actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() | 1789 | actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() |
1790 | # Resolve references into the same format as the comparison (needed by git 2.48 onwards) | ||
1791 | actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines() | ||
1718 | full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() | 1792 | full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() |
1719 | self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) | 1793 | self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) |
1720 | 1794 | ||
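The added rev-parse pass normalizes whatever for-each-ref prints into fully qualified ref names, so both sides of the comparison use one notation; per the comment this became necessary with git 2.48. Standalone, the normalization looks roughly like:

    import subprocess

    def full_ref_names(refs, cwd):
        # Map short names such as 'master' or 'origin/master' to
        # 'refs/heads/master' / 'refs/remotes/origin/master'.
        out = subprocess.check_output(
            ["git", "rev-parse", "--symbolic-full-name"] + list(refs),
            cwd=cwd, text=True)
        return out.splitlines()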
@@ -1761,7 +1835,6 @@ class GitShallowTest(FetcherTest): | |||
1761 | def fetch_shallow(self, uri=None, disabled=False, keepclone=False): | 1835 | def fetch_shallow(self, uri=None, disabled=False, keepclone=False): |
1762 | """Fetch a uri, generating a shallow tarball, then unpack using it""" | 1836 | """Fetch a uri, generating a shallow tarball, then unpack using it""" |
1763 | fetcher, ud = self.fetch_and_unpack(uri) | 1837 | fetcher, ud = self.fetch_and_unpack(uri) |
1764 | assert os.path.exists(ud.clonedir), 'Git clone in DLDIR (%s) does not exist for uri %s' % (ud.clonedir, uri) | ||
1765 | 1838 | ||
1766 | # Confirm that the unpacked repo is unshallow | 1839 | # Confirm that the unpacked repo is unshallow |
1767 | if not disabled: | 1840 | if not disabled: |
@@ -1769,9 +1842,10 @@ class GitShallowTest(FetcherTest): | |||
1769 | 1842 | ||
1770 | # fetch and unpack, from the shallow tarball | 1843 | # fetch and unpack, from the shallow tarball |
1771 | bb.utils.remove(self.gitdir, recurse=True) | 1844 | bb.utils.remove(self.gitdir, recurse=True) |
1772 | bb.process.run('chmod u+w -R "%s"' % ud.clonedir) | 1845 | if os.path.exists(ud.clonedir): |
1773 | bb.utils.remove(ud.clonedir, recurse=True) | 1846 | bb.process.run('chmod u+w -R "%s"' % ud.clonedir) |
1774 | bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) | 1847 | bb.utils.remove(ud.clonedir, recurse=True) |
1848 | bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) | ||
1775 | 1849 | ||
1776 | # confirm that the unpacked repo is used when no git clone or git | 1850 | # confirm that the unpacked repo is used when no git clone or git |
1777 | # mirror tarball is available | 1851 | # mirror tarball is available |
@@ -1854,7 +1928,12 @@ class GitShallowTest(FetcherTest): | |||
1854 | self.add_empty_file('c') | 1928 | self.add_empty_file('c') |
1855 | self.assertRevCount(3, cwd=self.srcdir) | 1929 | self.assertRevCount(3, cwd=self.srcdir) |
1856 | 1930 | ||
1931 | # Clone without tarball | ||
1932 | self.d.setVar('BB_GIT_SHALLOW', '0') | ||
1933 | fetcher, ud = self.fetch() | ||
1934 | |||
1857 | # Clone and generate mirror tarball | 1935 | # Clone and generate mirror tarball |
1936 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
1858 | fetcher, ud = self.fetch() | 1937 | fetcher, ud = self.fetch() |
1859 | 1938 | ||
1860 | # Ensure we have a current mirror tarball, but an out of date clone | 1939 | # Ensure we have a current mirror tarball, but an out of date clone |
@@ -1866,6 +1945,7 @@ class GitShallowTest(FetcherTest): | |||
1866 | fetcher, ud = self.fetch() | 1945 | fetcher, ud = self.fetch() |
1867 | fetcher.unpack(self.d.getVar('WORKDIR')) | 1946 | fetcher.unpack(self.d.getVar('WORKDIR')) |
1868 | self.assertRevCount(1) | 1947 | self.assertRevCount(1) |
1948 | assert os.path.exists(os.path.join(self.d.getVar('WORKDIR'), 'git', 'c')) | ||
1869 | 1949 | ||
1870 | def test_shallow_single_branch_no_merge(self): | 1950 | def test_shallow_single_branch_no_merge(self): |
1871 | self.add_empty_file('a') | 1951 | self.add_empty_file('a') |
@@ -1963,7 +2043,7 @@ class GitShallowTest(FetcherTest): | |||
1963 | self.git('submodule update', cwd=self.srcdir) | 2043 | self.git('submodule update', cwd=self.srcdir) |
1964 | self.git('commit -m submodule -a', cwd=self.srcdir) | 2044 | self.git('commit -m submodule -a', cwd=self.srcdir) |
1965 | 2045 | ||
1966 | uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir | 2046 | uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir |
1967 | 2047 | ||
1968 | # Fetch once to generate the shallow tarball | 2048 | # Fetch once to generate the shallow tarball |
1969 | fetcher, ud = self.fetch(uri) | 2049 | fetcher, ud = self.fetch(uri) |
@@ -2004,70 +2084,17 @@ class GitShallowTest(FetcherTest): | |||
2004 | assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] | 2084 | assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] |
2005 | assert os.path.exists(os.path.join(self.gitdir, 'c')) | 2085 | assert os.path.exists(os.path.join(self.gitdir, 'c')) |
2006 | 2086 | ||
2007 | def test_shallow_multi_one_uri(self): | ||
2008 | # Create initial git repo | ||
2009 | self.add_empty_file('a') | ||
2010 | self.add_empty_file('b') | ||
2011 | self.git('checkout -b a_branch', cwd=self.srcdir) | ||
2012 | self.add_empty_file('c') | ||
2013 | self.add_empty_file('d') | ||
2014 | self.git('checkout master', cwd=self.srcdir) | ||
2015 | self.git('tag v0.0 a_branch', cwd=self.srcdir) | ||
2016 | self.add_empty_file('e') | ||
2017 | self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir) | ||
2018 | self.add_empty_file('f') | ||
2019 | self.assertRevCount(7, cwd=self.srcdir) | ||
2020 | |||
2021 | uri = self.d.getVar('SRC_URI').split()[0] | ||
2022 | uri = '%s;branch=master,a_branch;name=master,a_branch' % uri | ||
2023 | |||
2024 | self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') | ||
2025 | self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') | ||
2026 | self.d.setVar('SRCREV_master', '${AUTOREV}') | ||
2027 | self.d.setVar('SRCREV_a_branch', '${AUTOREV}') | ||
2028 | |||
2029 | self.fetch_shallow(uri) | ||
2030 | |||
2031 | self.assertRevCount(5) | ||
2032 | self.assertRefs(['master', 'origin/master', 'origin/a_branch']) | ||
2033 | |||
2034 | def test_shallow_multi_one_uri_depths(self): | ||
2035 | # Create initial git repo | ||
2036 | self.add_empty_file('a') | ||
2037 | self.add_empty_file('b') | ||
2038 | self.git('checkout -b a_branch', cwd=self.srcdir) | ||
2039 | self.add_empty_file('c') | ||
2040 | self.add_empty_file('d') | ||
2041 | self.git('checkout master', cwd=self.srcdir) | ||
2042 | self.add_empty_file('e') | ||
2043 | self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir) | ||
2044 | self.add_empty_file('f') | ||
2045 | self.assertRevCount(7, cwd=self.srcdir) | ||
2046 | |||
2047 | uri = self.d.getVar('SRC_URI').split()[0] | ||
2048 | uri = '%s;branch=master,a_branch;name=master,a_branch' % uri | ||
2049 | |||
2050 | self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') | ||
2051 | self.d.setVar('BB_GIT_SHALLOW_DEPTH_master', '3') | ||
2052 | self.d.setVar('BB_GIT_SHALLOW_DEPTH_a_branch', '1') | ||
2053 | self.d.setVar('SRCREV_master', '${AUTOREV}') | ||
2054 | self.d.setVar('SRCREV_a_branch', '${AUTOREV}') | ||
2055 | |||
2056 | self.fetch_shallow(uri) | ||
2057 | |||
2058 | self.assertRevCount(4, ['--all']) | ||
2059 | self.assertRefs(['master', 'origin/master', 'origin/a_branch']) | ||
2060 | |||
2061 | def test_shallow_clone_preferred_over_shallow(self): | 2087 | def test_shallow_clone_preferred_over_shallow(self): |
2062 | self.add_empty_file('a') | 2088 | self.add_empty_file('a') |
2063 | self.add_empty_file('b') | 2089 | self.add_empty_file('b') |
2064 | 2090 | ||
2065 | # Fetch once to generate the shallow tarball | 2091 | # Fetch once to generate the shallow tarball |
2092 | self.d.setVar('BB_GIT_SHALLOW', '0') | ||
2066 | fetcher, ud = self.fetch() | 2093 | fetcher, ud = self.fetch() |
2067 | assert os.path.exists(os.path.join(self.dldir, ud.mirrortarballs[0])) | ||
2068 | 2094 | ||
2069 | # Fetch and unpack with both the clonedir and shallow tarball available | 2095 | # Fetch and unpack with both the clonedir and shallow tarball available |
2070 | bb.utils.remove(self.gitdir, recurse=True) | 2096 | bb.utils.remove(self.gitdir, recurse=True) |
2097 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
2071 | fetcher, ud = self.fetch_and_unpack() | 2098 | fetcher, ud = self.fetch_and_unpack() |
2072 | 2099 | ||
2073 | # The unpacked tree should *not* be shallow | 2100 | # The unpacked tree should *not* be shallow |
@@ -2175,7 +2202,7 @@ class GitShallowTest(FetcherTest): | |||
2175 | 2202 | ||
2176 | self.fetch_shallow() | 2203 | self.fetch_shallow() |
2177 | 2204 | ||
2178 | self.assertRevCount(5) | 2205 | self.assertRevCount(2) |
2179 | 2206 | ||
2180 | def test_shallow_invalid_revs(self): | 2207 | def test_shallow_invalid_revs(self): |
2181 | self.add_empty_file('a') | 2208 | self.add_empty_file('a') |
@@ -2194,7 +2221,10 @@ class GitShallowTest(FetcherTest): | |||
2194 | self.git('tag v0.0 master', cwd=self.srcdir) | 2221 | self.git('tag v0.0 master', cwd=self.srcdir) |
2195 | self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') | 2222 | self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') |
2196 | self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') | 2223 | self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') |
2197 | self.fetch_shallow() | 2224 | |
2225 | with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm: | ||
2226 | self.fetch_shallow() | ||
2227 | self.assertIn("fatal: no commits selected for shallow requests", cm.output[0]) | ||
2198 | 2228 | ||
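The invalid-revs case is now required to fail loudly instead of quietly succeeding: stacking assertRaises and assertLogs in a single with statement checks both the exception and the logged message from one call. A self-contained sketch of the idiom (the logger name matches the one used above):

    import logging
    import unittest

    class StackedContextExample(unittest.TestCase):
        def test_error_and_log(self):
            logger = logging.getLogger("BitBake.Fetcher")
            with self.assertRaises(ValueError), \
                    self.assertLogs(logger, level="ERROR") as cm:
                # Stand-in for the failing fetch: it logs, then raises.
                logger.error("fatal: no commits selected for shallow requests")
                raise ValueError("fetch failed")
            self.assertIn("no commits selected", cm.output[0])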
2199 | def test_shallow_fetch_missing_revs_fails(self): | 2229 | def test_shallow_fetch_missing_revs_fails(self): |
2200 | self.add_empty_file('a') | 2230 | self.add_empty_file('a') |
@@ -2208,6 +2238,33 @@ class GitShallowTest(FetcherTest): | |||
2208 | self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) | 2238 | self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) |
2209 | 2239 | ||
2210 | @skipIfNoNetwork() | 2240 | @skipIfNoNetwork() |
2241 | def test_git_shallow_fetch_premirrors(self): | ||
2242 | url = "git://git.openembedded.org/bitbake;branch=master;protocol=https" | ||
2243 | |||
2244 | # Create a separate premirror directory within tempdir | ||
2245 | premirror = os.path.join(self.tempdir, "premirror") | ||
2246 | os.mkdir(premirror) | ||
2247 | |||
2248 | # Fetch a non-shallow clone into the premirror subdir | ||
2249 | self.d.setVar('BB_GIT_SHALLOW', '0') | ||
2250 | self.d.setVar("DL_DIR", premirror) | ||
2251 | fetcher, ud = self.fetch(url) | ||
2252 | |||
2253 | # Fetch a shallow clone from the premirror subdir with unpacking | ||
2254 | # using the original recipe URL and the premirror mapping | ||
2255 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
2256 | self.d.setVar("DL_DIR", self.dldir) | ||
2257 | self.d.setVar('BB_FETCH_PREMIRRORONLY', '1') | ||
2258 | self.d.setVar('BB_NO_NETWORK', '1') | ||
2259 | self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') | ||
2260 | self.d.setVar("PREMIRRORS", "git://.*/.* git://{0};protocol=file".format(premirror + "/git2/" + ud.host + ud.path.replace("/", "."))) | ||
2261 | fetcher = self.fetch_and_unpack(url) | ||
2262 | |||
2263 | # Verify that the unpacked sources are shallow clones | ||
2264 | self.assertRevCount(1) | ||
2265 | assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow')) | ||
2266 | |||
2267 | @skipIfNoNetwork() | ||
2211 | def test_bitbake(self): | 2268 | def test_bitbake(self): |
2212 | self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir) | 2269 | self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir) |
2213 | self.git('config core.bare true', cwd=self.srcdir) | 2270 | self.git('config core.bare true', cwd=self.srcdir) |
@@ -2225,7 +2282,7 @@ class GitShallowTest(FetcherTest): | |||
2225 | revs = len(self.git('rev-list master').splitlines()) | 2282 | revs = len(self.git('rev-list master').splitlines()) |
2226 | self.assertNotEqual(orig_revs, revs) | 2283 | self.assertNotEqual(orig_revs, revs) |
2227 | self.assertRefs(['master', 'origin/master']) | 2284 | self.assertRefs(['master', 'origin/master']) |
2228 | self.assertRevCount(orig_revs - 1758) | 2285 | self.assertRevCount(orig_revs - 1760) |
2229 | 2286 | ||
2230 | def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): | 2287 | def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): |
2231 | self.add_empty_file('a') | 2288 | self.add_empty_file('a') |
@@ -2239,23 +2296,33 @@ class GitShallowTest(FetcherTest): | |||
2239 | self.assertIn("No up to date source found", context.exception.msg) | 2296 | self.assertIn("No up to date source found", context.exception.msg) |
2240 | self.assertIn("clone directory not available or not up to date", context.exception.msg) | 2297 | self.assertIn("clone directory not available or not up to date", context.exception.msg) |
2241 | 2298 | ||
2242 | @skipIfNoNetwork() | 2299 | def test_shallow_check_is_shallow(self): |
2243 | def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self): | 2300 | self.add_empty_file('a') |
2244 | self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') | 2301 | self.add_empty_file('b') |
2245 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
2246 | self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') | ||
2247 | fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d) | ||
2248 | fetcher.download() | ||
2249 | 2302 | ||
2250 | bb.utils.remove(self.dldir + "/*.tar.gz") | 2303 | # Fetch and unpack without the clonedir and *only* shallow tarball available |
2251 | fetcher.unpack(self.unpackdir) | 2304 | bb.utils.remove(self.gitdir, recurse=True) |
2305 | fetcher, ud = self.fetch_and_unpack() | ||
2252 | 2306 | ||
2253 | dir = os.listdir(self.unpackdir + "/git/") | 2307 | # The unpacked tree *should* be shallow |
2254 | self.assertIn("fstests.doap", dir) | 2308 | self.assertRevCount(1) |
2309 | assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow')) | ||
2310 | |||
2311 | def test_shallow_succeeds_with_tag_containing_slash(self): | ||
2312 | self.add_empty_file('a') | ||
2313 | self.add_empty_file('b') | ||
2314 | self.git('tag t1/t2/t3', cwd=self.srcdir) | ||
2315 | self.assertRevCount(2, cwd=self.srcdir) | ||
2316 | |||
2317 | srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip() | ||
2318 | self.d.setVar('SRCREV', srcrev) | ||
2319 | uri = self.d.getVar('SRC_URI').split()[0] | ||
2320 | uri = '%s;tag=t1/t2/t3' % uri | ||
2321 | self.fetch_shallow(uri) | ||
2322 | self.assertRevCount(1) | ||
2255 | 2323 | ||
2256 | class GitLfsTest(FetcherTest): | 2324 | class GitLfsTest(FetcherTest): |
2257 | def skipIfNoGitLFS(): | 2325 | def skipIfNoGitLFS(): |
2258 | import shutil | ||
2259 | if not shutil.which('git-lfs'): | 2326 | if not shutil.which('git-lfs'): |
2260 | return unittest.skip('git-lfs not installed') | 2327 | return unittest.skip('git-lfs not installed') |
2261 | return lambda f: f | 2328 | return lambda f: f |
@@ -2279,12 +2346,18 @@ class GitLfsTest(FetcherTest): | |||
2279 | self.git_init(cwd=self.srcdir) | 2346 | self.git_init(cwd=self.srcdir) |
2280 | self.commit_file('.gitattributes', '*.mp3 filter=lfs -text') | 2347 | self.commit_file('.gitattributes', '*.mp3 filter=lfs -text') |
2281 | 2348 | ||
2282 | def commit_file(self, filename, content): | 2349 | def commit(self, *, cwd=None): |
2283 | with open(os.path.join(self.srcdir, filename), "w") as f: | 2350 | cwd = cwd or self.srcdir |
2351 | self.git(["commit", "-m", "Change"], cwd=cwd) | ||
2352 | return self.git(["rev-parse", "HEAD"], cwd=cwd).strip() | ||
2353 | |||
2354 | def commit_file(self, filename, content, *, cwd=None): | ||
2355 | cwd = cwd or self.srcdir | ||
2356 | |||
2357 | with open(os.path.join(cwd, filename), "w") as f: | ||
2284 | f.write(content) | 2358 | f.write(content) |
2285 | self.git(["add", filename], cwd=self.srcdir) | 2359 | self.git(["add", filename], cwd=cwd) |
2286 | self.git(["commit", "-m", "Change"], cwd=self.srcdir) | 2360 | return self.commit(cwd=cwd) |
2287 | return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip() | ||
2288 | 2361 | ||
2289 | def fetch(self, uri=None, download=True): | 2362 | def fetch(self, uri=None, download=True): |
2290 | uris = self.d.getVar('SRC_URI').split() | 2363 | uris = self.d.getVar('SRC_URI').split() |
@@ -2305,25 +2378,112 @@ class GitLfsTest(FetcherTest): | |||
2305 | return unpacked_lfs_file | 2378 | return unpacked_lfs_file |
2306 | 2379 | ||
2307 | @skipIfNoGitLFS() | 2380 | @skipIfNoGitLFS() |
2381 | def test_gitsm_lfs(self): | ||
2382 | """Test that the gitsm fetcher caches objects stored via LFS""" | ||
2383 | self.git(["lfs", "install", "--local"], cwd=self.srcdir) | ||
2384 | |||
2385 | def fetch_and_verify(revision, filename, content): | ||
2386 | self.d.setVar('SRCREV', revision) | ||
2387 | fetcher, ud = self.fetch() | ||
2388 | |||
2389 | with hide_directory(submoduledir), hide_directory(self.srcdir): | ||
2390 | workdir = self.d.getVar('WORKDIR') | ||
2391 | fetcher.unpack(workdir) | ||
2392 | |||
2393 | with open(os.path.join(workdir, "git", filename)) as f: | ||
2394 | self.assertEqual(f.read(), content) | ||
2395 | |||
2396 | # Create the git repository that will later be used as a submodule | ||
2397 | submoduledir = self.tempdir + "/submodule" | ||
2398 | bb.utils.mkdirhier(submoduledir) | ||
2399 | self.git_init(submoduledir) | ||
2400 | self.git(["lfs", "install", "--local"], cwd=submoduledir) | ||
2401 | self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir) | ||
2402 | |||
2403 | submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir) | ||
2404 | _ = self.commit_file("a.mp3", "submodule version 2", cwd=submoduledir) | ||
2405 | |||
2406 | # Add the submodule to the repository at its current HEAD revision | ||
2407 | self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"], | ||
2408 | cwd=self.srcdir) | ||
2409 | base_commit_1 = self.commit() | ||
2410 | |||
2411 | # Let the submodule point at a different revision | ||
2412 | self.git(["checkout", submodule_commit_1], self.srcdir + "/submodule") | ||
2413 | self.git(["add", "submodule"], cwd=self.srcdir) | ||
2414 | base_commit_2 = self.commit() | ||
2415 | |||
2416 | # Add an LFS file to the repository | ||
2417 | base_commit_3 = self.commit_file("a.mp3", "version 1") | ||
2418 | # Update the added LFS file | ||
2419 | base_commit_4 = self.commit_file("a.mp3", "version 2") | ||
2420 | |||
2421 | self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=1;branch=master" % self.srcdir) | ||
2422 | |||
2423 | # Verify that LFS objects referenced from submodules are fetched and checked out | ||
2424 | fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 2") | ||
2425 | # Verify that the repository inside the download cache of a submodule is extended with any | ||
2426 | # additional LFS objects needed when checking out a different revision. | ||
2427 | fetch_and_verify(base_commit_2, "submodule/a.mp3", "submodule version 1") | ||
2428 | # Verify that LFS objects referenced from the base repository are fetched and checked out | ||
2429 | fetch_and_verify(base_commit_3, "a.mp3", "version 1") | ||
2430 | # Verify that the cached repository is extended with any additional LFS objects required | ||
2431 | # when checking out a different revision. | ||
2432 | fetch_and_verify(base_commit_4, "a.mp3", "version 2") | ||
2433 | |||
2434 | @skipIfNoGitLFS() | ||
2435 | def test_gitsm_lfs_disabled(self): | ||
2436 | """Test that the gitsm fetcher does not use LFS when explicitly disabled""" | ||
2437 | self.git(["lfs", "install", "--local"], cwd=self.srcdir) | ||
2438 | |||
2439 | def fetch_and_verify(revision, filename, content): | ||
2440 | self.d.setVar('SRCREV', revision) | ||
2441 | fetcher, ud = self.fetch() | ||
2442 | |||
2443 | with hide_directory(submoduledir), hide_directory(self.srcdir): | ||
2444 | workdir = self.d.getVar('WORKDIR') | ||
2445 | fetcher.unpack(workdir) | ||
2446 | |||
2447 | with open(os.path.join(workdir, "git", filename)) as f: | ||
2448 | # Assume that LFS did not perform smudging when the expected content is | ||
2449 | # missing. | ||
2450 | self.assertNotEqual(f.read(), content) | ||
2451 | |||
2452 | # Create the git repository that will later be used as a submodule | ||
2453 | submoduledir = self.tempdir + "/submodule" | ||
2454 | bb.utils.mkdirhier(submoduledir) | ||
2455 | self.git_init(submoduledir) | ||
2456 | self.git(["lfs", "install", "--local"], cwd=submoduledir) | ||
2457 | self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir) | ||
2458 | |||
2459 | submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir) | ||
2460 | |||
2461 | # Add the submodule to the repository at its current HEAD revision | ||
2462 | self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"], | ||
2463 | cwd=self.srcdir) | ||
2464 | base_commit_1 = self.commit() | ||
2465 | |||
2466 | # Add an LFS file to the repository | ||
2467 | base_commit_2 = self.commit_file("a.mp3", "version 1") | ||
2468 | |||
2469 | self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;branch=master;lfs=0" % self.srcdir) | ||
2470 | |||
2471 | # Verify that LFS objects referenced from submodules are neither fetched nor checked out | ||
2472 | fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 1") | ||
2473 | # Verify that the LFS objects referenced from the base repository are neither fetched | ||
2474 | # nor checked out | ||
2475 | fetch_and_verify(base_commit_2, "a.mp3", "version 1") | ||
2476 | |||
2477 | @skipIfNoGitLFS() | ||
2308 | def test_fetch_lfs_on_srcrev_change(self): | 2478 | def test_fetch_lfs_on_srcrev_change(self): |
2309 | """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested""" | 2479 | """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested""" |
2310 | self.git(["lfs", "install", "--local"], cwd=self.srcdir) | 2480 | self.git(["lfs", "install", "--local"], cwd=self.srcdir) |
2311 | 2481 | ||
2312 | @contextlib.contextmanager | ||
2313 | def hide_upstream_repository(): | ||
2314 | """Hide the upstream repository to make sure that git lfs cannot pull from it""" | ||
2315 | temp_name = self.srcdir + ".bak" | ||
2316 | os.rename(self.srcdir, temp_name) | ||
2317 | try: | ||
2318 | yield | ||
2319 | finally: | ||
2320 | os.rename(temp_name, self.srcdir) | ||
2321 | |||
2322 | def fetch_and_verify(revision, filename, content): | 2482 | def fetch_and_verify(revision, filename, content): |
2323 | self.d.setVar('SRCREV', revision) | 2483 | self.d.setVar('SRCREV', revision) |
2324 | fetcher, ud = self.fetch() | 2484 | fetcher, ud = self.fetch() |
2325 | 2485 | ||
2326 | with hide_upstream_repository(): | 2486 | with hide_directory(self.srcdir): |
2327 | workdir = self.d.getVar('WORKDIR') | 2487 | workdir = self.d.getVar('WORKDIR') |
2328 | fetcher.unpack(workdir) | 2488 | fetcher.unpack(workdir) |
2329 | 2489 | ||
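The local hide_upstream_repository() helper is replaced by a shared hide_directory() that the new gitsm LFS tests also use with the submodule directory. Its definition is not part of this hunk; a minimal sketch consistent with the removed code:

    import contextlib
    import os

    @contextlib.contextmanager
    def hide_directory(directory):
        """Temporarily rename a directory so git/git-lfs cannot pull from it."""
        temp_name = directory + ".bak"
        os.rename(directory, temp_name)
        try:
            yield
        finally:
            os.rename(temp_name, directory)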
@@ -2375,8 +2535,6 @@ class GitLfsTest(FetcherTest): | |||
2375 | 2535 | ||
2376 | @skipIfNoGitLFS() | 2536 | @skipIfNoGitLFS() |
2377 | def test_lfs_enabled(self): | 2537 | def test_lfs_enabled(self): |
2378 | import shutil | ||
2379 | |||
2380 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir | 2538 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir |
2381 | self.d.setVar('SRC_URI', uri) | 2539 | self.d.setVar('SRC_URI', uri) |
2382 | 2540 | ||
@@ -2387,8 +2545,6 @@ class GitLfsTest(FetcherTest): | |||
2387 | 2545 | ||
2388 | @skipIfNoGitLFS() | 2546 | @skipIfNoGitLFS() |
2389 | def test_lfs_disabled(self): | 2547 | def test_lfs_disabled(self): |
2390 | import shutil | ||
2391 | |||
2392 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir | 2548 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir |
2393 | self.d.setVar('SRC_URI', uri) | 2549 | self.d.setVar('SRC_URI', uri) |
2394 | 2550 | ||
@@ -2397,58 +2553,76 @@ class GitLfsTest(FetcherTest): | |||
2397 | fetcher, ud = self.fetch() | 2553 | fetcher, ud = self.fetch() |
2398 | fetcher.unpack(self.d.getVar('WORKDIR')) | 2554 | fetcher.unpack(self.d.getVar('WORKDIR')) |
2399 | 2555 | ||
2400 | def test_lfs_enabled_not_installed(self): | 2556 | @skipIfNoGitLFS() |
2401 | import shutil | 2557 | def test_lfs_enabled_not_installed_during_unpack(self): |
2558 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir | ||
2559 | self.d.setVar('SRC_URI', uri) | ||
2560 | |||
2561 | # Careful: suppress initial attempt at downloading | ||
2562 | fetcher, ud = self.fetch(uri=None, download=False) | ||
2563 | |||
2564 | fetcher.download() | ||
2565 | # If git-lfs cannot be found, the unpack should throw an error | ||
2566 | with self.assertRaises(bb.fetch2.FetchError): | ||
2567 | with unittest.mock.patch("shutil.which", return_value=None): | ||
2568 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2569 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2402 | 2570 | ||
2571 | def test_lfs_enabled_not_installed(self): | ||
2403 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir | 2572 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir |
2404 | self.d.setVar('SRC_URI', uri) | 2573 | self.d.setVar('SRC_URI', uri) |
2405 | 2574 | ||
2406 | # Careful: suppress initial attempt at downloading | 2575 | # Careful: suppress initial attempt at downloading |
2407 | fetcher, ud = self.fetch(uri=None, download=False) | 2576 | fetcher, ud = self.fetch(uri=None, download=False) |
2408 | 2577 | ||
2409 | # Artificially assert that git-lfs is not installed, so | 2578 | # If git-lfs cannot be found, the download should throw an error |
2410 | # we can verify a failure to unpack in it's absence. | 2579 | with unittest.mock.patch("shutil.which", return_value=None): |
2411 | old_find_git_lfs = ud.method._find_git_lfs | ||
2412 | try: | ||
2413 | # If git-lfs cannot be found, the unpack should throw an error | ||
2414 | with self.assertRaises(bb.fetch2.FetchError): | 2580 | with self.assertRaises(bb.fetch2.FetchError): |
2415 | fetcher.download() | 2581 | fetcher.download() |
2416 | ud.method._find_git_lfs = lambda d: False | ||
2417 | shutil.rmtree(self.gitdir, ignore_errors=True) | ||
2418 | fetcher.unpack(self.d.getVar('WORKDIR')) | ||
2419 | finally: | ||
2420 | ud.method._find_git_lfs = old_find_git_lfs | ||
2421 | 2582 | ||
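The rewritten tests simulate a missing git-lfs binary by patching shutil.which() rather than monkey-patching the fetcher's private _find_git_lfs hook. A minimal illustration of the pattern (find_git_lfs here is a hypothetical stand-in for the fetcher's lookup):

    import shutil
    import unittest.mock

    def find_git_lfs():
        # Hypothetical stand-in for how the fetcher locates the binary
        return shutil.which("git-lfs")

    # While the patch is active, every lookup reports "not found",
    # regardless of what is actually installed on the host
    with unittest.mock.patch("shutil.which", return_value=None):
        assert find_git_lfs() is None

Patching the public shutil API keeps the tests decoupled from fetcher internals, which is why the old save/restore dance around _find_git_lfs could be dropped.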
2422 | def test_lfs_disabled_not_installed(self): | 2583 | def test_lfs_disabled_not_installed(self): |
2423 | import shutil | ||
2424 | |||
2425 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir | 2584 | uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir |
2426 | self.d.setVar('SRC_URI', uri) | 2585 | self.d.setVar('SRC_URI', uri) |
2427 | 2586 | ||
2428 | # Careful: suppress initial attempt at downloading | 2587 | # Careful: suppress initial attempt at downloading |
2429 | fetcher, ud = self.fetch(uri=None, download=False) | 2588 | fetcher, ud = self.fetch(uri=None, download=False) |
2430 | 2589 | ||
2431 | # Artificially assert that git-lfs is not installed, so | 2590 | # Even if git-lfs cannot be found, the download / unpack should be successful |
2432 | # we can verify a failure to unpack in it's absence. | 2591 | with unittest.mock.patch("shutil.which", return_value=None): |
2433 | old_find_git_lfs = ud.method._find_git_lfs | 2592 | fetcher.download() |
2434 | try: | 2593 | shutil.rmtree(self.gitdir, ignore_errors=True) |
2435 | # Even if git-lfs cannot be found, the unpack should be successful | 2594 | fetcher.unpack(self.d.getVar('WORKDIR')) |
2595 | |||
2596 | def test_lfs_enabled_not_installed_but_not_needed(self): | ||
2597 | srcdir = os.path.join(self.tempdir, "emptygit") | ||
2598 | bb.utils.mkdirhier(srcdir) | ||
2599 | self.git_init(srcdir) | ||
2600 | self.commit_file("test", "test content", cwd=srcdir) | ||
2601 | |||
2602 | uri = 'git://%s;protocol=file;lfs=1;branch=master' % srcdir | ||
2603 | self.d.setVar('SRC_URI', uri) | ||
2604 | |||
2605 | # Careful: suppress initial attempt at downloading | ||
2606 | fetcher, ud = self.fetch(uri=None, download=False) | ||
2607 | |||
2608 | # It shouldn't matter that git-lfs cannot be found, as the repository configuration does not | ||
2609 | # specify any LFS filters. | ||
2610 | with unittest.mock.patch("shutil.which", return_value=None): | ||
2436 | fetcher.download() | 2611 | fetcher.download() |
2437 | ud.method._find_git_lfs = lambda d: False | ||
2438 | shutil.rmtree(self.gitdir, ignore_errors=True) | 2612 | shutil.rmtree(self.gitdir, ignore_errors=True) |
2439 | fetcher.unpack(self.d.getVar('WORKDIR')) | 2613 | fetcher.unpack(self.d.getVar('WORKDIR')) |
2440 | finally: | ||
2441 | ud.method._find_git_lfs = old_find_git_lfs | ||
2442 | 2614 | ||
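test_lfs_enabled_not_installed_but_not_needed relies on the fetcher only requiring git-lfs when the checked-out revision actually declares LFS filters. A hedged sketch of one way such a check can be done with plain git (illustrative only; the fetcher's own detection may differ):

    import subprocess

    def repo_uses_lfs(gitdir, rev="HEAD"):
        """Return True if .gitattributes at `rev` declares an LFS filter."""
        try:
            out = subprocess.check_output(
                ["git", "-C", gitdir, "show", f"{rev}:.gitattributes"],
                text=True, stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError:
            return False  # no .gitattributes at that revision
        return "filter=lfs" in out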
2443 | class GitURLWithSpacesTest(FetcherTest): | 2615 | class GitURLWithSpacesTest(FetcherTest): |
2444 | test_git_urls = { | 2616 | test_git_urls = { |
2445 | "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : { | 2617 | "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : { |
2446 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master', | 2618 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master', |
2619 | 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example.git', | ||
2447 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', | 2620 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', |
2448 | 'path': '/tfs/example path/example.git' | 2621 | 'path': '/tfs/example path/example.git' |
2449 | }, | 2622 | }, |
2450 | "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : { | 2623 | "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : { |
2451 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master', | 2624 | 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master', |
2625 | 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git', | ||
2452 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', | 2626 | 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', |
2453 | 'path': '/tfs/example path/example repo.git' | 2627 | 'path': '/tfs/example path/example repo.git' |
2454 | } | 2628 | } |
@@ -2471,6 +2645,7 @@ class GitURLWithSpacesTest(FetcherTest): | |||
2471 | self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) | 2645 | self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) |
2472 | self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) | 2646 | self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) |
2473 | self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) | 2647 | self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) |
2648 | self.assertEqual(ud.method._get_repo_url(ud), ref['repo_url']) | ||
2474 | 2649 | ||
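The new repo_url assertion complements the existing path checks: gitsrcname keeps the escaped form for on-disk names, while path carries the percent-decoded value. The %20-to-space mapping is ordinary URL decoding:

    from urllib.parse import unquote

    # Percent-decoding turns %20 into a literal space, matching the
    # 'path' values the test table expects
    assert unquote("/tfs/example%20path/example.git") == "/tfs/example path/example.git"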
2475 | class CrateTest(FetcherTest): | 2650 | class CrateTest(FetcherTest): |
2476 | @skipIfNoNetwork() | 2651 | @skipIfNoNetwork() |
@@ -2592,7 +2767,6 @@ class CrateTest(FetcherTest): | |||
2592 | 2767 | ||
2593 | class NPMTest(FetcherTest): | 2768 | class NPMTest(FetcherTest): |
2594 | def skipIfNoNpm(): | 2769 | def skipIfNoNpm(): |
2595 | import shutil | ||
2596 | if not shutil.which('npm'): | 2770 | if not shutil.which('npm'): |
2597 | return unittest.skip('npm not installed') | 2771 | return unittest.skip('npm not installed') |
2598 | return lambda f: f | 2772 | return lambda f: f |
@@ -2600,8 +2774,8 @@ class NPMTest(FetcherTest): | |||
2600 | @skipIfNoNpm() | 2774 | @skipIfNoNpm() |
2601 | @skipIfNoNetwork() | 2775 | @skipIfNoNetwork() |
2602 | def test_npm(self): | 2776 | def test_npm(self): |
2603 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2777 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2604 | fetcher = bb.fetch.Fetch([url], self.d) | 2778 | fetcher = bb.fetch.Fetch(urls, self.d) |
2605 | ud = fetcher.ud[fetcher.urls[0]] | 2779 | ud = fetcher.ud[fetcher.urls[0]] |
2606 | fetcher.download() | 2780 | fetcher.download() |
2607 | self.assertTrue(os.path.exists(ud.localpath)) | 2781 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2614,9 +2788,9 @@ class NPMTest(FetcherTest): | |||
2614 | @skipIfNoNpm() | 2788 | @skipIfNoNpm() |
2615 | @skipIfNoNetwork() | 2789 | @skipIfNoNetwork() |
2616 | def test_npm_bad_checksum(self): | 2790 | def test_npm_bad_checksum(self): |
2617 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2791 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2618 | # Fetch once to get a tarball | 2792 | # Fetch once to get a tarball |
2619 | fetcher = bb.fetch.Fetch([url], self.d) | 2793 | fetcher = bb.fetch.Fetch(urls, self.d) |
2620 | ud = fetcher.ud[fetcher.urls[0]] | 2794 | ud = fetcher.ud[fetcher.urls[0]] |
2621 | fetcher.download() | 2795 | fetcher.download() |
2622 | self.assertTrue(os.path.exists(ud.localpath)) | 2796 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2633,9 +2807,9 @@ class NPMTest(FetcherTest): | |||
2633 | @skipIfNoNpm() | 2807 | @skipIfNoNpm() |
2634 | @skipIfNoNetwork() | 2808 | @skipIfNoNetwork() |
2635 | def test_npm_premirrors(self): | 2809 | def test_npm_premirrors(self): |
2636 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2810 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2637 | # Fetch once to get a tarball | 2811 | # Fetch once to get a tarball |
2638 | fetcher = bb.fetch.Fetch([url], self.d) | 2812 | fetcher = bb.fetch.Fetch(urls, self.d) |
2639 | ud = fetcher.ud[fetcher.urls[0]] | 2813 | ud = fetcher.ud[fetcher.urls[0]] |
2640 | fetcher.download() | 2814 | fetcher.download() |
2641 | self.assertTrue(os.path.exists(ud.localpath)) | 2815 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2655,7 +2829,7 @@ class NPMTest(FetcherTest): | |||
2655 | # while the fetcher object exists, which it does when we rename the | 2829 | # while the fetcher object exists, which it does when we rename the |
2656 | # download directory to "mirror" above. Thus we need a new fetcher to go | 2830 | # download directory to "mirror" above. Thus we need a new fetcher to go |
2657 | # with the now empty download directory. | 2831 | # with the now empty download directory. |
2658 | fetcher = bb.fetch.Fetch([url], self.d) | 2832 | fetcher = bb.fetch.Fetch(urls, self.d) |
2659 | ud = fetcher.ud[fetcher.urls[0]] | 2833 | ud = fetcher.ud[fetcher.urls[0]] |
2660 | fetcher.download() | 2834 | fetcher.download() |
2661 | self.assertTrue(os.path.exists(ud.localpath)) | 2835 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2663,9 +2837,9 @@ class NPMTest(FetcherTest): | |||
2663 | @skipIfNoNpm() | 2837 | @skipIfNoNpm() |
2664 | @skipIfNoNetwork() | 2838 | @skipIfNoNetwork() |
2665 | def test_npm_premirrors_with_specified_filename(self): | 2839 | def test_npm_premirrors_with_specified_filename(self): |
2666 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2840 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2667 | # Fetch once to get a tarball | 2841 | # Fetch once to get a tarball |
2668 | fetcher = bb.fetch.Fetch([url], self.d) | 2842 | fetcher = bb.fetch.Fetch(urls, self.d) |
2669 | ud = fetcher.ud[fetcher.urls[0]] | 2843 | ud = fetcher.ud[fetcher.urls[0]] |
2670 | fetcher.download() | 2844 | fetcher.download() |
2671 | self.assertTrue(os.path.exists(ud.localpath)) | 2845 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2685,8 +2859,8 @@ class NPMTest(FetcherTest): | |||
2685 | @skipIfNoNetwork() | 2859 | @skipIfNoNetwork() |
2686 | def test_npm_mirrors(self): | 2860 | def test_npm_mirrors(self): |
2687 | # Fetch once to get a tarball | 2861 | # Fetch once to get a tarball |
2688 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2862 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2689 | fetcher = bb.fetch.Fetch([url], self.d) | 2863 | fetcher = bb.fetch.Fetch(urls, self.d) |
2690 | ud = fetcher.ud[fetcher.urls[0]] | 2864 | ud = fetcher.ud[fetcher.urls[0]] |
2691 | fetcher.download() | 2865 | fetcher.download() |
2692 | self.assertTrue(os.path.exists(ud.localpath)) | 2866 | self.assertTrue(os.path.exists(ud.localpath)) |
@@ -2710,8 +2884,8 @@ class NPMTest(FetcherTest): | |||
2710 | @skipIfNoNpm() | 2884 | @skipIfNoNpm() |
2711 | @skipIfNoNetwork() | 2885 | @skipIfNoNetwork() |
2712 | def test_npm_destsuffix_downloadfilename(self): | 2886 | def test_npm_destsuffix_downloadfilename(self): |
2713 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' | 2887 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz'] |
2714 | fetcher = bb.fetch.Fetch([url], self.d) | 2888 | fetcher = bb.fetch.Fetch(urls, self.d) |
2715 | fetcher.download() | 2889 | fetcher.download() |
2716 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz'))) | 2890 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz'))) |
2717 | fetcher.unpack(self.unpackdir) | 2891 | fetcher.unpack(self.unpackdir) |
@@ -2719,18 +2893,18 @@ class NPMTest(FetcherTest): | |||
2719 | self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) | 2893 | self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) |
2720 | 2894 | ||
2721 | def test_npm_no_network_no_tarball(self): | 2895 | def test_npm_no_network_no_tarball(self): |
2722 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2896 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2723 | self.d.setVar('BB_NO_NETWORK', '1') | 2897 | self.d.setVar('BB_NO_NETWORK', '1') |
2724 | fetcher = bb.fetch.Fetch([url], self.d) | 2898 | fetcher = bb.fetch.Fetch(urls, self.d) |
2725 | with self.assertRaises(bb.fetch2.NetworkAccess): | 2899 | with self.assertRaises(bb.fetch2.NetworkAccess): |
2726 | fetcher.download() | 2900 | fetcher.download() |
2727 | 2901 | ||
2728 | @skipIfNoNpm() | 2902 | @skipIfNoNpm() |
2729 | @skipIfNoNetwork() | 2903 | @skipIfNoNetwork() |
2730 | def test_npm_no_network_with_tarball(self): | 2904 | def test_npm_no_network_with_tarball(self): |
2731 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2905 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2732 | # Fetch once to get a tarball | 2906 | # Fetch once to get a tarball |
2733 | fetcher = bb.fetch.Fetch([url], self.d) | 2907 | fetcher = bb.fetch.Fetch(urls, self.d) |
2734 | fetcher.download() | 2908 | fetcher.download() |
2735 | # Disable network access | 2909 | # Disable network access |
2736 | self.d.setVar('BB_NO_NETWORK', '1') | 2910 | self.d.setVar('BB_NO_NETWORK', '1') |
@@ -2743,8 +2917,8 @@ class NPMTest(FetcherTest): | |||
2743 | @skipIfNoNpm() | 2917 | @skipIfNoNpm() |
2744 | @skipIfNoNetwork() | 2918 | @skipIfNoNetwork() |
2745 | def test_npm_registry_alternate(self): | 2919 | def test_npm_registry_alternate(self): |
2746 | url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2920 | urls = ['npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2747 | fetcher = bb.fetch.Fetch([url], self.d) | 2921 | fetcher = bb.fetch.Fetch(urls, self.d) |
2748 | fetcher.download() | 2922 | fetcher.download() |
2749 | fetcher.unpack(self.unpackdir) | 2923 | fetcher.unpack(self.unpackdir) |
2750 | unpackdir = os.path.join(self.unpackdir, 'npm') | 2924 | unpackdir = os.path.join(self.unpackdir, 'npm') |
@@ -2753,8 +2927,8 @@ class NPMTest(FetcherTest): | |||
2753 | @skipIfNoNpm() | 2927 | @skipIfNoNpm() |
2754 | @skipIfNoNetwork() | 2928 | @skipIfNoNetwork() |
2755 | def test_npm_version_latest(self): | 2929 | def test_npm_version_latest(self): |
2756 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest' | 2930 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest'] |
2757 | fetcher = bb.fetch.Fetch([url], self.d) | 2931 | fetcher = bb.fetch.Fetch(urls, self.d) |
2758 | fetcher.download() | 2932 | fetcher.download() |
2759 | fetcher.unpack(self.unpackdir) | 2933 | fetcher.unpack(self.unpackdir) |
2760 | unpackdir = os.path.join(self.unpackdir, 'npm') | 2934 | unpackdir = os.path.join(self.unpackdir, 'npm') |
@@ -2763,46 +2937,46 @@ class NPMTest(FetcherTest): | |||
2763 | @skipIfNoNpm() | 2937 | @skipIfNoNpm() |
2764 | @skipIfNoNetwork() | 2938 | @skipIfNoNetwork() |
2765 | def test_npm_registry_invalid(self): | 2939 | def test_npm_registry_invalid(self): |
2766 | url = 'npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2940 | urls = ['npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2767 | fetcher = bb.fetch.Fetch([url], self.d) | 2941 | fetcher = bb.fetch.Fetch(urls, self.d) |
2768 | with self.assertRaises(bb.fetch2.FetchError): | 2942 | with self.assertRaises(bb.fetch2.FetchError): |
2769 | fetcher.download() | 2943 | fetcher.download() |
2770 | 2944 | ||
2771 | @skipIfNoNpm() | 2945 | @skipIfNoNpm() |
2772 | @skipIfNoNetwork() | 2946 | @skipIfNoNetwork() |
2773 | def test_npm_package_invalid(self): | 2947 | def test_npm_package_invalid(self): |
2774 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0' | 2948 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0'] |
2775 | fetcher = bb.fetch.Fetch([url], self.d) | 2949 | fetcher = bb.fetch.Fetch(urls, self.d) |
2776 | with self.assertRaises(bb.fetch2.FetchError): | 2950 | with self.assertRaises(bb.fetch2.FetchError): |
2777 | fetcher.download() | 2951 | fetcher.download() |
2778 | 2952 | ||
2779 | @skipIfNoNpm() | 2953 | @skipIfNoNpm() |
2780 | @skipIfNoNetwork() | 2954 | @skipIfNoNetwork() |
2781 | def test_npm_version_invalid(self): | 2955 | def test_npm_version_invalid(self): |
2782 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid' | 2956 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid'] |
2783 | with self.assertRaises(bb.fetch2.ParameterError): | 2957 | with self.assertRaises(bb.fetch2.ParameterError): |
2784 | fetcher = bb.fetch.Fetch([url], self.d) | 2958 | fetcher = bb.fetch.Fetch(urls, self.d) |
2785 | 2959 | ||
2786 | @skipIfNoNpm() | 2960 | @skipIfNoNpm() |
2787 | @skipIfNoNetwork() | 2961 | @skipIfNoNetwork() |
2788 | def test_npm_registry_none(self): | 2962 | def test_npm_registry_none(self): |
2789 | url = 'npm://;package=@savoirfairelinux/node-server-example;version=1.0.0' | 2963 | urls = ['npm://;package=@savoirfairelinux/node-server-example;version=1.0.0'] |
2790 | with self.assertRaises(bb.fetch2.MalformedUrl): | 2964 | with self.assertRaises(bb.fetch2.MalformedUrl): |
2791 | fetcher = bb.fetch.Fetch([url], self.d) | 2965 | fetcher = bb.fetch.Fetch(urls, self.d) |
2792 | 2966 | ||
2793 | @skipIfNoNpm() | 2967 | @skipIfNoNpm() |
2794 | @skipIfNoNetwork() | 2968 | @skipIfNoNetwork() |
2795 | def test_npm_package_none(self): | 2969 | def test_npm_package_none(self): |
2796 | url = 'npm://registry.npmjs.org;version=1.0.0' | 2970 | urls = ['npm://registry.npmjs.org;version=1.0.0'] |
2797 | with self.assertRaises(bb.fetch2.MissingParameterError): | 2971 | with self.assertRaises(bb.fetch2.MissingParameterError): |
2798 | fetcher = bb.fetch.Fetch([url], self.d) | 2972 | fetcher = bb.fetch.Fetch(urls, self.d) |
2799 | 2973 | ||
2800 | @skipIfNoNpm() | 2974 | @skipIfNoNpm() |
2801 | @skipIfNoNetwork() | 2975 | @skipIfNoNetwork() |
2802 | def test_npm_version_none(self): | 2976 | def test_npm_version_none(self): |
2803 | url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example' | 2977 | urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example'] |
2804 | with self.assertRaises(bb.fetch2.MissingParameterError): | 2978 | with self.assertRaises(bb.fetch2.MissingParameterError): |
2805 | fetcher = bb.fetch.Fetch([url], self.d) | 2979 | fetcher = bb.fetch.Fetch(urls, self.d) |
2806 | 2980 | ||
2807 | def create_shrinkwrap_file(self, data): | 2981 | def create_shrinkwrap_file(self, data): |
2808 | import json | 2982 | import json |
@@ -2811,32 +2985,30 @@ class NPMTest(FetcherTest): | |||
2811 | bb.utils.mkdirhier(datadir) | 2985 | bb.utils.mkdirhier(datadir) |
2812 | with open(swfile, 'w') as f: | 2986 | with open(swfile, 'w') as f: |
2813 | json.dump(data, f) | 2987 | json.dump(data, f) |
2814 | # Also configure the S directory | ||
2815 | self.sdir = os.path.join(self.unpackdir, 'S') | ||
2816 | self.d.setVar('S', self.sdir) | ||
2817 | return swfile | 2988 | return swfile |
2818 | 2989 | ||
2819 | @skipIfNoNpm() | ||
2820 | @skipIfNoNetwork() | 2990 | @skipIfNoNetwork() |
2821 | def test_npmsw(self): | 2991 | def test_npmsw(self): |
2822 | swfile = self.create_shrinkwrap_file({ | 2992 | swfile = self.create_shrinkwrap_file({ |
2823 | 'dependencies': { | 2993 | 'packages': { |
2824 | 'array-flatten': { | 2994 | 'node_modules/array-flatten': { |
2825 | 'version': '1.1.1', | 2995 | 'version': '1.1.1', |
2826 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 2996 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2827 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=', | 2997 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=', |
2828 | 'dependencies': { | 2998 | 'dependencies': { |
2829 | 'content-type': { | 2999 | 'content-type': "1.0.4" |
2830 | 'version': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', | 3000 | } |
2831 | 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', | 3001 | }, |
2832 | 'dependencies': { | 3002 | 'node_modules/array-flatten/node_modules/content-type': { |
2833 | 'cookie': { | 3003 | 'version': '1.0.4', |
2834 | 'version': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', | 3004 | 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', |
2835 | 'from': 'git+https://github.com/jshttp/cookie.git' | 3005 | 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', |
2836 | } | 3006 | 'dependencies': { |
2837 | } | 3007 | 'cookie': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09' |
2838 | } | ||
2839 | } | 3008 | } |
3009 | }, | ||
3010 | 'node_modules/array-flatten/node_modules/content-type/node_modules/cookie': { | ||
3011 | 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09' | ||
2840 | } | 3012 | } |
2841 | } | 3013 | } |
2842 | }) | 3014 | }) |
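The npmsw tests are migrated from the legacy lockfile v1 'dependencies' tree to the flat 'packages' map of lockfileVersion 2 and later, where each entry is keyed by its node_modules install path and nesting is expressed in the key rather than by recursion. A minimal shrinkwrap in that shape (values taken from the test above):

    import json

    shrinkwrap = {
        "packages": {
            # Keys are install paths relative to the project root
            "node_modules/array-flatten": {
                "version": "1.1.1",
                "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
                "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=",
            },
        },
    }

    with open("npm-shrinkwrap.json", "w") as f:
        json.dump(shrinkwrap, f, indent=2)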
@@ -2846,31 +3018,17 @@ class NPMTest(FetcherTest): | |||
2846 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) | 3018 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) |
2847 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) | 3019 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) |
2848 | fetcher.unpack(self.unpackdir) | 3020 | fetcher.unpack(self.unpackdir) |
2849 | self.assertTrue(os.path.exists(os.path.join(self.sdir, 'npm-shrinkwrap.json'))) | 3021 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm-shrinkwrap.json'))) |
2850 | self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) | 3022 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json'))) |
2851 | self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json'))) | 3023 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json'))) |
2852 | self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json'))) | 3024 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json'))) |
2853 | 3025 | ||
2854 | @skipIfNoNpm() | ||
2855 | @skipIfNoNetwork() | 3026 | @skipIfNoNetwork() |
2856 | def test_npmsw_git(self): | 3027 | def test_npmsw_git(self): |
2857 | swfile = self.create_shrinkwrap_file({ | 3028 | swfile = self.create_shrinkwrap_file({ |
2858 | 'dependencies': { | 3029 | 'packages': { |
2859 | 'cookie': { | 3030 | 'node_modules/cookie': { |
2860 | 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', | 3031 | 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09' |
2861 | 'from': 'github:jshttp/cookie.git' | ||
2862 | } | ||
2863 | } | ||
2864 | }) | ||
2865 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
2866 | fetcher.download() | ||
2867 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) | ||
2868 | |||
2869 | swfile = self.create_shrinkwrap_file({ | ||
2870 | 'dependencies': { | ||
2871 | 'cookie': { | ||
2872 | 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', | ||
2873 | 'from': 'jshttp/cookie.git' | ||
2874 | } | 3032 | } |
2875 | } | 3033 | } |
2876 | }) | 3034 | }) |
@@ -2878,29 +3036,16 @@ class NPMTest(FetcherTest): | |||
2878 | fetcher.download() | 3036 | fetcher.download() |
2879 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) | 3037 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) |
2880 | 3038 | ||
2881 | swfile = self.create_shrinkwrap_file({ | ||
2882 | 'dependencies': { | ||
2883 | 'nodejs': { | ||
2884 | 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262', | ||
2885 | 'from': 'gitlab:gitlab-examples/nodejs' | ||
2886 | } | ||
2887 | } | ||
2888 | }) | ||
2889 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
2890 | fetcher.download() | ||
2891 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git'))) | ||
2892 | |||
2893 | @skipIfNoNpm() | ||
2894 | @skipIfNoNetwork() | 3039 | @skipIfNoNetwork() |
2895 | def test_npmsw_dev(self): | 3040 | def test_npmsw_dev(self): |
2896 | swfile = self.create_shrinkwrap_file({ | 3041 | swfile = self.create_shrinkwrap_file({ |
2897 | 'dependencies': { | 3042 | 'packages': { |
2898 | 'array-flatten': { | 3043 | 'node_modules/array-flatten': { |
2899 | 'version': '1.1.1', | 3044 | 'version': '1.1.1', |
2900 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3045 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2901 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3046 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
2902 | }, | 3047 | }, |
2903 | 'content-type': { | 3048 | 'node_modules/content-type': { |
2904 | 'version': '1.0.4', | 3049 | 'version': '1.0.4', |
2905 | 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', | 3050 | 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', |
2906 | 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', | 3051 | 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', |
@@ -2919,12 +3064,11 @@ class NPMTest(FetcherTest): | |||
2919 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) | 3064 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) |
2920 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) | 3065 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) |
2921 | 3066 | ||
2922 | @skipIfNoNpm() | ||
2923 | @skipIfNoNetwork() | 3067 | @skipIfNoNetwork() |
2924 | def test_npmsw_destsuffix(self): | 3068 | def test_npmsw_destsuffix(self): |
2925 | swfile = self.create_shrinkwrap_file({ | 3069 | swfile = self.create_shrinkwrap_file({ |
2926 | 'dependencies': { | 3070 | 'packages': { |
2927 | 'array-flatten': { | 3071 | 'node_modules/array-flatten': { |
2928 | 'version': '1.1.1', | 3072 | 'version': '1.1.1', |
2929 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3073 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2930 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3074 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -2938,8 +3082,8 @@ class NPMTest(FetcherTest): | |||
2938 | 3082 | ||
2939 | def test_npmsw_no_network_no_tarball(self): | 3083 | def test_npmsw_no_network_no_tarball(self): |
2940 | swfile = self.create_shrinkwrap_file({ | 3084 | swfile = self.create_shrinkwrap_file({ |
2941 | 'dependencies': { | 3085 | 'packages': { |
2942 | 'array-flatten': { | 3086 | 'node_modules/array-flatten': { |
2943 | 'version': '1.1.1', | 3087 | 'version': '1.1.1', |
2944 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3088 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2945 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3089 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -2961,8 +3105,8 @@ class NPMTest(FetcherTest): | |||
2961 | self.d.setVar('BB_NO_NETWORK', '1') | 3105 | self.d.setVar('BB_NO_NETWORK', '1') |
2962 | # Fetch again | 3106 | # Fetch again |
2963 | swfile = self.create_shrinkwrap_file({ | 3107 | swfile = self.create_shrinkwrap_file({ |
2964 | 'dependencies': { | 3108 | 'packages': { |
2965 | 'array-flatten': { | 3109 | 'node_modules/array-flatten': { |
2966 | 'version': '1.1.1', | 3110 | 'version': '1.1.1', |
2967 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3111 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2968 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3112 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -2972,15 +3116,14 @@ class NPMTest(FetcherTest): | |||
2972 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | 3116 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) |
2973 | fetcher.download() | 3117 | fetcher.download() |
2974 | fetcher.unpack(self.unpackdir) | 3118 | fetcher.unpack(self.unpackdir) |
2975 | self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) | 3119 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json'))) |
2976 | 3120 | ||
2977 | @skipIfNoNpm() | ||
2978 | @skipIfNoNetwork() | 3121 | @skipIfNoNetwork() |
2979 | def test_npmsw_npm_reusability(self): | 3122 | def test_npmsw_npm_reusability(self): |
2980 | # Fetch once with npmsw | 3123 | # Fetch once with npmsw |
2981 | swfile = self.create_shrinkwrap_file({ | 3124 | swfile = self.create_shrinkwrap_file({ |
2982 | 'dependencies': { | 3125 | 'packages': { |
2983 | 'array-flatten': { | 3126 | 'node_modules/array-flatten': { |
2984 | 'version': '1.1.1', | 3127 | 'version': '1.1.1', |
2985 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3128 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
2986 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3129 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -2997,13 +3140,12 @@ class NPMTest(FetcherTest): | |||
2997 | fetcher.unpack(self.unpackdir) | 3140 | fetcher.unpack(self.unpackdir) |
2998 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json'))) | 3141 | self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json'))) |
2999 | 3142 | ||
3000 | @skipIfNoNpm() | ||
3001 | @skipIfNoNetwork() | 3143 | @skipIfNoNetwork() |
3002 | def test_npmsw_bad_checksum(self): | 3144 | def test_npmsw_bad_checksum(self): |
3003 | # Try to fetch with bad checksum | 3145 | # Try to fetch with bad checksum |
3004 | swfile = self.create_shrinkwrap_file({ | 3146 | swfile = self.create_shrinkwrap_file({ |
3005 | 'dependencies': { | 3147 | 'packages': { |
3006 | 'array-flatten': { | 3148 | 'node_modules/array-flatten': { |
3007 | 'version': '1.1.1', | 3149 | 'version': '1.1.1', |
3008 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3150 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
3009 | 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg=' | 3151 | 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg=' |
@@ -3015,8 +3157,8 @@ class NPMTest(FetcherTest): | |||
3015 | fetcher.download() | 3157 | fetcher.download() |
3016 | # Fetch correctly to get a tarball | 3158 | # Fetch correctly to get a tarball |
3017 | swfile = self.create_shrinkwrap_file({ | 3159 | swfile = self.create_shrinkwrap_file({ |
3018 | 'dependencies': { | 3160 | 'packages': { |
3019 | 'array-flatten': { | 3161 | 'node_modules/array-flatten': { |
3020 | 'version': '1.1.1', | 3162 | 'version': '1.1.1', |
3021 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3163 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
3022 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3164 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -3054,8 +3196,8 @@ class NPMTest(FetcherTest): | |||
3054 | # Fetch again | 3196 | # Fetch again |
3055 | self.assertFalse(os.path.exists(ud.localpath)) | 3197 | self.assertFalse(os.path.exists(ud.localpath)) |
3056 | swfile = self.create_shrinkwrap_file({ | 3198 | swfile = self.create_shrinkwrap_file({ |
3057 | 'dependencies': { | 3199 | 'packages': { |
3058 | 'array-flatten': { | 3200 | 'node_modules/array-flatten': { |
3059 | 'version': '1.1.1', | 3201 | 'version': '1.1.1', |
3060 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | 3202 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', |
3061 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3203 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -3082,8 +3224,8 @@ class NPMTest(FetcherTest): | |||
3082 | # Fetch again with invalid url | 3224 | # Fetch again with invalid url |
3083 | self.assertFalse(os.path.exists(ud.localpath)) | 3225 | self.assertFalse(os.path.exists(ud.localpath)) |
3084 | swfile = self.create_shrinkwrap_file({ | 3226 | swfile = self.create_shrinkwrap_file({ |
3085 | 'dependencies': { | 3227 | 'packages': { |
3086 | 'array-flatten': { | 3228 | 'node_modules/array-flatten': { |
3087 | 'version': '1.1.1', | 3229 | 'version': '1.1.1', |
3088 | 'resolved': 'https://invalid', | 3230 | 'resolved': 'https://invalid', |
3089 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | 3231 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' |
@@ -3094,6 +3236,28 @@ class NPMTest(FetcherTest): | |||
3094 | fetcher.download() | 3236 | fetcher.download() |
3095 | self.assertTrue(os.path.exists(ud.localpath)) | 3237 | self.assertTrue(os.path.exists(ud.localpath)) |
3096 | 3238 | ||
3239 | @skipIfNoNetwork() | ||
3240 | def test_npmsw_bundled(self): | ||
3241 | swfile = self.create_shrinkwrap_file({ | ||
3242 | 'packages': { | ||
3243 | 'node_modules/array-flatten': { | ||
3244 | 'version': '1.1.1', | ||
3245 | 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', | ||
3246 | 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' | ||
3247 | }, | ||
3248 | 'node_modules/content-type': { | ||
3249 | 'version': '1.0.4', | ||
3250 | 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', | ||
3251 | 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', | ||
3252 | 'inBundle': True | ||
3253 | } | ||
3254 | } | ||
3255 | }) | ||
3256 | fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) | ||
3257 | fetcher.download() | ||
3258 | self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) | ||
3259 | self.assertFalse(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) | ||
3260 | |||
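test_npmsw_bundled checks that entries flagged inBundle are not downloaded separately, since bundled dependencies ship inside their parent package's tarball. A hedged sketch of the selection logic the test implies (illustrative only, not the fetcher's code):

    def urls_to_fetch(shrinkwrap):
        """Yield the resolved URLs that need a separate download."""
        for path, entry in shrinkwrap.get("packages", {}).items():
            if not path:
                continue  # the root project itself is not fetched
            if entry.get("inBundle"):
                continue  # already contained in the bundling package's tarball
            resolved = entry.get("resolved")
            if resolved:
                yield resolved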
3097 | class GitSharedTest(FetcherTest): | 3261 | class GitSharedTest(FetcherTest): |
3098 | def setUp(self): | 3262 | def setUp(self): |
3099 | super(GitSharedTest, self).setUp() | 3263 | super(GitSharedTest, self).setUp() |
@@ -3121,6 +3285,72 @@ class GitSharedTest(FetcherTest): | |||
3121 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') | 3285 | alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') |
3122 | self.assertFalse(os.path.exists(alt)) | 3286 | self.assertFalse(os.path.exists(alt)) |
3123 | 3287 | ||
3288 | class GitTagVerificationTests(FetcherTest): | ||
3289 | |||
3290 | @skipIfNoNetwork() | ||
3291 | def test_tag_rev_match(self): | ||
3292 | # Test that a url with rev= and tag= set works | ||
3293 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d) | ||
3294 | fetcher.download() | ||
3295 | fetcher.unpack(self.unpackdir) | ||
3296 | |||
3297 | @skipIfNoNetwork() | ||
3298 | def test_annotated_tag_rev_match(self): | ||
3299 | # Test that a url with rev= and tag= set works | ||
3300 | # rev is the annotated tag revision in this case | ||
3300 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=6d363159e4b7dc566fc40d069b2615e61774a7d8;tag=2.8.7"], self.d) | ||
3301 | fetcher.download() | ||
3302 | fetcher.unpack(self.unpackdir) | ||
3303 | |||
3304 | @skipIfNoNetwork() | ||
3305 | def test_tag_rev_match2(self): | ||
3306 | # Test that a url with SRCREV and tag= set works | ||
3307 | self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb') | ||
3308 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;tag=2.8.7"], self.d) | ||
3309 | fetcher.download() | ||
3310 | fetcher.unpack(self.unpackdir) | ||
3311 | |||
3312 | @skipIfNoNetwork() | ||
3313 | def test_tag_rev_match3(self): | ||
3314 | # Test that a url with SRCREV, rev= and tag= set works | ||
3315 | self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb') | ||
3316 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d) | ||
3317 | fetcher.download() | ||
3318 | fetcher.unpack(self.unpackdir) | ||
3319 | |||
3320 | @skipIfNoNetwork() | ||
3321 | def test_tag_rev_match4(self): | ||
3322 | # Test that mismatching SRCREV and rev= values raise an error | ||
3323 | self.d.setVar('SRCREV', 'bade540fc31a1c26839efd2c7785a751ce24ebfb') | ||
3324 | with self.assertRaises(bb.fetch2.FetchError): | ||
3325 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d) | ||
3326 | |||
3327 | @skipIfNoNetwork() | ||
3328 | def test_tag_rev_match5(self): | ||
3329 | # Test that a url with SRCREV, rev= and tag= set works when using shallow clones | ||
3330 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
3331 | self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb') | ||
3332 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d) | ||
3333 | fetcher.download() | ||
3334 | fetcher.unpack(self.unpackdir) | ||
3335 | |||
3336 | @skipIfNoNetwork() | ||
3337 | def test_tag_rev_match6(self): | ||
3338 | # Test that a url with rev= and a mismatched tag= fails when using shallow clones | ||
3339 | self.d.setVar('BB_GIT_SHALLOW', '1') | ||
3340 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d) | ||
3341 | fetcher.download() | ||
3342 | with self.assertRaises(bb.fetch2.FetchError): | ||
3343 | fetcher.unpack(self.unpackdir) | ||
3344 | |||
3345 | @skipIfNoNetwork() | ||
3346 | def test_tag_rev_match7(self): | ||
3347 | # Test that a url with SRCREV, rev= and a mismatched tag= fails to unpack | ||
3348 | self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb') | ||
3349 | fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d) | ||
3350 | fetcher.download() | ||
3351 | with self.assertRaises(bb.fetch2.FetchError): | ||
3352 | fetcher.unpack(self.unpackdir) | ||
3353 | |||
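The GitTagVerificationTests exercise the rule that when both rev= and tag= are given, the tag must resolve to the requested revision, directly for lightweight tags or via the peeled commit for annotated ones. A sketch of that check with plain git (illustrative; the fetcher's actual verification may differ):

    import subprocess

    def tag_matches_rev(gitdir, tag, rev):
        """Check that `tag` points at commit `rev` in `gitdir`."""
        # `tag^{commit}` peels an annotated tag to the commit it references,
        # so lightweight and annotated tags are handled the same way
        peeled = subprocess.check_output(
            ["git", "-C", gitdir, "rev-parse", f"{tag}^{{commit}}"],
            text=True).strip()
        return peeled == rev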
3124 | 3354 | ||
3125 | class FetchPremirroronlyLocalTest(FetcherTest): | 3355 | class FetchPremirroronlyLocalTest(FetcherTest): |
3126 | 3356 | ||
@@ -3203,58 +3433,6 @@ class FetchPremirroronlyLocalTest(FetcherTest): | |||
3203 | with self.assertRaises(bb.fetch2.NetworkAccess): | 3433 | with self.assertRaises(bb.fetch2.NetworkAccess): |
3204 | fetcher.download() | 3434 | fetcher.download() |
3205 | 3435 | ||
3206 | def test_mirror_tarball_multiple_branches(self): | ||
3207 | """ | ||
3208 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3209 | both branches have required revisions | ||
3210 | """ | ||
3211 | self.make_git_repo() | ||
3212 | branch1rev = self.git_new_branch("testbranch1") | ||
3213 | branch2rev = self.git_new_branch("testbranch2") | ||
3214 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2" | ||
3215 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3216 | self.d.setVar("SRCREV_branch2", branch2rev) | ||
3217 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3218 | self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist") | ||
3219 | fetcher.download() | ||
3220 | fetcher.unpack(os.path.join(self.tempdir, "unpacked")) | ||
3221 | unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename) | ||
3222 | self.assertTrue(os.path.exists(unpacked), "Repo has not been unpackaged properly!") | ||
3223 | with open(unpacked, 'r') as f: | ||
3224 | content = f.read() | ||
3225 | ## We expect to see testbranch1 in the file, not master, not testbranch2 | ||
3226 | self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!") | ||
3227 | |||
3228 | def test_mirror_tarball_multiple_branches_nobranch(self): | ||
3229 | """ | ||
3230 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3231 | Unbalanced name/branches raises ParameterError | ||
3232 | """ | ||
3233 | self.make_git_repo() | ||
3234 | branch1rev = self.git_new_branch("testbranch1") | ||
3235 | branch2rev = self.git_new_branch("testbranch2") | ||
3236 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2" | ||
3237 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3238 | self.d.setVar("SRCREV_branch2", branch2rev) | ||
3239 | with self.assertRaises(bb.fetch2.ParameterError): | ||
3240 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3241 | |||
3242 | def test_mirror_tarball_multiple_branches_norev(self): | ||
3243 | """ | ||
3244 | test if PREMIRRORS can handle multiple name/branches correctly | ||
3245 | one of the branches specifies non existing SRCREV | ||
3246 | """ | ||
3247 | self.make_git_repo() | ||
3248 | branch1rev = self.git_new_branch("testbranch1") | ||
3249 | branch2rev = self.git_new_branch("testbranch2") | ||
3250 | self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2" | ||
3251 | self.d.setVar("SRCREV_branch1", branch1rev) | ||
3252 | self.d.setVar("SRCREV_branch2", "0"*40) | ||
3253 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | ||
3254 | self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist") | ||
3255 | with self.assertRaises(bb.fetch2.NetworkAccess): | ||
3256 | fetcher.download() | ||
3257 | |||
3258 | 3436 | ||
3259 | class FetchPremirroronlyNetworkTest(FetcherTest): | 3437 | class FetchPremirroronlyNetworkTest(FetcherTest): |
3260 | 3438 | ||
@@ -3265,16 +3443,16 @@ class FetchPremirroronlyNetworkTest(FetcherTest): | |||
3265 | self.reponame = "fstests" | 3443 | self.reponame = "fstests" |
3266 | self.clonedir = os.path.join(self.tempdir, "git") | 3444 | self.clonedir = os.path.join(self.tempdir, "git") |
3267 | self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame)) | 3445 | self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame)) |
3268 | self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https" | 3446 | self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https;branch=master" |
3269 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | 3447 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") |
3270 | self.d.setVar("BB_NO_NETWORK", "0") | 3448 | self.d.setVar("BB_NO_NETWORK", "0") |
3271 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | 3449 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") |
3272 | 3450 | ||
3273 | def make_git_repo(self): | 3451 | def make_git_repo(self): |
3274 | import shutil | ||
3275 | self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz" | 3452 | self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz" |
3276 | os.makedirs(self.clonedir) | 3453 | os.makedirs(self.clonedir) |
3277 | self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir) | 3454 | self.git("clone --bare {}".format(self.recipe_url), self.clonedir) |
3455 | self.git("update-ref HEAD 15413486df1f5a5b5af699b6f3ba5f0984e52a9f", self.gitdir) | ||
3278 | bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) | 3456 | bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) |
3279 | shutil.rmtree(self.clonedir) | 3457 | shutil.rmtree(self.clonedir) |
3280 | 3458 | ||
@@ -3282,7 +3460,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest): | |||
3282 | def test_mirror_tarball_updated(self): | 3460 | def test_mirror_tarball_updated(self): |
3283 | self.make_git_repo() | 3461 | self.make_git_repo() |
3284 | ## Upstream commit is in the mirror | 3462 | ## Upstream commit is in the mirror |
3285 | self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec") | 3463 | self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f") |
3286 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | 3464 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) |
3287 | fetcher.download() | 3465 | fetcher.download() |
3288 | 3466 | ||
@@ -3290,7 +3468,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest): | |||
3290 | def test_mirror_tarball_outdated(self): | 3468 | def test_mirror_tarball_outdated(self): |
3291 | self.make_git_repo() | 3469 | self.make_git_repo() |
3292 | ## Upstream commit not in the mirror | 3470 | ## Upstream commit not in the mirror |
3293 | self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f") | 3471 | self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec") |
3294 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | 3472 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) |
3295 | with self.assertRaises(bb.fetch2.NetworkAccess): | 3473 | with self.assertRaises(bb.fetch2.NetworkAccess): |
3296 | fetcher.download() | 3474 | fetcher.download() |
@@ -3300,7 +3478,6 @@ class FetchPremirroronlyMercurialTest(FetcherTest): | |||
3300 | the test covers also basic hg:// clone (see fetch_and_create_tarball | 3478 | the test covers also basic hg:// clone (see fetch_and_create_tarball |
3301 | """ | 3479 | """ |
3302 | def skipIfNoHg(): | 3480 | def skipIfNoHg(): |
3303 | import shutil | ||
3304 | if not shutil.which('hg'): | 3481 | if not shutil.which('hg'): |
3305 | return unittest.skip('Mercurial not installed') | 3482 | return unittest.skip('Mercurial not installed') |
3306 | return lambda f: f | 3483 | return lambda f: f |
@@ -3347,7 +3524,7 @@ class FetchPremirroronlyBrokenTarball(FetcherTest): | |||
3347 | os.mkdir(self.mirrordir) | 3524 | os.mkdir(self.mirrordir) |
3348 | self.reponame = "bitbake" | 3525 | self.reponame = "bitbake" |
3349 | self.gitdir = os.path.join(self.tempdir, "git", self.reponame) | 3526 | self.gitdir = os.path.join(self.tempdir, "git", self.reponame) |
3350 | self.recipe_url = "git://git.fake.repo/bitbake;protocol=https" | 3527 | self.recipe_url = "git://git.fake.repo/bitbake;protocol=https;branch=master" |
3351 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") | 3528 | self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") |
3352 | self.d.setVar("BB_NO_NETWORK", "1") | 3529 | self.d.setVar("BB_NO_NETWORK", "1") |
3353 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") | 3530 | self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") |
@@ -3356,10 +3533,223 @@ class FetchPremirroronlyBrokenTarball(FetcherTest): | |||
3356 | targz.write("This is not tar.gz file!") | 3533 | targz.write("This is not tar.gz file!") |
3357 | 3534 | ||
3358 | def test_mirror_broken_download(self): | 3535 | def test_mirror_broken_download(self): |
3359 | import sys | ||
3360 | self.d.setVar("SRCREV", "0"*40) | 3536 | self.d.setVar("SRCREV", "0"*40) |
3361 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) | 3537 | fetcher = bb.fetch.Fetch([self.recipe_url], self.d) |
3362 | with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs: | 3538 | with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs: |
3363 | fetcher.download() | 3539 | fetcher.download() |
3364 | output = "".join(logs.output) | 3540 | output = "".join(logs.output) |
3365 | self.assertFalse(" not a git repository (or any parent up to mount point /)" in output) | 3541 | self.assertFalse(" not a git repository (or any parent up to mount point /)" in output) |
3542 | |||
3543 | class GoModTest(FetcherTest): | ||
3544 | |||
3545 | @skipIfNoNetwork() | ||
3546 | def test_gomod_url(self): | ||
3547 | urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;' | ||
3548 | 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad'] | ||
3549 | |||
3550 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3551 | ud = fetcher.ud[urls[0]] | ||
3552 | self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip') | ||
3553 | self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.zip') | ||
3554 | self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0') | ||
3555 | |||
3556 | fetcher.download() | ||
3557 | fetcher.unpack(self.unpackdir) | ||
3558 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3559 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip'))) | ||
3560 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod'))) | ||
3561 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')), | ||
3562 | '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873') | ||
3563 | |||
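The expected URLs above show the Go module proxy's case-escaping: an uppercase letter in a module path is encoded as '!' followed by its lowercase form, which is why github.com/Azure/... appears as github.com/!azure/... on disk and as %21azure once URL-encoded. A minimal sketch of the escaping rule as documented for golang.org/x/mod/module:

    def escape_module_path(path):
        """Escape a Go module path for use in module proxy URLs."""
        out = []
        for ch in path:
            if ch.isupper():
                # '!' + lowercase keeps paths unambiguous on
                # case-insensitive filesystems
                out.append("!" + ch.lower())
            else:
                out.append(ch)
        return "".join(out)

    assert escape_module_path("github.com/Azure/azure-sdk-for-go") == \
        "github.com/!azure/azure-sdk-for-go"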
3564 | @skipIfNoNetwork() | ||
3565 | def test_gomod_url_go_mod_only(self): | ||
3566 | urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;' | ||
3567 | 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873'] | ||
3568 | |||
3569 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3570 | ud = fetcher.ud[urls[0]] | ||
3571 | self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod') | ||
3572 | self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.mod') | ||
3573 | self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0') | ||
3574 | |||
3575 | fetcher.download() | ||
3576 | fetcher.unpack(self.unpackdir) | ||
3577 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3578 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod'))) | ||
3579 | |||
3580 | @skipIfNoNetwork() | ||
3581 | def test_gomod_url_sha256sum_varflag(self): | ||
3582 | urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0'] | ||
3583 | self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6') | ||
3584 | |||
3585 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3586 | ud = fetcher.ud[urls[0]] | ||
3587 | self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip') | ||
3588 | self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip') | ||
3589 | self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0') | ||
3590 | |||
3591 | fetcher.download() | ||
3592 | fetcher.unpack(self.unpackdir) | ||
3593 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3594 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip'))) | ||
3595 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod'))) | ||
3596 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')), | ||
3597 | '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1') | ||
3598 | |||
3599 | @skipIfNoNetwork() | ||
3600 | def test_gomod_url_no_go_mod_in_module(self): | ||
3601 | urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;' | ||
3602 | 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6'] | ||
3603 | |||
3604 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3605 | ud = fetcher.ud[urls[0]] | ||
3606 | self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip') | ||
3607 | self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip') | ||
3608 | self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0') | ||
3609 | |||
3610 | fetcher.download() | ||
3611 | fetcher.unpack(self.unpackdir) | ||
3612 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3613 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip'))) | ||
3614 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod'))) | ||
3615 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')), | ||
3616 | '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1') | ||
3617 | |||
3618 | @skipIfNoNetwork() | ||
3619 | def test_gomod_url_host_only(self): | ||
3620 | urls = ['gomod://go.opencensus.io;version=v0.24.0;' | ||
3621 | 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c'] | ||
3622 | |||
3623 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3624 | ud = fetcher.ud[urls[0]] | ||
3625 | self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip') | ||
3626 | self.assertEqual(ud.parm['downloadfilename'], 'go.opencensus.io@v0.24.0.zip') | ||
3627 | self.assertEqual(ud.parm['name'], 'go.opencensus.io@v0.24.0') | ||
3628 | |||
3629 | fetcher.download() | ||
3630 | fetcher.unpack(self.unpackdir) | ||
3631 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3632 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip'))) | ||
3633 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod'))) | ||
3634 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')), | ||
3635 | '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96') | ||
3636 | |||
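All of the gomod assertions above check the same GOPROXY download layout: the fetch URL is built as proxy host, then the module path, then "/@v/<version>.zip" (or ".mod"), with the '@' percent-encoded, which is where the "%40v" in the expected URLs comes from. A minimal sketch of that construction, for illustration only (the fetcher builds its URLs internally; this helper is hypothetical):

    from urllib.parse import quote

    def gomod_proxy_url(module, version, ext='zip'):
        # GOPROXY layout: <module>/@v/<version>.<ext>; quote() leaves '/'
        # alone but percent-encodes '@', giving the '%40v' seen in the
        # test expectations above.
        return 'https://proxy.golang.org/' + quote('%s/@v/%s.%s' % (module, version, ext))

    assert (gomod_proxy_url('gopkg.in/ini.v1', 'v1.67.0')
            == 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')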
3637 | class GoModGitTest(FetcherTest): | ||
3638 | |||
3639 | @skipIfNoNetwork() | ||
3640 | def test_gomodgit_url_repo(self): | ||
3641 | urls = ['gomodgit://golang.org/x/net;version=v0.9.0;' | ||
3642 | 'repo=go.googlesource.com/net;' | ||
3643 | 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1'] | ||
3644 | |||
3645 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3646 | ud = fetcher.ud[urls[0]] | ||
3647 | self.assertEqual(ud.host, 'go.googlesource.com') | ||
3648 | self.assertEqual(ud.path, '/net') | ||
3649 | self.assertEqual(ud.name, 'golang.org/x/net@v0.9.0') | ||
3650 | self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1') | ||
3651 | |||
3652 | fetcher.download() | ||
3653 | self.assertTrue(os.path.exists(ud.localpath)) | ||
3654 | |||
3655 | fetcher.unpack(self.unpackdir) | ||
3656 | vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs') | ||
3657 | self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca'))) | ||
3658 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3659 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip'))) | ||
3660 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod'))) | ||
3661 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')), | ||
3662 | 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e') | ||
3663 | |||
3664 | @skipIfNoNetwork() | ||
3665 | def test_gomodgit_url_subdir(self): | ||
3666 | urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;' | ||
3667 | 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;' | ||
3668 | 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3'] | ||
3669 | |||
3670 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3671 | ud = fetcher.ud[urls[0]] | ||
3672 | self.assertEqual(ud.host, 'github.com') | ||
3673 | self.assertEqual(ud.path, '/Azure/azure-sdk-for-go') | ||
3674 | self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob') | ||
3675 | self.assertEqual(ud.name, 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0') | ||
3676 | self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3') | ||
3677 | |||
3678 | fetcher.download() | ||
3679 | self.assertTrue(os.path.exists(ud.localpath)) | ||
3680 | |||
3681 | fetcher.unpack(self.unpackdir) | ||
3682 | vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs') | ||
3683 | self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3'))) | ||
3684 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3685 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip'))) | ||
3686 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod'))) | ||
3687 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')), | ||
3688 | '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873') | ||
3689 | |||
3690 | @skipIfNoNetwork() | ||
3691 | def test_gomodgit_url_srcrev_var(self): | ||
3692 | urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0'] | ||
3693 | self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951') | ||
3694 | |||
3695 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3696 | ud = fetcher.ud[urls[0]] | ||
3697 | self.assertEqual(ud.host, 'gopkg.in') | ||
3698 | self.assertEqual(ud.path, '/ini.v1') | ||
3699 | self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0') | ||
3700 | self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951') | ||
3701 | |||
3702 | fetcher.download() | ||
3703 | fetcher.unpack(self.unpackdir) | ||
3704 | vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs') | ||
3705 | self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3'))) | ||
3706 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3707 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip'))) | ||
3708 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod'))) | ||
3709 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')), | ||
3710 | '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1') | ||
3711 | |||
3712 | @skipIfNoNetwork() | ||
3713 | def test_gomodgit_url_no_go_mod_in_module(self): | ||
3714 | urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;' | ||
3715 | 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951'] | ||
3716 | |||
3717 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3718 | ud = fetcher.ud[urls[0]] | ||
3719 | self.assertEqual(ud.host, 'gopkg.in') | ||
3720 | self.assertEqual(ud.path, '/ini.v1') | ||
3721 | self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0') | ||
3722 | self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951') | ||
3723 | |||
3724 | fetcher.download() | ||
3725 | fetcher.unpack(self.unpackdir) | ||
3726 | vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs') | ||
3727 | self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3'))) | ||
3728 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3729 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip'))) | ||
3730 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod'))) | ||
3731 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')), | ||
3732 | '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1') | ||
3733 | |||
3734 | @skipIfNoNetwork() | ||
3735 | def test_gomodgit_url_host_only(self): | ||
3736 | urls = ['gomodgit://go.opencensus.io;version=v0.24.0;' | ||
3737 | 'repo=github.com/census-instrumentation/opencensus-go;' | ||
3738 | 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5'] | ||
3739 | |||
3740 | fetcher = bb.fetch2.Fetch(urls, self.d) | ||
3741 | ud = fetcher.ud[urls[0]] | ||
3742 | self.assertEqual(ud.host, 'github.com') | ||
3743 | self.assertEqual(ud.path, '/census-instrumentation/opencensus-go') | ||
3744 | self.assertEqual(ud.name, 'go.opencensus.io@v0.24.0') | ||
3745 | self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5') | ||
3746 | |||
3747 | fetcher.download() | ||
3748 | fetcher.unpack(self.unpackdir) | ||
3749 | vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs') | ||
3750 | self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1'))) | ||
3751 | downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download') | ||
3752 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip'))) | ||
3753 | self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod'))) | ||
3754 | self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')), | ||
3755 | '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96') | ||
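The '!azure' components in the cached paths asserted above come from Go's module-cache escaping rule: an upper-case letter in a module path is stored as '!' followed by its lower-case form, so the cache stays unambiguous on case-insensitive filesystems. A minimal Python re-implementation of that rule (illustration only; the fetcher relies on the layout defined by golang.org/x/mod/module, not on this helper):

    def escape_go_module_path(path):
        # Encode each upper-case letter as '!' plus its lower-case form,
        # as Go does when laying out pkg/mod/cache paths.
        return ''.join('!' + c.lower() if c.isupper() else c for c in path)

    assert (escape_go_module_path('github.com/Azure/azure-sdk-for-go')
            == 'github.com/!azure/azure-sdk-for-go')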
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py index 72d1962e7e..e3cba67ad4 100644 --- a/bitbake/lib/bb/tests/parse.py +++ b/bitbake/lib/bb/tests/parse.py | |||
@@ -75,6 +75,59 @@ unset B[flag] | |||
75 | self.assertEqual(d.getVarFlag("A","flag"), None) | 75 | self.assertEqual(d.getVarFlag("A","flag"), None) |
76 | self.assertEqual(d.getVar("B"), "2") | 76 | self.assertEqual(d.getVar("B"), "2") |
77 | 77 | ||
78 | defaulttest = """ | ||
79 | A = "set value" | ||
80 | A ??= "default value" | ||
81 | |||
82 | A[flag_set_vs_question] = "set flag" | ||
83 | A[flag_set_vs_question] ?= "question flag" | ||
84 | |||
85 | A[flag_set_vs_default] = "set flag" | ||
86 | A[flag_set_vs_default] ??= "default flag" | ||
87 | |||
88 | A[flag_question] ?= "question flag" | ||
89 | |||
90 | A[flag_default] ??= "default flag" | ||
91 | |||
92 | A[flag_question_vs_default] ?= "question flag" | ||
93 | A[flag_question_vs_default] ??= "default flag" | ||
94 | |||
95 | A[flag_default_vs_question] ??= "default flag" | ||
96 | A[flag_default_vs_question] ?= "question flag" | ||
97 | |||
98 | A[flag_set_question_default] = "set flag" | ||
99 | A[flag_set_question_default] ?= "question flag" | ||
100 | A[flag_set_question_default] ??= "default flag" | ||
101 | |||
102 | A[flag_set_default_question] = "set flag" | ||
103 | A[flag_set_default_question] ??= "default flag" | ||
104 | A[flag_set_default_question] ?= "question flag" | ||
105 | |||
106 | A[flag_set_twice] = "set flag first" | ||
107 | A[flag_set_twice] = "set flag second" | ||
108 | |||
109 | A[flag_question_twice] ?= "question flag first" | ||
110 | A[flag_question_twice] ?= "question flag second" | ||
111 | |||
112 | A[flag_default_twice] ??= "default flag first" | ||
113 | A[flag_default_twice] ??= "default flag second" | ||
114 | """ | ||
115 | def test_parse_defaulttest(self): | ||
116 | f = self.parsehelper(self.defaulttest) | ||
117 | d = bb.parse.handle(f.name, self.d)[''] | ||
118 | self.assertEqual(d.getVar("A"), "set value") | ||
119 | self.assertEqual(d.getVarFlag("A","flag_set_vs_question"), "set flag") | ||
120 | self.assertEqual(d.getVarFlag("A","flag_set_vs_default"), "set flag") | ||
121 | self.assertEqual(d.getVarFlag("A","flag_question"), "question flag") | ||
122 | self.assertEqual(d.getVarFlag("A","flag_default"), "default flag") | ||
123 | self.assertEqual(d.getVarFlag("A","flag_question_vs_default"), "question flag") | ||
124 | self.assertEqual(d.getVarFlag("A","flag_default_vs_question"), "question flag") | ||
125 | self.assertEqual(d.getVarFlag("A","flag_set_question_default"), "set flag") | ||
126 | self.assertEqual(d.getVarFlag("A","flag_set_default_question"), "set flag") | ||
127 | self.assertEqual(d.getVarFlag("A","flag_set_twice"), "set flag second") | ||
128 | self.assertEqual(d.getVarFlag("A","flag_question_twice"), "question flag first") | ||
129 | self.assertEqual(d.getVarFlag("A","flag_default_twice"), "default flag second") | ||
130 | |||
78 | exporttest = """ | 131 | exporttest = """ |
79 | A = "a" | 132 | A = "a" |
80 | export B = "b" | 133 | export B = "b" |
@@ -177,7 +230,19 @@ python () { | |||
177 | 230 | ||
178 | addtask_deltask = """ | 231 | addtask_deltask = """ |
179 | addtask do_patch after do_foo after do_unpack before do_configure before do_compile | 232 | addtask do_patch after do_foo after do_unpack before do_configure before do_compile |
180 | addtask do_fetch do_patch | 233 | addtask do_fetch2 do_patch2 |
234 | |||
235 | addtask do_myplaintask | ||
236 | addtask do_myplaintask2 | ||
237 | deltask do_myplaintask2 | ||
238 | addtask do_mytask# comment | ||
239 | addtask do_mytask2 # comment2 | ||
240 | addtask do_mytask3 | ||
241 | deltask do_mytask3# comment | ||
242 | deltask do_mytask4 # comment2 | ||
243 | |||
244 | # Ensure that a missing "do_" prefix on an "after" task name works | ||
245 | addtask do_mytask5 after mytask | ||
181 | 246 | ||
182 | MYVAR = "do_patch" | 247 | MYVAR = "do_patch" |
183 | EMPTYVAR = "" | 248 | EMPTYVAR = "" |
@@ -185,17 +250,12 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR} | |||
185 | deltask ${EMPTYVAR} | 250 | deltask ${EMPTYVAR} |
186 | """ | 251 | """ |
187 | def test_parse_addtask_deltask(self): | 252 | def test_parse_addtask_deltask(self): |
188 | import sys | ||
189 | 253 | ||
190 | with self.assertLogs() as logs: | 254 | f = self.parsehelper(self.addtask_deltask) |
191 | f = self.parsehelper(self.addtask_deltask) | 255 | d = bb.parse.handle(f.name, self.d)[''] |
192 | d = bb.parse.handle(f.name, self.d)[''] | ||
193 | 256 | ||
194 | output = "".join(logs.output) | 257 | self.assertSequenceEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], bb.build.listtasks(d)) |
195 | self.assertTrue("addtask contained multiple 'before' keywords" in output) | 258 | self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps")) |
196 | self.assertTrue("addtask contained multiple 'after' keywords" in output) | ||
197 | self.assertTrue('addtask ignored: " do_patch"' in output) | ||
198 | #self.assertTrue('dependent task do_foo for do_patch does not exist' in output) | ||
199 | 259 | ||
200 | broken_multiline_comment = """ | 260 | broken_multiline_comment = """ |
201 | # First line of comment \\ | 261 | # First line of comment \\ |
@@ -341,3 +401,65 @@ EXPORT_FUNCTIONS do_compile do_compilepython | |||
341 | self.assertIn("else", d.getVar("do_compilepython")) | 401 | self.assertIn("else", d.getVar("do_compilepython")) |
342 | check_function_flags(d) | 402 | check_function_flags(d) |
343 | 403 | ||
404 | export_function_unclosed_tab = """ | ||
405 | do_compile () { | ||
406 | bb.note("Something") | ||
407 | \t} | ||
408 | """ | ||
409 | export_function_unclosed_space = """ | ||
410 | do_compile () { | ||
411 | bb.note("Something") | ||
412 | } | ||
413 | """ | ||
414 | export_function_residue = """ | ||
415 | do_compile () { | ||
416 | bb.note("Something") | ||
417 | } | ||
418 | |||
419 | include \\ | ||
420 | """ | ||
421 | |||
422 | def test_unclosed_functions(self): | ||
423 | def test_helper(content, expected_error): | ||
424 | with tempfile.TemporaryDirectory() as tempdir: | ||
425 | recipename = tempdir + "/recipe_unclosed.bb" | ||
426 | with open(recipename, "w") as f: | ||
427 | f.write(content) | ||
428 | f.flush() | ||
429 | os.chdir(tempdir) | ||
430 | with self.assertRaises(bb.parse.ParseError) as error: | ||
431 | bb.parse.handle(recipename, bb.data.createCopy(self.d)) | ||
432 | self.assertIn(expected_error, str(error.exception)) | ||
433 | |||
434 | with tempfile.TemporaryDirectory() as tempdir: | ||
435 | test_helper(self.export_function_unclosed_tab, "Unparsed lines from unclosed function") | ||
436 | test_helper(self.export_function_unclosed_space, "Unparsed lines from unclosed function") | ||
437 | test_helper(self.export_function_residue, "Unparsed lines") | ||
438 | |||
439 | recipename_closed = tempdir + "/recipe_closed.bb" | ||
440 | with open(recipename_closed, "w") as in_file: | ||
441 | lines = self.export_function_unclosed_tab.split("\n") | ||
442 | lines[3] = "}" | ||
443 | in_file.write("\n".join(lines)) | ||
444 | in_file.flush() | ||
445 | bb.parse.handle(recipename_closed, bb.data.createCopy(self.d)) | ||
446 | |||
447 | special_character_assignment = """ | ||
448 | A+="a" | ||
449 | A+ = "b" | ||
450 | + = "c" | ||
451 | """ | ||
452 | ambiguous_assignment = """ | ||
453 | += "d" | ||
454 | """ | ||
455 | def test_parse_special_character_assignment(self): | ||
456 | f = self.parsehelper(self.special_character_assignment) | ||
457 | d = bb.parse.handle(f.name, self.d)[''] | ||
458 | self.assertEqual(d.getVar("A"), " a") | ||
459 | self.assertEqual(d.getVar("A+"), "b") | ||
460 | self.assertEqual(d.getVar("+"), "c") | ||
461 | |||
462 | f = self.parsehelper(self.ambiguous_assignment) | ||
463 | with self.assertRaises(bb.parse.ParseError) as error: | ||
464 | bb.parse.handle(f.name, self.d) | ||
465 | self.assertIn("Empty variable name in assignment", str(error.exception)) | ||
diff --git a/bitbake/lib/bb/tests/persist_data.py b/bitbake/lib/bb/tests/persist_data.py deleted file mode 100644 index f641b5acbc..0000000000 --- a/bitbake/lib/bb/tests/persist_data.py +++ /dev/null | |||
@@ -1,129 +0,0 @@ | |||
1 | # | ||
2 | # BitBake Test for lib/bb/persist_data/ | ||
3 | # | ||
4 | # Copyright (C) 2018 Garmin Ltd. | ||
5 | # | ||
6 | # SPDX-License-Identifier: GPL-2.0-only | ||
7 | # | ||
8 | |||
9 | import unittest | ||
10 | import bb.data | ||
11 | import bb.persist_data | ||
12 | import tempfile | ||
13 | import threading | ||
14 | |||
15 | class PersistDataTest(unittest.TestCase): | ||
16 | def _create_data(self): | ||
17 | return bb.persist_data.persist('TEST_PERSIST_DATA', self.d) | ||
18 | |||
19 | def setUp(self): | ||
20 | self.d = bb.data.init() | ||
21 | self.tempdir = tempfile.TemporaryDirectory() | ||
22 | self.d['PERSISTENT_DIR'] = self.tempdir.name | ||
23 | self.data = self._create_data() | ||
24 | self.items = { | ||
25 | 'A1': '1', | ||
26 | 'B1': '2', | ||
27 | 'C2': '3' | ||
28 | } | ||
29 | self.stress_count = 10000 | ||
30 | self.thread_count = 5 | ||
31 | |||
32 | for k,v in self.items.items(): | ||
33 | self.data[k] = v | ||
34 | |||
35 | def tearDown(self): | ||
36 | self.tempdir.cleanup() | ||
37 | |||
38 | def _iter_helper(self, seen, iterator): | ||
39 | with iter(iterator): | ||
40 | for v in iterator: | ||
41 | self.assertTrue(v in seen) | ||
42 | seen.remove(v) | ||
43 | self.assertEqual(len(seen), 0, '%s not seen' % seen) | ||
44 | |||
45 | def test_get(self): | ||
46 | for k, v in self.items.items(): | ||
47 | self.assertEqual(self.data[k], v) | ||
48 | |||
49 | self.assertIsNone(self.data.get('D')) | ||
50 | with self.assertRaises(KeyError): | ||
51 | self.data['D'] | ||
52 | |||
53 | def test_set(self): | ||
54 | for k, v in self.items.items(): | ||
55 | self.data[k] += '-foo' | ||
56 | |||
57 | for k, v in self.items.items(): | ||
58 | self.assertEqual(self.data[k], v + '-foo') | ||
59 | |||
60 | def test_delete(self): | ||
61 | self.data['D'] = '4' | ||
62 | self.assertEqual(self.data['D'], '4') | ||
63 | del self.data['D'] | ||
64 | self.assertIsNone(self.data.get('D')) | ||
65 | with self.assertRaises(KeyError): | ||
66 | self.data['D'] | ||
67 | |||
68 | def test_contains(self): | ||
69 | for k in self.items: | ||
70 | self.assertTrue(k in self.data) | ||
71 | self.assertTrue(self.data.has_key(k)) | ||
72 | self.assertFalse('NotFound' in self.data) | ||
73 | self.assertFalse(self.data.has_key('NotFound')) | ||
74 | |||
75 | def test_len(self): | ||
76 | self.assertEqual(len(self.data), len(self.items)) | ||
77 | |||
78 | def test_iter(self): | ||
79 | self._iter_helper(set(self.items.keys()), self.data) | ||
80 | |||
81 | def test_itervalues(self): | ||
82 | self._iter_helper(set(self.items.values()), self.data.itervalues()) | ||
83 | |||
84 | def test_iteritems(self): | ||
85 | self._iter_helper(set(self.items.items()), self.data.iteritems()) | ||
86 | |||
87 | def test_get_by_pattern(self): | ||
88 | self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1')) | ||
89 | |||
90 | def _stress_read(self, data): | ||
91 | for i in range(self.stress_count): | ||
92 | for k in self.items: | ||
93 | data[k] | ||
94 | |||
95 | def _stress_write(self, data): | ||
96 | for i in range(self.stress_count): | ||
97 | for k, v in self.items.items(): | ||
98 | data[k] = v + str(i) | ||
99 | |||
100 | def _validate_stress(self): | ||
101 | for k, v in self.items.items(): | ||
102 | self.assertEqual(self.data[k], v + str(self.stress_count - 1)) | ||
103 | |||
104 | def test_stress(self): | ||
105 | self._stress_read(self.data) | ||
106 | self._stress_write(self.data) | ||
107 | self._validate_stress() | ||
108 | |||
109 | def test_stress_threads(self): | ||
110 | def read_thread(): | ||
111 | data = self._create_data() | ||
112 | self._stress_read(data) | ||
113 | |||
114 | def write_thread(): | ||
115 | data = self._create_data() | ||
116 | self._stress_write(data) | ||
117 | |||
118 | threads = [] | ||
119 | for i in range(self.thread_count): | ||
120 | threads.append(threading.Thread(target=read_thread)) | ||
121 | threads.append(threading.Thread(target=write_thread)) | ||
122 | |||
123 | for t in threads: | ||
124 | t.start() | ||
125 | self._stress_read(self.data) | ||
126 | for t in threads: | ||
127 | t.join() | ||
128 | self._validate_stress() | ||
129 | |||
diff --git a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass index b57650d591..80b003b2b5 100644 --- a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass +++ b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass | |||
@@ -9,7 +9,7 @@ def stamptask(d): | |||
9 | with open(stampname, "a+") as f: | 9 | with open(stampname, "a+") as f: |
10 | f.write(d.getVar("BB_UNIHASH") + "\n") | 10 | f.write(d.getVar("BB_UNIHASH") + "\n") |
11 | 11 | ||
12 | if d.getVar("BB_CURRENT_MC") != "default": | 12 | if d.getVar("BB_CURRENT_MC") != "": |
13 | thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") | 13 | thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") |
14 | if thistask in d.getVar("SLOWTASKS").split(): | 14 | if thistask in d.getVar("SLOWTASKS").split(): |
15 | bb.note("Slowing task %s" % thistask) | 15 | bb.note("Slowing task %s" % thistask) |
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb new file mode 100644 index 0000000000..3c7dca0257 --- /dev/null +++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb | |||
@@ -0,0 +1,2 @@ | |||
1 | do_build[mcdepends] = "mc::mc-1:h1:do_invalid" | ||
2 | |||
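The mcdepends value in g1.bb follows the "mc:<from-mc>:<to-mc>:<pn>:<task>" form; the empty field between the first two colons denotes the default (unnamed) multiconfig, matching the BB_CURRENT_MC != "" change in base.bbclass above. A toy parser, for illustration only:

    def parse_mcdepend(dep):
        # Splits "mc:<from-mc>:<to-mc>:<pn>:<task>"; an empty <from-mc>
        # means the default multiconfig. Hypothetical helper.
        prefix, from_mc, to_mc, pn, task = dep.split(':')
        assert prefix == 'mc'
        return from_mc or None, to_mc, pn, task

    print(parse_mcdepend('mc::mc-1:h1:do_invalid'))
    # (None, 'mc-1', 'h1', 'do_invalid')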
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb | |||
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py index cc87e8d6a8..74f5ded2e6 100644 --- a/bitbake/lib/bb/tests/runqueue.py +++ b/bitbake/lib/bb/tests/runqueue.py | |||
@@ -26,7 +26,7 @@ class RunQueueTests(unittest.TestCase): | |||
26 | a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" | 26 | a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" |
27 | b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" | 27 | b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" |
28 | 28 | ||
29 | def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): | 29 | def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False, allowfailure=False): |
30 | env = os.environ.copy() | 30 | env = os.environ.copy() |
31 | env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) | 31 | env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) |
32 | env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR" | 32 | env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR" |
@@ -41,6 +41,8 @@ class RunQueueTests(unittest.TestCase): | |||
41 | output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) | 41 | output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) |
42 | print(output) | 42 | print(output) |
43 | except subprocess.CalledProcessError as e: | 43 | except subprocess.CalledProcessError as e: |
44 | if allowfailure: | ||
45 | return e.output | ||
44 | self.fail("Command %s failed with %s" % (cmd, e.output)) | 46 | self.fail("Command %s failed with %s" % (cmd, e.output)) |
45 | tasks = [] | 47 | tasks = [] |
46 | tasklog = builddir + "/task.log" | 48 | tasklog = builddir + "/task.log" |
@@ -314,6 +316,13 @@ class RunQueueTests(unittest.TestCase): | |||
314 | ["mc_2:a1:%s" % t for t in rerun_tasks] | 316 | ["mc_2:a1:%s" % t for t in rerun_tasks] |
315 | self.assertEqual(set(tasks), set(expected)) | 317 | self.assertEqual(set(tasks), set(expected)) |
316 | 318 | ||
319 | # Check that a multiconfig that doesn't exist raises the correct error message | ||
320 | error_output = self.run_bitbakecmd(["bitbake", "g1"], tempdir, "", extraenv=extraenv, cleanup=True, allowfailure=True) | ||
321 | self.assertIn("non-existent task", error_output) | ||
322 | # If the word 'Traceback' or 'KeyError' is in the output we've regressed | ||
323 | self.assertNotIn("Traceback", error_output) | ||
324 | self.assertNotIn("KeyError", error_output) | ||
325 | |||
317 | self.shutdown(tempdir) | 326 | self.shutdown(tempdir) |
318 | 327 | ||
319 | def test_hashserv_single(self): | 328 | def test_hashserv_single(self): |
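The allowfailure flag added above captures the combined output of a bitbake invocation that is expected to fail, so the test can assert on the error text instead of aborting. The same expected-failure pattern in isolation (a sketch; the real helper also parses task logs):

    import subprocess

    def run_expect_failure(cmd, **kwargs):
        # Run a command whose failure is part of the test: return its
        # combined stdout/stderr either way instead of raising.
        try:
            return subprocess.check_output(cmd, stderr=subprocess.STDOUT,
                                           universal_newlines=True, **kwargs)
        except subprocess.CalledProcessError as e:
            return e.output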
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py index c363f62d7d..48e61dfcea 100644 --- a/bitbake/lib/bb/tests/utils.py +++ b/bitbake/lib/bb/tests/utils.py | |||
@@ -130,6 +130,14 @@ class Checksum(unittest.TestCase): | |||
130 | checksum = bb.utils.sha256_file(f.name) | 130 | checksum = bb.utils.sha256_file(f.name) |
131 | self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") | 131 | self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") |
132 | 132 | ||
133 | def test_goh1(self): | ||
134 | import hashlib | ||
135 | with tempfile.NamedTemporaryFile() as f: | ||
136 | f.write(self.filler) | ||
137 | f.flush() | ||
138 | checksum = bb.utils.goh1_file(f.name) | ||
139 | self.assertEqual(checksum, "81191f04d4abf413e5badd234814e4202d9efa73e6f9437e9ddd6b8165b569ef") | ||
140 | |||
133 | class EditMetadataFile(unittest.TestCase): | 141 | class EditMetadataFile(unittest.TestCase): |
134 | _origfile = """ | 142 | _origfile = """ |
135 | # A comment | 143 | # A comment |
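The expected goh1 digest in test_goh1 above is the hex form of Go's dirhash "Hash1" scheme, which bb.utils.goh1_file() (added in the utils.py hunk later in this listing) implements: hash each archive member with SHA-256 in sorted name order, build "<hex>  <name>" lines, then hash the concatenation. A self-contained sketch for a module zip, assuming a local zippath (go.sum stores the same digest base64-encoded behind an "h1:" prefix):

    import base64, hashlib, zipfile

    def goh1_zip_hex(zippath):
        # Go dirhash "Hash1": sha256 every member in sorted name order,
        # build "<hex>  <name>" lines (two spaces), then sha256 the
        # concatenation. go.sum shows base64; bb.utils returns hex.
        lines = []
        with zipfile.ZipFile(zippath) as archive:
            for name in sorted(archive.namelist()):
                digest = hashlib.sha256(archive.read(name)).hexdigest()
                lines.append("%s  %s\n" % (digest, name))
        final = hashlib.sha256("".join(lines).encode("utf-8"))
        print("h1:" + base64.b64encode(final.digest()).decode())  # go.sum form
        return final.hexdigest()                                  # bb.utils form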
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py index dcd3910cc4..f48baeb334 100644 --- a/bitbake/lib/bb/tinfoil.py +++ b/bitbake/lib/bb/tinfoil.py | |||
@@ -15,6 +15,7 @@ import atexit | |||
15 | import re | 15 | import re |
16 | from collections import OrderedDict, defaultdict | 16 | from collections import OrderedDict, defaultdict |
17 | from functools import partial | 17 | from functools import partial |
18 | from contextlib import contextmanager | ||
18 | 19 | ||
19 | import bb.cache | 20 | import bb.cache |
20 | import bb.cooker | 21 | import bb.cooker |
@@ -188,11 +189,19 @@ class TinfoilCookerAdapter: | |||
188 | self._cache[name] = attrvalue | 189 | self._cache[name] = attrvalue |
189 | return attrvalue | 190 | return attrvalue |
190 | 191 | ||
192 | class TinfoilSkiplistByMcAdapter: | ||
193 | def __init__(self, tinfoil): | ||
194 | self.tinfoil = tinfoil | ||
195 | |||
196 | def __getitem__(self, mc): | ||
197 | return self.tinfoil.get_skipped_recipes(mc) | ||
198 | |||
191 | def __init__(self, tinfoil): | 199 | def __init__(self, tinfoil): |
192 | self.tinfoil = tinfoil | 200 | self.tinfoil = tinfoil |
193 | self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() | 201 | self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() |
194 | self.collections = {} | 202 | self.collections = {} |
195 | self.recipecaches = {} | 203 | self.recipecaches = {} |
204 | self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil) | ||
196 | for mc in self.multiconfigs: | 205 | for mc in self.multiconfigs: |
197 | self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) | 206 | self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) |
198 | self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) | 207 | self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) |
@@ -201,8 +210,6 @@ class TinfoilCookerAdapter: | |||
201 | # Grab these only when they are requested since they aren't always used | 210 | # Grab these only when they are requested since they aren't always used |
202 | if name in self._cache: | 211 | if name in self._cache: |
203 | return self._cache[name] | 212 | return self._cache[name] |
204 | elif name == 'skiplist': | ||
205 | attrvalue = self.tinfoil.get_skipped_recipes() | ||
206 | elif name == 'bbfile_config_priorities': | 213 | elif name == 'bbfile_config_priorities': |
207 | ret = self.tinfoil.run_command('getLayerPriorities') | 214 | ret = self.tinfoil.run_command('getLayerPriorities') |
208 | bbfile_config_priorities = [] | 215 | bbfile_config_priorities = [] |
@@ -514,12 +521,12 @@ class Tinfoil: | |||
514 | """ | 521 | """ |
515 | return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) | 522 | return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) |
516 | 523 | ||
517 | def get_skipped_recipes(self): | 524 | def get_skipped_recipes(self, mc=''): |
518 | """ | 525 | """ |
519 | Find recipes which were skipped (i.e. SkipRecipe was raised | 526 | Find recipes which were skipped (i.e. SkipRecipe was raised |
520 | during parsing). | 527 | during parsing). |
521 | """ | 528 | """ |
522 | return OrderedDict(self.run_command('getSkippedRecipes')) | 529 | return OrderedDict(self.run_command('getSkippedRecipes', mc)) |
523 | 530 | ||
524 | def get_all_providers(self, mc=''): | 531 | def get_all_providers(self, mc=''): |
525 | return defaultdict(list, self.run_command('allProviders', mc)) | 532 | return defaultdict(list, self.run_command('allProviders', mc)) |
@@ -533,6 +540,7 @@ class Tinfoil: | |||
533 | def get_runtime_providers(self, rdep): | 540 | def get_runtime_providers(self, rdep): |
534 | return self.run_command('getRuntimeProviders', rdep) | 541 | return self.run_command('getRuntimeProviders', rdep) |
535 | 542 | ||
543 | # TODO: teach this method about mc | ||
536 | def get_recipe_file(self, pn): | 544 | def get_recipe_file(self, pn): |
537 | """ | 545 | """ |
538 | Get the file name for the specified recipe/target. Raises | 546 | Get the file name for the specified recipe/target. Raises |
@@ -541,6 +549,7 @@ class Tinfoil: | |||
541 | """ | 549 | """ |
542 | best = self.find_best_provider(pn) | 550 | best = self.find_best_provider(pn) |
543 | if not best or (len(best) > 3 and not best[3]): | 551 | if not best or (len(best) > 3 and not best[3]): |
552 | # TODO: pass down mc | ||
544 | skiplist = self.get_skipped_recipes() | 553 | skiplist = self.get_skipped_recipes() |
545 | taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) | 554 | taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) |
546 | skipreasons = taskdata.get_reasons(pn) | 555 | skipreasons = taskdata.get_reasons(pn) |
@@ -633,6 +642,29 @@ class Tinfoil: | |||
633 | fn = self.get_recipe_file(pn) | 642 | fn = self.get_recipe_file(pn) |
634 | return self.parse_recipe_file(fn) | 643 | return self.parse_recipe_file(fn) |
635 | 644 | ||
645 | @contextmanager | ||
646 | def _data_tracked_if_enabled(self): | ||
647 | """ | ||
648 | A context manager to enable data tracking for a code segment if data | ||
649 | tracking was enabled for this tinfoil instance. | ||
650 | """ | ||
651 | if self.tracking: | ||
652 | # Enable history tracking just for the operation | ||
653 | self.run_command('enableDataTracking') | ||
654 | |||
655 | # Here goes the operation with the optional data tracking | ||
656 | yield | ||
657 | |||
658 | if self.tracking: | ||
659 | self.run_command('disableDataTracking') | ||
660 | |||
661 | def finalizeData(self): | ||
662 | """ | ||
663 | Run anonymous functions and expand keys | ||
664 | """ | ||
665 | with self._data_tracked_if_enabled(): | ||
666 | return self._reconvert_type(self.run_command('finalizeData'), 'DataStoreConnectionHandle') | ||
667 | |||
636 | def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): | 668 | def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): |
637 | """ | 669 | """ |
638 | Parse the specified recipe file (with or without bbappends) | 670 | Parse the specified recipe file (with or without bbappends) |
@@ -645,10 +677,7 @@ class Tinfoil: | |||
645 | appendlist: optional list of bbappend files to apply, if you | 677 | appendlist: optional list of bbappend files to apply, if you |
646 | want to filter them | 678 | want to filter them |
647 | """ | 679 | """ |
648 | if self.tracking: | 680 | with self._data_tracked_if_enabled(): |
649 | # Enable history tracking just for the parse operation | ||
650 | self.run_command('enableDataTracking') | ||
651 | try: | ||
652 | if appends and appendlist == []: | 681 | if appends and appendlist == []: |
653 | appends = False | 682 | appends = False |
654 | if config_data: | 683 | if config_data: |
@@ -660,9 +689,6 @@ class Tinfoil: | |||
660 | return self._reconvert_type(dscon, 'DataStoreConnectionHandle') | 689 | return self._reconvert_type(dscon, 'DataStoreConnectionHandle') |
661 | else: | 690 | else: |
662 | return None | 691 | return None |
663 | finally: | ||
664 | if self.tracking: | ||
665 | self.run_command('disableDataTracking') | ||
666 | 692 | ||
667 | def build_file(self, buildfile, task, internal=True): | 693 | def build_file(self, buildfile, task, internal=True): |
668 | """ | 694 | """ |
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py index 8b212b7803..4ee45d67a2 100644 --- a/bitbake/lib/bb/ui/buildinfohelper.py +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -559,7 +559,10 @@ class ORMWrapper(object): | |||
559 | # we might have an invalid link; no way to detect this. just set it to None | 559 | # we might have an invalid link; no way to detect this. just set it to None |
560 | filetarget_obj = None | 560 | filetarget_obj = None |
561 | 561 | ||
562 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 562 | try: |
563 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | ||
564 | except Target_File.DoesNotExist: | ||
565 | parent_obj = None | ||
563 | 566 | ||
564 | Target_File.objects.create( | 567 | Target_File.objects.create( |
565 | target = target_obj, | 568 | target = target_obj, |
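The buildinfohelper fix above is the standard Django "get or None" idiom: treat a missing row as None instead of letting DoesNotExist escape. Generalized as a hypothetical helper:

    def get_or_none(model, **filters):
        # Return the single matching row, or None when it does not exist.
        # Assumes a Django model class; mirrors the try/except added above.
        try:
            return model.objects.get(**filters)
        except model.DoesNotExist:
            return None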
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py index f86999bb09..9a589a5c8e 100644 --- a/bitbake/lib/bb/ui/knotty.py +++ b/bitbake/lib/bb/ui/knotty.py | |||
@@ -24,6 +24,12 @@ import atexit | |||
24 | from itertools import groupby | 24 | from itertools import groupby |
25 | 25 | ||
26 | from bb.ui import uihelper | 26 | from bb.ui import uihelper |
27 | import bb.build | ||
28 | import bb.command | ||
29 | import bb.cooker | ||
30 | import bb.event | ||
31 | import bb.runqueue | ||
32 | import bb.utils | ||
27 | 33 | ||
28 | featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] | 34 | featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] |
29 | 35 | ||
@@ -103,7 +109,7 @@ def new_progress(msg, maxval): | |||
103 | return NonInteractiveProgress(msg, maxval) | 109 | return NonInteractiveProgress(msg, maxval) |
104 | 110 | ||
105 | def pluralise(singular, plural, qty): | 111 | def pluralise(singular, plural, qty): |
106 | if(qty == 1): | 112 | if qty == 1: |
107 | return singular % qty | 113 | return singular % qty |
108 | else: | 114 | else: |
109 | return plural % qty | 115 | return plural % qty |
@@ -112,6 +118,7 @@ def pluralise(singular, plural, qty): | |||
112 | class InteractConsoleLogFilter(logging.Filter): | 118 | class InteractConsoleLogFilter(logging.Filter): |
113 | def __init__(self, tf): | 119 | def __init__(self, tf): |
114 | self.tf = tf | 120 | self.tf = tf |
121 | super().__init__() | ||
115 | 122 | ||
116 | def filter(self, record): | 123 | def filter(self, record): |
117 | if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): | 124 | if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): |
@@ -346,7 +353,7 @@ def print_event_log(event, includelogs, loglines, termfilter): | |||
346 | termfilter.clearFooter() | 353 | termfilter.clearFooter() |
347 | bb.error("Logfile of failure stored in: %s" % logfile) | 354 | bb.error("Logfile of failure stored in: %s" % logfile) |
348 | if includelogs and not event.errprinted: | 355 | if includelogs and not event.errprinted: |
349 | print("Log data follows:") | 356 | bb.plain("Log data follows:") |
350 | f = open(logfile, "r") | 357 | f = open(logfile, "r") |
351 | lines = [] | 358 | lines = [] |
352 | while True: | 359 | while True: |
@@ -359,11 +366,11 @@ def print_event_log(event, includelogs, loglines, termfilter): | |||
359 | if len(lines) > int(loglines): | 366 | if len(lines) > int(loglines): |
360 | lines.pop(0) | 367 | lines.pop(0) |
361 | else: | 368 | else: |
362 | print('| %s' % l) | 369 | bb.plain('| %s' % l) |
363 | f.close() | 370 | f.close() |
364 | if lines: | 371 | if lines: |
365 | for line in lines: | 372 | for line in lines: |
366 | print(line) | 373 | bb.plain(line) |
367 | 374 | ||
368 | def _log_settings_from_server(server, observe_only): | 375 | def _log_settings_from_server(server, observe_only): |
369 | # Get values of variables which control our output | 376 | # Get values of variables which control our output |
@@ -555,13 +562,23 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
555 | } | 562 | } |
556 | }) | 563 | }) |
557 | 564 | ||
558 | bb.utils.mkdirhier(os.path.dirname(consolelogfile)) | 565 | consolelogdirname = os.path.dirname(consolelogfile) |
559 | loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log') | 566 | # `bb.utils.mkdirhier` has this check, but it reports failure using bb.fatal, which logs |
567 | # to the very logger we are trying to set up. | ||
568 | if '${' in str(consolelogdirname): | ||
569 | print( | ||
570 | "FATAL: Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format( | ||
571 | consolelogdirname)) | ||
572 | if '${MACHINE}' in consolelogdirname: | ||
573 | print("HINT: It looks like you forgot to set MACHINE in local.conf.") | ||
574 | |||
575 | bb.utils.mkdirhier(consolelogdirname) | ||
576 | loglink = os.path.join(consolelogdirname, 'console-latest.log') | ||
560 | bb.utils.remove(loglink) | 577 | bb.utils.remove(loglink) |
561 | try: | 578 | try: |
562 | os.symlink(os.path.basename(consolelogfile), loglink) | 579 | os.symlink(os.path.basename(consolelogfile), loglink) |
563 | except OSError: | 580 | except OSError: |
564 | pass | 581 | pass |
565 | 582 | ||
566 | # Add the logging domains specified by the user on the command line | 583 | # Add the logging domains specified by the user on the command line |
567 | for (domainarg, iterator) in groupby(params.debug_domains): | 584 | for (domainarg, iterator) in groupby(params.debug_domains): |
@@ -577,6 +594,8 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
577 | else: | 594 | else: |
578 | log_exec_tty = False | 595 | log_exec_tty = False |
579 | 596 | ||
597 | should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == '' | ||
598 | |||
580 | helper = uihelper.BBUIHelper() | 599 | helper = uihelper.BBUIHelper() |
581 | 600 | ||
582 | # Look for the specially designated handlers which need to be passed to the | 601 | # Look for the specially designated handlers which need to be passed to the |
@@ -640,7 +659,7 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
640 | return_value = 0 | 659 | return_value = 0 |
641 | errors = 0 | 660 | errors = 0 |
642 | warnings = 0 | 661 | warnings = 0 |
643 | taskfailures = [] | 662 | taskfailures = {} |
644 | 663 | ||
645 | printintervaldelta = 10 * 60 # 10 minutes | 664 | printintervaldelta = 10 * 60 # 10 minutes |
646 | printinterval = printintervaldelta | 665 | printinterval = printintervaldelta |
@@ -726,6 +745,8 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
726 | if isinstance(event, bb.build.TaskFailed): | 745 | if isinstance(event, bb.build.TaskFailed): |
727 | return_value = 1 | 746 | return_value = 1 |
728 | print_event_log(event, includelogs, loglines, termfilter) | 747 | print_event_log(event, includelogs, loglines, termfilter) |
748 | k = "{}:{}".format(event._fn, event._task) | ||
749 | taskfailures[k] = event.logfile | ||
729 | if isinstance(event, bb.build.TaskBase): | 750 | if isinstance(event, bb.build.TaskBase): |
730 | logger.info(event._message) | 751 | logger.info(event._message) |
731 | continue | 752 | continue |
@@ -821,7 +842,7 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
821 | 842 | ||
822 | if isinstance(event, bb.runqueue.runQueueTaskFailed): | 843 | if isinstance(event, bb.runqueue.runQueueTaskFailed): |
823 | return_value = 1 | 844 | return_value = 1 |
824 | taskfailures.append(event.taskstring) | 845 | taskfailures.setdefault(event.taskstring) |
825 | logger.error(str(event)) | 846 | logger.error(str(event)) |
826 | continue | 847 | continue |
827 | 848 | ||
@@ -942,11 +963,21 @@ def main(server, eventHandler, params, tf = TerminalFilter): | |||
942 | try: | 963 | try: |
943 | termfilter.clearFooter() | 964 | termfilter.clearFooter() |
944 | summary = "" | 965 | summary = "" |
966 | def format_hyperlink(url, link_text): | ||
967 | if should_print_hyperlinks: | ||
968 | start = f'\033]8;;{url}\033\\' | ||
969 | end = '\033]8;;\033\\' | ||
970 | return f'{start}{link_text}{end}' | ||
971 | return link_text | ||
972 | |||
945 | if taskfailures: | 973 | if taskfailures: |
946 | summary += pluralise("\nSummary: %s task failed:", | 974 | summary += pluralise("\nSummary: %s task failed:", |
947 | "\nSummary: %s tasks failed:", len(taskfailures)) | 975 | "\nSummary: %s tasks failed:", len(taskfailures)) |
948 | for failure in taskfailures: | 976 | for (failure, log_file) in taskfailures.items(): |
949 | summary += "\n %s" % failure | 977 | summary += "\n %s" % failure |
978 | if log_file: | ||
979 | hyperlink = format_hyperlink(f"file://{log_file}", log_file) | ||
980 | summary += "\n log: {}".format(hyperlink) | ||
950 | if warnings: | 981 | if warnings: |
951 | summary += pluralise("\nSummary: There was %s WARNING message.", | 982 | summary += pluralise("\nSummary: There was %s WARNING message.", |
952 | "\nSummary: There were %s WARNING messages.", warnings) | 983 | "\nSummary: There were %s WARNING messages.", warnings) |
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py index fca46c2874..7eeaab8d63 100644 --- a/bitbake/lib/bb/ui/teamcity.py +++ b/bitbake/lib/bb/ui/teamcity.py | |||
@@ -30,7 +30,6 @@ import bb.build | |||
30 | import bb.command | 30 | import bb.command |
31 | import bb.cooker | 31 | import bb.cooker |
32 | import bb.event | 32 | import bb.event |
33 | import bb.exceptions | ||
34 | import bb.runqueue | 33 | import bb.runqueue |
35 | from bb.ui import uihelper | 34 | from bb.ui import uihelper |
36 | 35 | ||
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter): | |||
102 | details = "" | 101 | details = "" |
103 | if hasattr(record, 'bb_exc_formatted'): | 102 | if hasattr(record, 'bb_exc_formatted'): |
104 | details = ''.join(record.bb_exc_formatted) | 103 | details = ''.join(record.bb_exc_formatted) |
105 | elif hasattr(record, 'bb_exc_info'): | ||
106 | etype, value, tb = record.bb_exc_info | ||
107 | formatted = bb.exceptions.format_exception(etype, value, tb, limit=5) | ||
108 | details = ''.join(formatted) | ||
109 | 104 | ||
110 | if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]: | 105 | if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]: |
111 | # ERROR gets a separate errorDetails field | 106 | # ERROR gets a separate errorDetails field |
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py index 82913e0da8..e6983bd559 100644 --- a/bitbake/lib/bb/ui/uihelper.py +++ b/bitbake/lib/bb/ui/uihelper.py | |||
@@ -31,7 +31,7 @@ class BBUIHelper: | |||
31 | 31 | ||
32 | if isinstance(event, bb.build.TaskStarted): | 32 | if isinstance(event, bb.build.TaskStarted): |
33 | tid = event._fn + ":" + event._task | 33 | tid = event._fn + ":" + event._task |
34 | if event._mc != "default": | 34 | if event._mc != "": |
35 | self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } | 35 | self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } |
36 | else: | 36 | else: |
37 | self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } | 37 | self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } |
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index ebee65d3dd..a2806fd360 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
@@ -11,11 +11,8 @@ import re, fcntl, os, string, stat, shutil, time | |||
11 | import sys | 11 | import sys |
12 | import errno | 12 | import errno |
13 | import logging | 13 | import logging |
14 | import bb | ||
15 | import bb.msg | ||
16 | import locale | 14 | import locale |
17 | import multiprocessing | 15 | import multiprocessing |
18 | import fcntl | ||
19 | import importlib | 16 | import importlib |
20 | import importlib.machinery | 17 | import importlib.machinery |
21 | import importlib.util | 18 | import importlib.util |
@@ -24,7 +21,6 @@ import subprocess | |||
24 | import glob | 21 | import glob |
25 | import fnmatch | 22 | import fnmatch |
26 | import traceback | 23 | import traceback |
27 | import errno | ||
28 | import signal | 24 | import signal |
29 | import collections | 25 | import collections |
30 | import copy | 26 | import copy |
@@ -36,6 +32,8 @@ import tempfile | |||
36 | from subprocess import getstatusoutput | 32 | from subprocess import getstatusoutput |
37 | from contextlib import contextmanager | 33 | from contextlib import contextmanager |
38 | from ctypes import cdll | 34 | from ctypes import cdll |
35 | import bb | ||
36 | import bb.msg | ||
39 | 37 | ||
40 | logger = logging.getLogger("BitBake.Util") | 38 | logger = logging.getLogger("BitBake.Util") |
41 | python_extensions = importlib.machinery.all_suffixes() | 39 | python_extensions = importlib.machinery.all_suffixes() |
@@ -84,7 +82,16 @@ def explode_version(s): | |||
84 | return r | 82 | return r |
85 | 83 | ||
86 | def split_version(s): | 84 | def split_version(s): |
87 | """Split a version string into its constituent parts (PE, PV, PR)""" | 85 | """Split a version string into its constituent parts (PE, PV, PR). |
86 | |||
87 | Arguments: | ||
88 | |||
89 | - ``s``: version string. The format of the input string should be:: | ||
90 | |||
91 | ${PE}:${PV}-${PR} | ||
92 | |||
93 | Returns a tuple ``(pe, pv, pr)``. | ||
94 | """ | ||
88 | s = s.strip(" <>=") | 95 | s = s.strip(" <>=") |
89 | e = 0 | 96 | e = 0 |
90 | if s.count(':'): | 97 | if s.count(':'): |
@@ -136,16 +143,30 @@ def vercmp(ta, tb): | |||
136 | return r | 143 | return r |
137 | 144 | ||
138 | def vercmp_string(a, b): | 145 | def vercmp_string(a, b): |
139 | """ Split version strings and compare them """ | 146 | """ Split version strings using ``bb.utils.split_version()`` and compare |
147 | them with ``bb.utils.vercmp()``. | ||
148 | |||
149 | Arguments: | ||
150 | |||
151 | - ``a``: left version string operand. | ||
152 | - ``b``: right version string operand. | ||
153 | |||
154 | Returns what ``bb.utils.vercmp()`` returns.""" | ||
140 | ta = split_version(a) | 155 | ta = split_version(a) |
141 | tb = split_version(b) | 156 | tb = split_version(b) |
142 | return vercmp(ta, tb) | 157 | return vercmp(ta, tb) |
143 | 158 | ||
144 | def vercmp_string_op(a, b, op): | 159 | def vercmp_string_op(a, b, op): |
145 | """ | 160 | """ |
146 | Compare two versions and check if the specified comparison operator matches the result of the comparison. | 161 | Takes the return value of ``bb.utils.vercmp()`` and returns the result of |
147 | This function is fairly liberal about what operators it will accept since there are a variety of styles | 162 | the operation defined by ``op`` between that value and 0. |
148 | depending on the context. | 163 | |
164 | Arguments: | ||
165 | |||
166 | - ``a``: left version string operand. | ||
167 | - ``b``: right version string operand. | ||
168 | - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``, | ||
169 | ``>``, ``>>``, ``<``, ``<<`` or ``!=``. | ||
149 | """ | 170 | """ |
150 | res = vercmp_string(a, b) | 171 | res = vercmp_string(a, b) |
151 | if op in ('=', '=='): | 172 | if op in ('=', '=='): |
@@ -165,9 +186,16 @@ def vercmp_string_op(a, b, op): | |||
165 | 186 | ||
166 | def explode_deps(s): | 187 | def explode_deps(s): |
167 | """ | 188 | """ |
168 | Take an RDEPENDS style string of format: | 189 | Takes an RDEPENDS style string of format:: |
169 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 190 | |
170 | and return a list of dependencies. | 191 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
192 | |||
193 | Arguments: | ||
194 | |||
195 | - ``s``: input RDEPENDS style string | ||
196 | |||
197 | Returns a list of dependencies. | ||
198 | |||
171 | Version information is ignored. | 199 | Version information is ignored. |
172 | """ | 200 | """ |
173 | r = [] | 201 | r = [] |
@@ -189,9 +217,17 @@ def explode_deps(s): | |||
189 | 217 | ||
190 | def explode_dep_versions2(s, *, sort=True): | 218 | def explode_dep_versions2(s, *, sort=True): |
191 | """ | 219 | """ |
192 | Take an RDEPENDS style string of format: | 220 | Takes an RDEPENDS style string of format:: |
193 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 221 | |
194 | and return a dictionary of dependencies and versions. | 222 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
223 | |||
224 | Arguments: | ||
225 | |||
226 | - ``s``: input RDEPENDS style string | ||
227 | - ``*``: marks the following parameters as keyword-only. | ||
228 | - ``sort``: whether to sort the output or not. | ||
229 | |||
230 | Returns a dictionary of dependencies and versions. | ||
195 | """ | 231 | """ |
196 | r = collections.OrderedDict() | 232 | r = collections.OrderedDict() |
197 | l = s.replace(",", "").split() | 233 | l = s.replace(",", "").split() |
@@ -256,10 +292,17 @@ def explode_dep_versions2(s, *, sort=True): | |||
256 | 292 | ||
257 | def explode_dep_versions(s): | 293 | def explode_dep_versions(s): |
258 | """ | 294 | """ |
259 | Take an RDEPENDS style string of format: | 295 | Take an RDEPENDS style string of format:: |
260 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 296 | |
261 | skip null value and items appeared in dependency string multiple times | 297 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
262 | and return a dictionary of dependencies and versions. | 298 | |
299 | Skips null values and items that appear in the dependency string multiple times. | ||
300 | |||
301 | Arguments: | ||
302 | |||
303 | - ``s``: input RDEPENDS style string | ||
304 | |||
305 | Returns a dictionary of dependencies and versions. | ||
263 | """ | 306 | """ |
264 | r = explode_dep_versions2(s) | 307 | r = explode_dep_versions2(s) |
265 | for d in r: | 308 | for d in r: |
@@ -273,7 +316,17 @@ def explode_dep_versions(s): | |||
273 | 316 | ||
274 | def join_deps(deps, commasep=True): | 317 | def join_deps(deps, commasep=True): |
275 | """ | 318 | """ |
276 | Take the result from explode_dep_versions and generate a dependency string | 319 | Take a result from ``bb.utils.explode_dep_versions()`` and generate a |
320 | dependency string. | ||
321 | |||
322 | Arguments: | ||
323 | |||
324 | - ``deps``: dictionary of dependencies and versions. | ||
325 | - ``commasep``: makes the return value separated by commas if ``True``, | ||
326 | separated by spaces otherwise. | ||
327 | |||
328 | Returns a comma-separated (space-separated if ``commasep`` is ``False``) | ||
329 | string of dependencies and versions. | ||
277 | """ | 330 | """ |
278 | result = [] | 331 | result = [] |
279 | for dep in deps: | 332 | for dep in deps: |
@@ -435,7 +488,11 @@ def better_eval(source, locals, extraglobals = None): | |||
435 | 488 | ||
436 | @contextmanager | 489 | @contextmanager |
437 | def fileslocked(files, *args, **kwargs): | 490 | def fileslocked(files, *args, **kwargs): |
438 | """Context manager for locking and unlocking file locks.""" | 491 | """Context manager for locking and unlocking file locks. Uses |
492 | ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock | ||
493 | files. | ||
494 | |||
495 | No return value.""" | ||
439 | locks = [] | 496 | locks = [] |
440 | if files: | 497 | if files: |
441 | for lockfile in files: | 498 | for lockfile in files: |
@@ -446,19 +503,29 @@ def fileslocked(files, *args, **kwargs): | |||
446 | try: | 503 | try: |
447 | yield | 504 | yield |
448 | finally: | 505 | finally: |
506 | locks.reverse() | ||
449 | for lock in locks: | 507 | for lock in locks: |
450 | bb.utils.unlockfile(lock) | 508 | bb.utils.unlockfile(lock) |
451 | 509 | ||
452 | def lockfile(name, shared=False, retry=True, block=False): | 510 | def lockfile(name, shared=False, retry=True, block=False): |
453 | """ | 511 | """ |
454 | Use the specified file as a lock file, return when the lock has | 512 | Use the specified file (with filename ``name``) as a lock file, return when |
455 | been acquired. Returns a variable to pass to unlockfile(). | 513 | the lock has been acquired. Returns a variable to pass to unlockfile(). |
456 | Parameters: | 514 | |
457 | retry: True to re-try locking if it fails, False otherwise | 515 | Arguments: |
458 | block: True to block until the lock succeeds, False otherwise | 516 | |
517 | - ``shared``: sets the lock as a shared lock instead of an | ||
518 | exclusive lock. | ||
519 | - ``retry``: ``True`` to re-try locking if it fails, ``False`` | ||
520 | otherwise. | ||
521 | - ``block``: ``True`` to block until the lock succeeds, | ||
522 | ``False`` otherwise. | ||
523 | |||
459 | The retry and block parameters are kind of equivalent unless you | 524 | The retry and block parameters are kind of equivalent unless you |
460 | consider the possibility of sending a signal to the process to break | 525 | consider the possibility of sending a signal to the process to break |
461 | out - at which point you want block=True rather than retry=True. | 526 | out - at which point you want block=True rather than retry=True. |
527 | |||
528 | Returns the locked file descriptor in case of success, ``None`` otherwise. | ||
462 | """ | 529 | """ |
463 | basename = os.path.basename(name) | 530 | basename = os.path.basename(name) |
464 | if len(basename) > 255: | 531 | if len(basename) > 255: |
@@ -517,7 +584,13 @@ def lockfile(name, shared=False, retry=True, block=False): | |||
517 | 584 | ||
518 | def unlockfile(lf): | 585 | def unlockfile(lf): |
519 | """ | 586 | """ |
520 | Unlock a file locked using lockfile() | 587 | Unlock a file locked using ``bb.utils.lockfile()``. |
588 | |||
589 | Arguments: | ||
590 | |||
591 | - ``lf``: the locked file descriptor. | ||
592 | |||
593 | No return value. | ||
521 | """ | 594 | """ |
522 | try: | 595 | try: |
523 | # If we had a shared lock, we need to promote to exclusive before | 596 | # If we had a shared lock, we need to promote to exclusive before |
@@ -545,7 +618,11 @@ def _hasher(method, filename): | |||
545 | 618 | ||
546 | def md5_file(filename): | 619 | def md5_file(filename): |
547 | """ | 620 | """ |
548 | Return the hex string representation of the MD5 checksum of filename. | 621 | Arguments: |
622 | |||
623 | - ``filename``: path to the input file. | ||
624 | |||
625 | Returns the hexadecimal string representation of the MD5 checksum of filename. | ||
549 | """ | 626 | """ |
550 | import hashlib | 627 | import hashlib |
551 | try: | 628 | try: |
@@ -557,36 +634,81 @@ def md5_file(filename): | |||
557 | 634 | ||
558 | def sha256_file(filename): | 635 | def sha256_file(filename): |
559 | """ | 636 | """ |
560 | Return the hex string representation of the 256-bit SHA checksum of | 637 | Returns the hexadecimal representation of the 256-bit SHA checksum of |
561 | filename. | 638 | filename. |
639 | |||
640 | Arguments: | ||
641 | |||
642 | - ``filename``: path to the file. | ||
562 | """ | 643 | """ |
563 | import hashlib | 644 | import hashlib |
564 | return _hasher(hashlib.sha256(), filename) | 645 | return _hasher(hashlib.sha256(), filename) |
565 | 646 | ||
566 | def sha1_file(filename): | 647 | def sha1_file(filename): |
567 | """ | 648 | """ |
568 | Return the hex string representation of the SHA1 checksum of the filename | 649 | Returns the hexadecimal representation of the SHA1 checksum of the filename |
650 | |||
651 | Arguments: | ||
652 | |||
653 | - ``filename``: path to the file. | ||
569 | """ | 654 | """ |
570 | import hashlib | 655 | import hashlib |
571 | return _hasher(hashlib.sha1(), filename) | 656 | return _hasher(hashlib.sha1(), filename) |
572 | 657 | ||
573 | def sha384_file(filename): | 658 | def sha384_file(filename): |
574 | """ | 659 | """ |
575 | Return the hex string representation of the SHA384 checksum of the filename | 660 | Returns the hexadecimal representation of the SHA384 checksum of the filename |
661 | |||
662 | Arguments: | ||
663 | |||
664 | - ``filename``: path to the file. | ||
576 | """ | 665 | """ |
577 | import hashlib | 666 | import hashlib |
578 | return _hasher(hashlib.sha384(), filename) | 667 | return _hasher(hashlib.sha384(), filename) |
579 | 668 | ||
580 | def sha512_file(filename): | 669 | def sha512_file(filename): |
581 | """ | 670 | """ |
582 | Return the hex string representation of the SHA512 checksum of the filename | 671 | Returns the hexadecimal representation of the SHA512 checksum of the filename |
672 | |||
673 | Arguments: | ||
674 | |||
675 | - ``filename``: path to the file. | ||
583 | """ | 676 | """ |
584 | import hashlib | 677 | import hashlib |
585 | return _hasher(hashlib.sha512(), filename) | 678 | return _hasher(hashlib.sha512(), filename) |
586 | 679 | ||
680 | def goh1_file(filename): | ||
681 | """ | ||
682 | Returns the hexadecimal string representation of the Go mod h1 checksum of the | ||
683 | filename. The Go mod h1 checksum uses the Go dirhash package. The package | ||
684 | defines hashes over directory trees and is used by go mod for mod files and | ||
685 | zip archives. | ||
686 | |||
687 | Arguments: | ||
688 | |||
689 | - ``filename``: path to the file. | ||
690 | """ | ||
691 | import hashlib | ||
692 | import zipfile | ||
693 | |||
694 | lines = [] | ||
695 | if zipfile.is_zipfile(filename): | ||
696 | with zipfile.ZipFile(filename) as archive: | ||
697 | for fn in sorted(archive.namelist()): | ||
698 | method = hashlib.sha256() | ||
699 | method.update(archive.read(fn)) | ||
700 | hash = method.hexdigest() | ||
701 | lines.append("%s %s\n" % (hash, fn)) | ||
702 | else: | ||
703 | hash = _hasher(hashlib.sha256(), filename) | ||
704 | lines.append("%s go.mod\n" % hash) | ||
705 | method = hashlib.sha256() | ||
706 | method.update("".join(lines).encode('utf-8')) | ||
707 | return method.hexdigest() | ||
708 | |||
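To relate this hex digest to the ``h1:`` values recorded in a ``go.sum``
file, the digest bytes would need re-encoding; a sketch, assuming ``go.sum``
stores the base64 form of the same SHA-256 digest and using a hypothetical
archive path::

    import base64
    import bb.utils

    hexdigest = bb.utils.goh1_file("/tmp/module.zip")
    h1 = "h1:" + base64.b64encode(bytes.fromhex(hexdigest)).decode("ascii")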
587 | def preserved_envvars_exported(): | 709 | def preserved_envvars_exported(): |
588 | """Variables which are taken from the environment and placed in and exported | 710 | """Returns the list of variables which are taken from the environment and |
589 | from the metadata""" | 711 | placed in and exported from the metadata.""" |
590 | return [ | 712 | return [ |
591 | 'BB_TASKHASH', | 713 | 'BB_TASKHASH', |
592 | 'HOME', | 714 | 'HOME', |
@@ -600,7 +722,8 @@ def preserved_envvars_exported(): | |||
600 | ] | 722 | ] |
601 | 723 | ||
602 | def preserved_envvars(): | 724 | def preserved_envvars(): |
603 | """Variables which are taken from the environment and placed in the metadata""" | 725 | """Returns the list of variables which are taken from the environment and |
726 | placed in the metadata.""" | ||
604 | v = [ | 727 | v = [ |
605 | 'BBPATH', | 728 | 'BBPATH', |
606 | 'BB_PRESERVE_ENV', | 729 | 'BB_PRESERVE_ENV', |
@@ -609,7 +732,9 @@ def preserved_envvars(): | |||
609 | return v + preserved_envvars_exported() | 732 | return v + preserved_envvars_exported() |
610 | 733 | ||
611 | def check_system_locale(): | 734 | def check_system_locale(): |
612 | """Make sure the required system locale are available and configured""" | 735 | """Make sure the required system locale are available and configured. |
736 | |||
737 | No return value.""" | ||
613 | default_locale = locale.getlocale(locale.LC_CTYPE) | 738 | default_locale = locale.getlocale(locale.LC_CTYPE) |
614 | 739 | ||
615 | try: | 740 | try: |
@@ -627,6 +752,12 @@ def filter_environment(good_vars): | |||
627 | """ | 752 | """ |
628 | Create a pristine environment for bitbake. This will remove variables that | 753 | Create a pristine environment for bitbake. This will remove variables that |
629 | are not known and may influence the build in a negative way. | 754 | are not known and may influence the build in a negative way. |
755 | |||
756 | Arguments: | ||
757 | |||
758 | - ``good_vars``: list of variables to exclude from the filtering. | ||
759 | |||
760 | No return value. | ||
630 | """ | 761 | """ |
631 | 762 | ||
632 | removed_vars = {} | 763 | removed_vars = {} |
@@ -671,6 +802,8 @@ def clean_environment(): | |||
671 | """ | 802 | """ |
672 | Clean up any spurious environment variables. This will remove any | 803 | Clean up any spurious environment variables. This will remove any |
673 | variables the user hasn't chosen to preserve. | 804 | variables the user hasn't chosen to preserve. |
805 | |||
806 | No return value. | ||
674 | """ | 807 | """ |
675 | if 'BB_PRESERVE_ENV' not in os.environ: | 808 | if 'BB_PRESERVE_ENV' not in os.environ: |
676 | good_vars = approved_variables() | 809 | good_vars = approved_variables() |
@@ -681,6 +814,8 @@ def clean_environment(): | |||
681 | def empty_environment(): | 814 | def empty_environment(): |
682 | """ | 815 | """ |
683 | Remove all variables from the environment. | 816 | Remove all variables from the environment. |
817 | |||
818 | No return value. | ||
684 | """ | 819 | """ |
685 | for s in list(os.environ.keys()): | 820 | for s in list(os.environ.keys()): |
686 | os.unsetenv(s) | 821 | os.unsetenv(s) |
@@ -689,6 +824,12 @@ def empty_environment(): | |||
689 | def build_environment(d): | 824 | def build_environment(d): |
690 | """ | 825 | """ |
691 | Build an environment from all exported variables. | 826 | Build an environment from all exported variables. |
827 | |||
828 | Arguments: | ||
829 | |||
830 | - ``d``: the data store. | ||
831 | |||
832 | No return value. | ||
692 | """ | 833 | """ |
693 | import bb.data | 834 | import bb.data |
694 | for var in bb.data.keys(d): | 835 | for var in bb.data.keys(d): |
@@ -713,7 +854,17 @@ def _check_unsafe_delete_path(path): | |||
713 | return False | 854 | return False |
714 | 855 | ||
715 | def remove(path, recurse=False, ionice=False): | 856 | def remove(path, recurse=False, ionice=False): |
716 | """Equivalent to rm -f or rm -rf""" | 857 | """Equivalent to rm -f or rm -rf. |
858 | |||
859 | Arguments: | ||
860 | |||
861 | - ``path``: path to file/directory to remove. | ||
862 | - ``recurse``: deletes recursively if ``True``. | ||
863 | - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man | ||
864 | ionice``. | ||
865 | |||
866 | No return value. | ||
867 | """ | ||
717 | if not path: | 868 | if not path: |
718 | return | 869 | return |
719 | if recurse: | 870 | if recurse: |
@@ -734,7 +885,17 @@ def remove(path, recurse=False, ionice=False): | |||
734 | raise | 885 | raise |
735 | 886 | ||
736 | def prunedir(topdir, ionice=False): | 887 | def prunedir(topdir, ionice=False): |
737 | """ Delete everything reachable from the directory named in 'topdir'. """ | 888 | """ |
889 | Delete everything reachable from the directory named in ``topdir``. | ||
890 | |||
891 | Arguments: | ||
892 | |||
893 | - ``topdir``: directory path. | ||
894 | - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man | ||
895 | ionice``. | ||
896 | |||
897 | No return value. | ||
898 | """ | ||
738 | # CAUTION: This is dangerous! | 899 | # CAUTION: This is dangerous! |
739 | if _check_unsafe_delete_path(topdir): | 900 | if _check_unsafe_delete_path(topdir): |
740 | raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) | 901 | raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) |
@@ -746,8 +907,15 @@ def prunedir(topdir, ionice=False): | |||
746 | # | 907 | # |
747 | def prune_suffix(var, suffixes, d): | 908 | def prune_suffix(var, suffixes, d): |
748 | """ | 909 | """ |
749 | See if var ends with any of the suffixes listed and | 910 | Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and |
750 | remove it if found | 911 | remove it if found. |
912 | |||
913 | Arguments: | ||
914 | |||
915 | - ``var``: string to check for suffixes. | ||
916 | - ``suffixes``: list of strings representing suffixes to check for. | ||
917 | |||
918 | Returns the string ``var`` without the suffix. | ||
751 | """ | 919 | """ |
752 | for suffix in suffixes: | 920 | for suffix in suffixes: |
753 | if suffix and var.endswith(suffix): | 921 | if suffix and var.endswith(suffix): |
@@ -756,7 +924,13 @@ def prune_suffix(var, suffixes, d): | |||
756 | 924 | ||
757 | def mkdirhier(directory): | 925 | def mkdirhier(directory): |
758 | """Create a directory like 'mkdir -p', but does not complain if | 926 | """Create a directory like 'mkdir -p', but does not complain if |
759 | directory already exists like os.makedirs | 927 | the directory already exists, unlike ``os.makedirs()``. |
928 | |||
929 | Arguments: | ||
930 | |||
931 | - ``directory``: path to the directory. | ||
932 | |||
933 | No return value. | ||
760 | """ | 934 | """ |
761 | if '${' in str(directory): | 935 | if '${' in str(directory): |
762 | bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory)) | 936 | bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory)) |
@@ -767,10 +941,24 @@ def mkdirhier(directory): | |||
767 | raise e | 941 | raise e |
768 | 942 | ||
769 | def movefile(src, dest, newmtime = None, sstat = None): | 943 | def movefile(src, dest, newmtime = None, sstat = None): |
770 | """Moves a file from src to dest, preserving all permissions and | 944 | """Moves a file from ``src`` to ``dest``, preserving all permissions and |
771 | attributes; mtime will be preserved even when moving across | 945 | attributes; mtime will be preserved even when moving across |
772 | filesystems. Returns true on success and false on failure. Move is | 946 | filesystems. Returns ``True`` on success and ``False`` on failure. Move is |
773 | atomic. | 947 | atomic. |
948 | |||
949 | Arguments: | ||
950 | |||
951 | - ``src``: Source file. | ||
952 | - ``dest``: Destination file. | ||
953 | - ``newmtime``: new mtime to be passed as float seconds since the epoch. | ||
954 | - ``sstat``: os.stat_result to use for the destination file. | ||
955 | |||
956 | Returns an ``os.stat_result`` of the destination file if the | ||
957 | source file is a symbolic link or the ``sstat`` argument represents a | ||
958 | symbolic link - in which case the destination file will also be created as | ||
959 | a symbolic link. | ||
960 | |||
961 | Otherwise, returns ``newmtime`` on success and ``False`` on failure. | ||
774 | """ | 962 | """ |
775 | 963 | ||
776 | #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" | 964 | #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" |
@@ -861,9 +1049,24 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
861 | 1049 | ||
862 | def copyfile(src, dest, newmtime = None, sstat = None): | 1050 | def copyfile(src, dest, newmtime = None, sstat = None): |
863 | """ | 1051 | """ |
864 | Copies a file from src to dest, preserving all permissions and | 1052 | Copies a file from ``src`` to ``dest``, preserving all permissions and |
865 | attributes; mtime will be preserved even when moving across | 1053 | attributes; mtime will be preserved even when copying across |
866 | filesystems. Returns true on success and false on failure. | 1054 | filesystems. |
1055 | |||
1056 | Arguments: | ||
1057 | |||
1058 | - ``src``: Source file. | ||
1059 | - ``dest``: Destination file. | ||
1060 | - ``newmtime``: new mtime to be passed as float seconds since the epoch. | ||
1061 | - ``sstat``: os.stat_result to use for the destination file. | ||
1062 | |||
1063 | Returns an ``os.stat_result`` of the destination file if the | ||
1064 | source file is a symbolic link or the ``sstat`` argument represents a | ||
1065 | symbolic link - in which case the destination file will also be created as | ||
1066 | a symbolic link. | ||
1067 | |||
1068 | Otherwise, returns ``newmtime`` on success and ``False`` on failure. | ||
1069 | |||
867 | """ | 1070 | """ |
868 | #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" | 1071 | #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" |
869 | try: | 1072 | try: |
@@ -941,10 +1144,16 @@ def copyfile(src, dest, newmtime = None, sstat = None): | |||
941 | 1144 | ||
942 | def break_hardlinks(src, sstat = None): | 1145 | def break_hardlinks(src, sstat = None): |
943 | """ | 1146 | """ |
944 | Ensures src is the only hardlink to this file. Other hardlinks, | 1147 | Ensures ``src`` is the only hardlink to this file. Other hardlinks, |
945 | if any, are not affected (other than in their st_nlink value, of | 1148 | if any, are not affected (other than in their st_nlink value, of |
946 | course). Returns true on success and false on failure. | 1149 | course). |
1150 | |||
1151 | Arguments: | ||
947 | 1152 | ||
1153 | - ``src``: source file path. | ||
1154 | - ``sstat``: os.stat_result to use when checking if the file is a link. | ||
1155 | |||
1156 | Returns ``True`` on success and ``False`` on failure. | ||
948 | """ | 1157 | """ |
949 | try: | 1158 | try: |
950 | if not sstat: | 1159 | if not sstat: |
@@ -958,11 +1167,24 @@ def break_hardlinks(src, sstat = None): | |||
958 | 1167 | ||
959 | def which(path, item, direction = 0, history = False, executable=False): | 1168 | def which(path, item, direction = 0, history = False, executable=False): |
960 | """ | 1169 | """ |
961 | Locate `item` in the list of paths `path` (colon separated string like $PATH). | 1170 | Locate ``item`` in the list of paths ``path`` (colon separated string like |
962 | If `direction` is non-zero then the list is reversed. | 1171 | ``$PATH``). |
963 | If `history` is True then the list of candidates also returned as result,history. | 1172 | |
964 | If `executable` is True then the candidate has to be an executable file, | 1173 | Arguments: |
965 | otherwise the candidate simply has to exist. | 1174 | |
1175 | - ``path``: list of colon-separated paths. | ||
1176 | - ``item``: string to search for. | ||
1177 | - ``direction``: if non-zero then the list is reversed. | ||
1178 | - ``history``: if ``True`` then the list of candidates also returned as | ||
1179 | ``result,history`` where ``history`` is the list of previous path | ||
1180 | checked. | ||
1181 | - ``executable``: if ``True`` then the candidate has to be an | ||
1182 | executable file; if ``False`` the candidate simply | ||
1183 | has to exist. | ||
1184 | |||
1185 | Returns the item if found in the list of paths, otherwise an empty string. | ||
1186 | If ``history`` is ``True``, returns the list of previous paths checked in a | ||
1187 | tuple with the found (or not found) item as ``(item, history)``. | ||
966 | """ | 1188 | """ |
967 | 1189 | ||
968 | if executable: | 1190 | if executable: |
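A short sketch of the two return shapes described above, searching the
caller's own ``$PATH``::

    import os
    import bb.utils

    # Plain lookup: the full path to the item, or an empty string.
    gcc = bb.utils.which(os.environ["PATH"], "gcc", executable=True)

    # With history=True, the candidate paths tried are returned as well.
    gcc, tried = bb.utils.which(os.environ["PATH"], "gcc", history=True)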
@@ -993,6 +1215,8 @@ def which(path, item, direction = 0, history = False, executable=False): | |||
993 | def umask(new_mask): | 1215 | def umask(new_mask): |
994 | """ | 1216 | """ |
995 | Context manager to set the umask to a specific mask, and restore it afterwards. | 1217 | Context manager to set the umask to a specific mask, and restore it afterwards. |
1218 | |||
1219 | No return value. | ||
996 | """ | 1220 | """ |
997 | current_mask = os.umask(new_mask) | 1221 | current_mask = os.umask(new_mask) |
998 | try: | 1222 | try: |
@@ -1003,7 +1227,17 @@ def umask(new_mask): | |||
1003 | def to_boolean(string, default=None): | 1227 | def to_boolean(string, default=None): |
1004 | """ | 1228 | """ |
1005 | Check input string and return boolean value True/False/None | 1229 | Check input string and return boolean value True/False/None |
1006 | depending upon the checks | 1230 | depending upon the checks. |
1231 | |||
1232 | Arguments: | ||
1233 | |||
1234 | - ``string``: input string. | ||
1235 | - ``default``: default return value if the input ``string`` is ``None``, | ||
1236 | ``0``, ``False`` or an empty string. | ||
1237 | |||
1238 | Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False`` | ||
1239 | if the string is one of "n", "no", "0", or "false". Return ``default`` if | ||
1240 | the input ``string`` is ``None``, ``0``, ``False`` or an empty string. | ||
1007 | """ | 1241 | """ |
1008 | if not string: | 1242 | if not string: |
1009 | return default | 1243 | return default |
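By way of example, based on the behavior described above::

    import bb.utils

    bb.utils.to_boolean("Yes")        # True (matching is case-insensitive)
    bb.utils.to_boolean("0")          # False
    bb.utils.to_boolean(None, False)  # False, falling back to the default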
@@ -1024,18 +1258,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
1024 | 1258 | ||
1025 | Arguments: | 1259 | Arguments: |
1026 | 1260 | ||
1027 | variable -- the variable name. This will be fetched and expanded (using | 1261 | - ``variable``: the variable name. This will be fetched and expanded (using |
1028 | d.getVar(variable)) and then split into a set(). | 1262 | d.getVar(variable)) and then split into a set(). |
1029 | 1263 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
1030 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1264 | otherwise coerced directly into a set(). |
1031 | otherwise coerced directly into a set(). | 1265 | - ``truevalue``: the value to return if checkvalues is a subset of variable. |
1032 | 1266 | - ``falsevalue``: the value to return if variable is empty or if checkvalues is | |
1033 | truevalue -- the value to return if checkvalues is a subset of variable. | 1267 | not a subset of variable. |
1268 | - ``d``: the data store. | ||
1034 | 1269 | ||
1035 | falsevalue -- the value to return if variable is empty or if checkvalues is | 1270 | Returns ``truevalue`` if the variable contains all the values specified, |
1036 | not a subset of variable. | 1271 | ``falsevalue`` otherwise. |
1037 | |||
1038 | d -- the data store. | ||
1039 | """ | 1272 | """ |
1040 | 1273 | ||
1041 | val = d.getVar(variable) | 1274 | val = d.getVar(variable) |
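The classic use in recipe metadata picks a configure flag based on
``DISTRO_FEATURES``; a sketch, assuming a populated data store ``d``::

    import bb.utils

    flag = bb.utils.contains("DISTRO_FEATURES", "systemd",
                             "--with-systemd", "--without-systemd", d)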
@@ -1055,18 +1288,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): | |||
1055 | 1288 | ||
1056 | Arguments: | 1289 | Arguments: |
1057 | 1290 | ||
1058 | variable -- the variable name. This will be fetched and expanded (using | 1291 | - ``variable``: the variable name. This will be fetched and expanded (using |
1059 | d.getVar(variable)) and then split into a set(). | 1292 | d.getVar(variable)) and then split into a set(). |
1060 | 1293 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
1061 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1294 | otherwise coerced directly into a set(). |
1062 | otherwise coerced directly into a set(). | 1295 | - ``truevalue``: the value to return if any of checkvalues are in variable. |
1063 | 1296 | - ``falsevalue``: the value to return if variable is empty or if none of |
1064 | truevalue -- the value to return if checkvalues is a subset of variable. | 1297 | checkvalues are in variable. |
1298 | - ``d``: the data store. | ||
1065 | 1299 | ||
1066 | falsevalue -- the value to return if variable is empty or if checkvalues is | 1300 | Returns ``truevalue`` if the variable contains any of the values specified, |
1067 | not a subset of variable. | 1301 | ``falsevalue`` otherwise. |
1068 | |||
1069 | d -- the data store. | ||
1070 | """ | 1302 | """ |
1071 | val = d.getVar(variable) | 1303 | val = d.getVar(variable) |
1072 | if not val: | 1304 | if not val: |
@@ -1081,17 +1313,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): | |||
1081 | return falsevalue | 1313 | return falsevalue |
1082 | 1314 | ||
1083 | def filter(variable, checkvalues, d): | 1315 | def filter(variable, checkvalues, d): |
1084 | """Return all words in the variable that are present in the checkvalues. | 1316 | """Return all words in the variable that are present in the ``checkvalues``. |
1085 | 1317 | ||
1086 | Arguments: | 1318 | Arguments: |
1087 | 1319 | ||
1088 | variable -- the variable name. This will be fetched and expanded (using | 1320 | - ``variable``: the variable name. This will be fetched and expanded (using |
1089 | d.getVar(variable)) and then split into a set(). | 1321 | d.getVar(variable)) and then split into a set(). |
1090 | 1322 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
1091 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1323 | otherwise coerced directly into a set(). |
1092 | otherwise coerced directly into a set(). | 1324 | - ``d``: the data store. |
1093 | 1325 | ||
1094 | d -- the data store. | 1326 | Returns the matching words joined into a space-separated string. |
1095 | """ | 1327 | """ |
1096 | 1328 | ||
1097 | val = d.getVar(variable) | 1329 | val = d.getVar(variable) |
@@ -1107,8 +1339,27 @@ def filter(variable, checkvalues, d): | |||
1107 | 1339 | ||
1108 | def get_referenced_vars(start_expr, d): | 1340 | def get_referenced_vars(start_expr, d): |
1109 | """ | 1341 | """ |
1110 | :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level | 1342 | Get the names of the variables referenced in a given expression. |
1111 | are ordered arbitrarily) | 1343 | |
1344 | Arguments: | ||
1345 | |||
1346 | - ``start_expr``: the expression where to look for variables references. | ||
1347 | |||
1348 | For example:: | ||
1349 | |||
1350 | ${VAR_A} string ${VAR_B} | ||
1351 | |||
1352 | Or:: | ||
1353 | |||
1354 | ${@d.getVar('VAR')} | ||
1355 | |||
1356 | If a variable references other variables, the latter are also | ||
1357 | returned recursively. | ||
1358 | |||
1359 | - ``d``: the data store. | ||
1360 | |||
1361 | Returns the names of vars referenced in ``start_expr`` (recursively), in | ||
1362 | quasi-BFS order (variables within the same level are ordered arbitrarily). | ||
1112 | """ | 1363 | """ |
1113 | 1364 | ||
1114 | seen = set() | 1365 | seen = set() |
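A sketch of the recursive lookup, assuming a data store ``d`` in which
``FOO`` expands to ``${BAR} ${BAZ}``::

    import bb.utils

    refs = bb.utils.get_referenced_vars("${FOO}", d)
    # quasi-BFS: FOO first, then BAR and BAZ in arbitrary relative order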
@@ -1188,7 +1439,9 @@ def multiprocessingpool(*args, **kwargs): | |||
1188 | return multiprocessing.Pool(*args, **kwargs) | 1439 | return multiprocessing.Pool(*args, **kwargs) |
1189 | 1440 | ||
1190 | def exec_flat_python_func(func, *args, **kwargs): | 1441 | def exec_flat_python_func(func, *args, **kwargs): |
1191 | """Execute a flat python function (defined with def funcname(args):...)""" | 1442 | """Execute a flat python function (defined with ``def funcname(args): ...``) |
1443 | |||
1444 | Returns the return value of the function.""" | ||
1192 | # Prepare a small piece of python code which calls the requested function | 1445 | # Prepare a small piece of python code which calls the requested function |
1193 | # To do this we need to prepare two things - a set of variables we can use to pass | 1446 | # To do this we need to prepare two things - a set of variables we can use to pass |
1194 | # the values of arguments into the calling function, and the list of arguments for | 1447 | # the values of arguments into the calling function, and the list of arguments for |
@@ -1214,48 +1467,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1214 | """Edit lines from a recipe or config file and modify one or more | 1467 | """Edit lines from a recipe or config file and modify one or more |
1215 | specified variable values set in the file using a specified callback | 1468 | specified variable values set in the file using a specified callback |
1216 | function. Lines are expected to have trailing newlines. | 1469 | function. Lines are expected to have trailing newlines. |
1217 | Parameters: | 1470 | |
1218 | meta_lines: lines from the file; can be a list or an iterable | 1471 | Arguments: |
1219 | (e.g. file pointer) | 1472 | |
1220 | variables: a list of variable names to look for. Functions | 1473 | - ``meta_lines``: lines from the file; can be a list or an iterable |
1221 | may also be specified, but must be specified with '()' at | 1474 | (e.g. file pointer) |
1222 | the end of the name. Note that the function doesn't have | 1475 | - ``variables``: a list of variable names to look for. Functions |
1223 | any intrinsic understanding of :append, :prepend, :remove, | 1476 | may also be specified, but must be specified with ``()`` at |
1224 | or overrides, so these are considered as part of the name. | 1477 | the end of the name. Note that the function doesn't have |
1225 | These values go into a regular expression, so regular | 1478 | any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``, |
1226 | expression syntax is allowed. | 1479 | or overrides, so these are considered as part of the name. |
1227 | varfunc: callback function called for every variable matching | 1480 | These values go into a regular expression, so regular |
1228 | one of the entries in the variables parameter. The function | 1481 | expression syntax is allowed. |
1229 | should take four arguments: | 1482 | - ``varfunc``: callback function called for every variable matching |
1230 | varname: name of variable matched | 1483 | one of the entries in the variables parameter. |
1231 | origvalue: current value in file | 1484 | |
1232 | op: the operator (e.g. '+=') | 1485 | The function should take four arguments: |
1233 | newlines: list of lines up to this point. You can use | 1486 | |
1234 | this to prepend lines before this variable setting | 1487 | - ``varname``: name of variable matched |
1235 | if you wish. | 1488 | - ``origvalue``: current value in file |
1236 | and should return a four-element tuple: | 1489 | - ``op``: the operator (e.g. ``+=``) |
1237 | newvalue: new value to substitute in, or None to drop | 1490 | - ``newlines``: list of lines up to this point. You can use |
1238 | the variable setting entirely. (If the removal | 1491 | this to prepend lines before this variable setting |
1239 | results in two consecutive blank lines, one of the | 1492 | if you wish. |
1240 | blank lines will also be dropped). | 1493 | |
1241 | newop: the operator to use - if you specify None here, | 1494 | And should return a four-element tuple: |
1242 | the original operation will be used. | 1495 | |
1243 | indent: number of spaces to indent multi-line entries, | 1496 | - ``newvalue``: new value to substitute in, or ``None`` to drop |
1244 | or -1 to indent up to the level of the assignment | 1497 | the variable setting entirely. (If the removal |
1245 | and opening quote, or a string to use as the indent. | 1498 | results in two consecutive blank lines, one of the |
1246 | minbreak: True to allow the first element of a | 1499 | blank lines will also be dropped). |
1247 | multi-line value to continue on the same line as | 1500 | - ``newop``: the operator to use - if you specify ``None`` here, |
1248 | the assignment, False to indent before the first | 1501 | the original operation will be used. |
1249 | element. | 1502 | - ``indent``: number of spaces to indent multi-line entries, |
1250 | To clarify, if you wish not to change the value, then you | 1503 | or ``-1`` to indent up to the level of the assignment |
1251 | would return like this: return origvalue, None, 0, True | 1504 | and opening quote, or a string to use as the indent. |
1252 | match_overrides: True to match items with _overrides on the end, | 1505 | - ``minbreak``: ``True`` to allow the first element of a |
1253 | False otherwise | 1506 | multi-line value to continue on the same line as |
1507 | the assignment, ``False`` to indent before the first | ||
1508 | element. | ||
1509 | |||
1510 | To clarify, if you wish not to change the value, then you | ||
1511 | would return like this:: | ||
1512 | |||
1513 | return origvalue, None, 0, True | ||
1514 | - ``match_overrides``: True to match items with _overrides on the end, | ||
1515 | False otherwise | ||
1516 | |||
1254 | Returns a tuple: | 1517 | Returns a tuple: |
1255 | updated: | 1518 | |
1256 | True if changes were made, False otherwise. | 1519 | - ``updated``: ``True`` if changes were made, ``False`` otherwise. |
1257 | newlines: | 1520 | - ``newlines``: Lines after processing. |
1258 | Lines after processing | ||
1259 | """ | 1521 | """ |
1260 | 1522 | ||
1261 | var_res = {} | 1523 | var_res = {} |
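A minimal sketch of the callback contract described above, bumping ``PV``
and leaving everything else untouched; ``lines`` and the replacement value
are hypothetical::

    import bb.utils

    def bump_pv(varname, origvalue, op, newlines):
        # newvalue, newop, indent, minbreak
        return "2.0", None, 0, True

    updated, newlines = bb.utils.edit_metadata(lines, ["PV"], bump_pv)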
@@ -1399,12 +1661,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1399 | 1661 | ||
1400 | 1662 | ||
1401 | def edit_metadata_file(meta_file, variables, varfunc): | 1663 | def edit_metadata_file(meta_file, variables, varfunc): |
1402 | """Edit a recipe or config file and modify one or more specified | 1664 | """Edit a recipe or configuration file and modify one or more specified |
1403 | variable values set in the file using a specified callback function. | 1665 | variable values set in the file using a specified callback function. |
1404 | The file is only written to if the value(s) actually change. | 1666 | The file is only written to if the value(s) actually change. |
1405 | This is basically the file version of edit_metadata(), see that | 1667 | This is basically the file version of ``bb.utils.edit_metadata()``, see that |
1406 | function's description for parameter/usage information. | 1668 | function's description for parameter/usage information. |
1407 | Returns True if the file was written to, False otherwise. | 1669 | |
1670 | Returns ``True`` if the file was written to, ``False`` otherwise. | ||
1408 | """ | 1671 | """ |
1409 | with open(meta_file, 'r') as f: | 1672 | with open(meta_file, 'r') as f: |
1410 | (updated, newlines) = edit_metadata(f, variables, varfunc) | 1673 | (updated, newlines) = edit_metadata(f, variables, varfunc) |
@@ -1415,23 +1678,25 @@ def edit_metadata_file(meta_file, variables, varfunc): | |||
1415 | 1678 | ||
1416 | 1679 | ||
1417 | def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): | 1680 | def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): |
1418 | """Edit bblayers.conf, adding and/or removing layers | 1681 | """Edit ``bblayers.conf``, adding and/or removing layers. |
1419 | Parameters: | 1682 | |
1420 | bblayers_conf: path to bblayers.conf file to edit | 1683 | Arguments: |
1421 | add: layer path (or list of layer paths) to add; None or empty | 1684 | |
1422 | list to add nothing | 1685 | - ``bblayers_conf``: path to ``bblayers.conf`` file to edit |
1423 | remove: layer path (or list of layer paths) to remove; None or | 1686 | - ``add``: layer path (or list of layer paths) to add; ``None`` or empty |
1424 | empty list to remove nothing | 1687 | list to add nothing |
1425 | edit_cb: optional callback function that will be called after | 1688 | - ``remove``: layer path (or list of layer paths) to remove; ``None`` or |
1426 | processing adds/removes once per existing entry. | 1689 | empty list to remove nothing |
1690 | - ``edit_cb``: optional callback function that will be called | ||
1691 | once per existing entry, after adds/removes have been processed. | ||
1692 | |||
1427 | Returns a tuple: | 1693 | Returns a tuple: |
1428 | notadded: list of layers specified to be added but weren't | ||
1429 | (because they were already in the list) | ||
1430 | notremoved: list of layers that were specified to be removed | ||
1431 | but weren't (because they weren't in the list) | ||
1432 | """ | ||
1433 | 1694 | ||
1434 | import fnmatch | 1695 | - ``notadded``: list of layers specified to be added but weren't |
1696 | (because they were already in the list) | ||
1697 | - ``notremoved``: list of layers that were specified to be removed | ||
1698 | but weren't (because they weren't in the list) | ||
1699 | """ | ||
1435 | 1700 | ||
1436 | def remove_trailing_sep(pth): | 1701 | def remove_trailing_sep(pth): |
1437 | if pth and pth[-1] == os.sep: | 1702 | if pth and pth[-1] == os.sep: |
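For instance, adding one layer and removing another in a single call, with
hypothetical paths::

    import bb.utils

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "conf/bblayers.conf",
        add="/srv/layers/meta-custom",
        remove="/srv/layers/meta-obsolete")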
@@ -1550,7 +1815,22 @@ def get_collection_res(d): | |||
1550 | 1815 | ||
1551 | 1816 | ||
1552 | def get_file_layer(filename, d, collection_res={}): | 1817 | def get_file_layer(filename, d, collection_res={}): |
1553 | """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" | 1818 | """Determine the collection (or layer name, as defined by a layer's |
1819 | ``layer.conf`` file) containing the specified file. | ||
1820 | |||
1821 | Arguments: | ||
1822 | |||
1823 | - ``filename``: the filename to look for. | ||
1824 | - ``d``: the data store. | ||
1825 | - ``collection_res``: dictionary with the layer names as keys and file | ||
1826 | patterns to match as defined with the ``BBFILE_COLLECTIONS`` and | ||
1827 | ``BBFILE_PATTERN`` variables respectively. The return value of | ||
1828 | ``bb.utils.get_collection_res()`` is the default if this variable is | ||
1829 | not specified. | ||
1830 | |||
1831 | Returns the layer name containing the file. If multiple layers contain the | ||
1832 | file, the last matching layer name from ``collection_res`` is returned. | ||
1833 | """ | ||
1554 | if not collection_res: | 1834 | if not collection_res: |
1555 | collection_res = get_collection_res(d) | 1835 | collection_res = get_collection_res(d) |
1556 | 1836 | ||
@@ -1588,7 +1868,13 @@ class PrCtlError(Exception): | |||
1588 | 1868 | ||
1589 | def signal_on_parent_exit(signame): | 1869 | def signal_on_parent_exit(signame): |
1590 | """ | 1870 | """ |
1591 | Trigger signame to be sent when the parent process dies | 1871 | Trigger ``signame`` to be sent when the parent process dies. |
1872 | |||
1873 | Arguments: | ||
1874 | |||
1875 | - ``signame``: name of the signal. See ``man signal``. | ||
1876 | |||
1877 | No return value. | ||
1592 | """ | 1878 | """ |
1593 | signum = getattr(signal, signame) | 1879 | signum = getattr(signal, signame) |
1594 | # http://linux.die.net/man/2/prctl | 1880 | # http://linux.die.net/man/2/prctl |
@@ -1623,7 +1909,7 @@ def ioprio_set(who, cls, value): | |||
1623 | bb.warn("Unable to set IO Prio for arch %s" % _unamearch) | 1909 | bb.warn("Unable to set IO Prio for arch %s" % _unamearch) |
1624 | 1910 | ||
1625 | def set_process_name(name): | 1911 | def set_process_name(name): |
1626 | from ctypes import cdll, byref, create_string_buffer | 1912 | from ctypes import byref, create_string_buffer |
1627 | # This is nice to have for debugging, not essential | 1913 | # This is nice to have for debugging, not essential |
1628 | try: | 1914 | try: |
1629 | libc = cdll.LoadLibrary('libc.so.6') | 1915 | libc = cdll.LoadLibrary('libc.so.6') |
@@ -1675,6 +1961,13 @@ def disable_network(uid=None, gid=None): | |||
1675 | Disable networking in the current process if the kernel supports it, else | 1961 | Disable networking in the current process if the kernel supports it, else |
1676 | just return after logging to debug. To do this we need to create a new user | 1962 | just return after logging to debug. To do this we need to create a new user |
1677 | namespace, then map back to the original uid/gid. | 1963 | namespace, then map back to the original uid/gid. |
1964 | |||
1965 | Arguments: | ||
1966 | |||
1967 | - ``uid``: original user id. | ||
1968 | - ``gid``: original user group id. | ||
1969 | |||
1970 | No return value. | ||
1678 | """ | 1971 | """ |
1679 | libc = ctypes.CDLL('libc.so.6') | 1972 | libc = ctypes.CDLL('libc.so.6') |
1680 | 1973 | ||
@@ -1744,9 +2037,14 @@ class LogCatcher(logging.Handler): | |||
1744 | 2037 | ||
1745 | def is_semver(version): | 2038 | def is_semver(version): |
1746 | """ | 2039 | """ |
1747 | Is the version string following the semver semantic? | 2040 | Arguments: |
1748 | 2041 | ||
1749 | https://semver.org/spec/v2.0.0.html | 2042 | - ``version``: the version string. |
2043 | |||
2044 | Returns ``True`` if the version string follows semantic versioning, ``False`` | ||
2045 | otherwise. | ||
2046 | |||
2047 | See https://semver.org/spec/v2.0.0.html. | ||
1750 | """ | 2048 | """ |
1751 | regex = re.compile( | 2049 | regex = re.compile( |
1752 | r""" | 2050 | r""" |
@@ -1784,6 +2082,8 @@ def rename(src, dst): | |||
1784 | def environment(**envvars): | 2082 | def environment(**envvars): |
1785 | """ | 2083 | """ |
1786 | Context manager to selectively update the environment with the specified mapping. | 2084 | Context manager to selectively update the environment with the specified mapping. |
2085 | |||
2086 | No return value. | ||
1787 | """ | 2087 | """ |
1788 | backup = dict(os.environ) | 2088 | backup = dict(os.environ) |
1789 | try: | 2089 | try: |
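A sketch of the context manager in use; the variable and value are
illustrative::

    import os
    import bb.utils

    with bb.utils.environment(LC_ALL="en_US.UTF-8"):
        assert os.environ["LC_ALL"] == "en_US.UTF-8"
    # the previous environment is restored on exit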
@@ -1800,6 +2100,13 @@ def is_local_uid(uid=''): | |||
1800 | """ | 2100 | """ |
1801 | Check whether uid is a local one or not. | 2101 | Check whether uid is a local one or not. |
1802 | Can't use pwd module since it gets all UIDs, not local ones only. | 2102 | Can't use pwd module since it gets all UIDs, not local ones only. |
2103 | |||
2104 | Arguments: | ||
2105 | |||
2106 | - ``uid``: user id. If not specified the user id is determined from | ||
2107 | ``os.getuid()``. | ||
2108 | |||
2109 | Returns ``True`` if the user id is local, ``False`` otherwise. | ||
1803 | """ | 2110 | """ |
1804 | if not uid: | 2111 | if not uid: |
1805 | uid = os.getuid() | 2112 | uid = os.getuid() |
@@ -1814,7 +2121,7 @@ def is_local_uid(uid=''): | |||
1814 | 2121 | ||
1815 | def mkstemp(suffix=None, prefix=None, dir=None, text=False): | 2122 | def mkstemp(suffix=None, prefix=None, dir=None, text=False): |
1816 | """ | 2123 | """ |
1817 | Generates a unique filename, independent of time. | 2124 | Generates a unique temporary file, independent of time. |
1818 | 2125 | ||
1819 | mkstemp() in glibc (at least) generates unique file names based on the | 2126 | mkstemp() in glibc (at least) generates unique file names based on the |
1820 | current system time. When combined with highly parallel builds, and | 2127 | current system time. When combined with highly parallel builds, and |
@@ -1823,6 +2130,18 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False): | |||
1823 | 2130 | ||
1824 | This function adds additional entropy to the file name so that a collision | 2131 | This function adds additional entropy to the file name so that a collision |
1825 | is independent of time and thus extremely unlikely. | 2132 | is independent of time and thus extremely unlikely. |
2133 | |||
2134 | Arguments: | ||
2135 | |||
2136 | - ``suffix``: filename suffix. | ||
2137 | - ``prefix``: filename prefix. | ||
2138 | - ``dir``: directory where the file will be created. | ||
2139 | - ``text``: if ``True``, the file is opened in text mode. | ||
2140 | |||
2141 | Returns a tuple containing: | ||
2142 | |||
2143 | - the file descriptor for the created file | ||
2144 | - the name of the file. | ||
1826 | """ | 2145 | """ |
1827 | entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20)) | 2146 | entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20)) |
1828 | if prefix: | 2147 | if prefix: |
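Usage mirrors ``tempfile.mkstemp()``; a small sketch with hypothetical
arguments::

    import os
    import bb.utils

    fd, path = bb.utils.mkstemp(suffix=".log", dir="/tmp")
    os.close(fd)
    os.unlink(path)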
@@ -1833,12 +2152,20 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False): | |||
1833 | 2152 | ||
1834 | def path_is_descendant(descendant, ancestor): | 2153 | def path_is_descendant(descendant, ancestor): |
1835 | """ | 2154 | """ |
1836 | Returns True if the path `descendant` is a descendant of `ancestor` | 2155 | Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor`` |
1837 | (including being equivalent to `ancestor` itself). Otherwise returns False. | 2156 | (including being equivalent to ``ancestor`` itself). Otherwise returns |
2157 | ``False``. | ||
2158 | |||
1838 | Correctly accounts for symlinks, bind mounts, etc. by using | 2159 | Correctly accounts for symlinks, bind mounts, etc. by using |
1839 | os.path.samestat() to compare paths | 2160 | ``os.path.samestat()`` to compare paths. |
2161 | |||
2162 | May raise any exception that ``os.stat()`` raises. | ||
1840 | 2163 | ||
1841 | May raise any exception that os.stat() raises | 2164 | Arguments: |
2165 | |||
2166 | - ``descendant``: path to check for being a descendant of ``ancestor``. | ||
2167 | - ``ancestor``: path to the ancestor ``descendant`` will be checked | ||
2168 | against. | ||
1842 | """ | 2169 | """ |
1843 | 2170 | ||
1844 | ancestor_stat = os.stat(ancestor) | 2171 | ancestor_stat = os.stat(ancestor) |
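For example, with hypothetical paths (both must exist, since the comparison
goes through ``os.stat()``)::

    import bb.utils

    bb.utils.path_is_descendant("/srv/build/tmp", "/srv/build")  # True
    bb.utils.path_is_descendant("/srv/build", "/srv/build")      # True
    bb.utils.path_is_descendant("/home", "/srv/build")           # False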
@@ -1857,12 +2184,31 @@ def path_is_descendant(descendant, ancestor): | |||
1857 | # If we don't have a timeout of some kind and a process/thread exits badly (for example | 2184 | # If we don't have a timeout of some kind and a process/thread exits badly (for example |
1858 | # OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better | 2185 | # OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better |
1859 | # we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked. | 2186 | # we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked. |
2187 | # This function can still deadlock python since it can't signal the other threads to exit | ||
2188 | # (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads | ||
2189 | # to exit. | ||
1860 | @contextmanager | 2190 | @contextmanager |
1861 | def lock_timeout(lock): | 2191 | def lock_timeout(lock): |
1862 | held = lock.acquire(timeout=5*60) | ||
1863 | try: | 2192 | try: |
2193 | s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals()) | ||
2194 | held = lock.acquire(timeout=5*60) | ||
1864 | if not held: | 2195 | if not held: |
2196 | bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack()) | ||
1865 | os._exit(1) | 2197 | os._exit(1) |
1866 | yield held | 2198 | yield held |
1867 | finally: | 2199 | finally: |
1868 | lock.release() | 2200 | lock.release() |
2201 | signal.pthread_sigmask(signal.SIG_SETMASK, s) | ||
2202 | |||
2203 | # A version of lock_timeout without the check that the lock was locked and a shorter timeout | ||
2204 | @contextmanager | ||
2205 | def lock_timeout_nocheck(lock): | ||
2206 | l = False | ||
2207 | try: | ||
2208 | s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals()) | ||
2209 | l = lock.acquire(timeout=10) | ||
2210 | yield l | ||
2211 | finally: | ||
2212 | if l: | ||
2213 | lock.release() | ||
2214 | signal.pthread_sigmask(signal.SIG_SETMASK, s) | ||
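A sketch of guarding a shared structure with the timed lock::

    import threading
    import bb.utils

    lock = threading.Lock()
    with bb.utils.lock_timeout(lock):
        pass  # the process exits if the lock is not acquired within 5 minutes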
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py index a8f2699335..a14f19948e 100644 --- a/bitbake/lib/bblayers/action.py +++ b/bitbake/lib/bblayers/action.py | |||
@@ -50,8 +50,8 @@ class ActionPlugin(LayerPlugin): | |||
50 | 50 | ||
51 | try: | 51 | try: |
52 | notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) | 52 | notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) |
53 | self.tinfoil.modified_files() | ||
54 | if not (args.force or notadded): | 53 | if not (args.force or notadded): |
54 | self.tinfoil.modified_files() | ||
55 | try: | 55 | try: |
56 | self.tinfoil.run_command('parseConfiguration') | 56 | self.tinfoil.run_command('parseConfiguration') |
57 | except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): | 57 | except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): |
@@ -83,6 +83,8 @@ class ActionPlugin(LayerPlugin): | |||
83 | layerdir = os.path.abspath(item) | 83 | layerdir = os.path.abspath(item) |
84 | layerdirs.append(layerdir) | 84 | layerdirs.append(layerdir) |
85 | (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) | 85 | (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) |
86 | if args.force > 1: | ||
87 | return 0 | ||
86 | self.tinfoil.modified_files() | 88 | self.tinfoil.modified_files() |
87 | if notremoved: | 89 | if notremoved: |
88 | for item in notremoved: | 90 | for item in notremoved: |
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py index bfc18a7593..eb7cb465b4 100644 --- a/bitbake/lib/bblayers/query.py +++ b/bitbake/lib/bblayers/query.py | |||
@@ -142,10 +142,10 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
142 | # Ensure we list skipped recipes | 142 | # Ensure we list skipped recipes |
143 | # We are largely guessing about PN, PV and the preferred version here, | 143 | # We are largely guessing about PN, PV and the preferred version here, |
144 | # but we have no choice since skipped recipes are not fully parsed | 144 | # but we have no choice since skipped recipes are not fully parsed |
145 | skiplist = list(self.tinfoil.cooker.skiplist.keys()) | 145 | skiplist = list(self.tinfoil.cooker.skiplist_by_mc[mc].keys()) |
146 | mcspec = 'mc:%s:' % mc | 146 | |
147 | if mc: | 147 | if mc: |
148 | skiplist = [s[len(mcspec):] for s in skiplist if s.startswith(mcspec)] | 148 | skiplist = [s.removeprefix(f'mc:{mc}:') for s in skiplist] |
149 | 149 | ||
150 | for fn in skiplist: | 150 | for fn in skiplist: |
151 | recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_') | 151 | recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_') |
@@ -162,7 +162,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. | |||
162 | def print_item(f, pn, ver, layer, ispref): | 162 | def print_item(f, pn, ver, layer, ispref): |
163 | if not selected_layer or layer == selected_layer: | 163 | if not selected_layer or layer == selected_layer: |
164 | if not bare and f in skiplist: | 164 | if not bare and f in skiplist: |
165 | skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason | 165 | skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist_by_mc[mc][f].skipreason |
166 | else: | 166 | else: |
167 | skipped = '' | 167 | skipped = '' |
168 | if show_filenames: | 168 | if show_filenames: |
@@ -301,7 +301,7 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
301 | if self.show_appends_for_pn(pn, cooker_data, args.mc): | 301 | if self.show_appends_for_pn(pn, cooker_data, args.mc): |
302 | appends = True | 302 | appends = True |
303 | 303 | ||
304 | if not args.pnspec and self.show_appends_for_skipped(): | 304 | if not args.pnspec and self.show_appends_for_skipped(args.mc): |
305 | appends = True | 305 | appends = True |
306 | 306 | ||
307 | if not appends: | 307 | if not appends: |
@@ -317,9 +317,9 @@ Lists recipes with the bbappends that apply to them as subitems. | |||
317 | 317 | ||
318 | return self.show_appends_output(filenames, best_filename) | 318 | return self.show_appends_output(filenames, best_filename) |
319 | 319 | ||
320 | def show_appends_for_skipped(self): | 320 | def show_appends_for_skipped(self, mc): |
321 | filenames = [os.path.basename(f) | 321 | filenames = [os.path.basename(f) |
322 | for f in self.tinfoil.cooker.skiplist.keys()] | 322 | for f in self.tinfoil.cooker.skiplist_by_mc[mc].keys()] |
323 | return self.show_appends_output(filenames, None, " (skipped)") | 323 | return self.show_appends_output(filenames, None, " (skipped)") |
324 | 324 | ||
325 | def show_appends_output(self, filenames, best_filename, name_suffix = ''): | 325 | def show_appends_output(self, filenames, best_filename, name_suffix = ''): |
diff --git a/bitbake/lib/bs4/AUTHORS b/bitbake/lib/bs4/AUTHORS new file mode 100644 index 0000000000..1f14fe07de --- /dev/null +++ b/bitbake/lib/bs4/AUTHORS | |||
@@ -0,0 +1,49 @@ | |||
1 | Behold, mortal, the origins of Beautiful Soup... | ||
2 | ================================================ | ||
3 | |||
4 | Leonard Richardson is the primary maintainer. | ||
5 | |||
6 | Aaron DeVore and Isaac Muse have made significant contributions to the | ||
7 | code base. | ||
8 | |||
9 | Mark Pilgrim provided the encoding detection code that forms the base | ||
10 | of UnicodeDammit. | ||
11 | |||
12 | Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful | ||
13 | Soup 4 working under Python 3. | ||
14 | |||
15 | Simon Willison wrote soupselect, which was used to make Beautiful Soup | ||
16 | support CSS selectors. Isaac Muse wrote SoupSieve, which made it | ||
17 | possible to _remove_ the CSS selector code from Beautiful Soup. | ||
18 | |||
19 | Sam Ruby helped with a lot of edge cases. | ||
20 | |||
21 | Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his | ||
22 | work in solving the nestable tags conundrum. | ||
23 | |||
24 | An incomplete list of people have contributed patches to Beautiful | ||
25 | Soup: | ||
26 | |||
27 | Istvan Albert, Andrew Lin, Anthony Baxter, Oliver Beattie, Andrew | ||
28 | Boyko, Tony Chang, Francisco Canas, "Delong", Zephyr Fang, Fuzzy, | ||
29 | Roman Gaufman, Yoni Gilad, Richie Hindle, Toshihiro Kamiya, Peteris | ||
30 | Krumins, Kent Johnson, Marek Kapolka, Andreas Kostyrka, Roel Kramer, | ||
31 | Ben Last, Robert Leftwich, Stefaan Lippens, "liquider", Staffan | ||
32 | Malmgren, Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", | ||
33 | Ed Oskiewicz, Martijn Peters, Greg Phillips, Giles Radford, Stefano | ||
34 | Revera, Arthur Rudolph, Marko Samastur, James Salter, Jouni Seppänen, | ||
35 | Alexander Schmolck, Tim Shirley, Geoffrey Sneddon, Ville Skyttä, | ||
36 | "Vikas", Jens Svalgaard, Andy Theyers, Eric Weiser, Glyn Webster, John | ||
37 | Wiseman, Paul Wright, Danny Yoo | ||
38 | |||
39 | An incomplete list of people who made suggestions or found bugs or | ||
40 | found ways to break Beautiful Soup: | ||
41 | |||
42 | Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel, | ||
43 | Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes, | ||
44 | Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams, | ||
45 | warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, | ||
46 | Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed | ||
47 | Summers, Dennis Sutch, Chris Smith, Aaron Swartz, Stuart | ||
48 | Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de | ||
49 | Sousa Rocha, Yichun Wei, Per Vognsen | ||
diff --git a/bitbake/lib/bs4/AUTHORS.txt b/bitbake/lib/bs4/AUTHORS.txt deleted file mode 100644 index 2ac8fcc8cc..0000000000 --- a/bitbake/lib/bs4/AUTHORS.txt +++ /dev/null | |||
@@ -1,43 +0,0 @@ | |||
1 | Behold, mortal, the origins of Beautiful Soup... | ||
2 | ================================================ | ||
3 | |||
4 | Leonard Richardson is the primary programmer. | ||
5 | |||
6 | Aaron DeVore is awesome. | ||
7 | |||
8 | Mark Pilgrim provided the encoding detection code that forms the base | ||
9 | of UnicodeDammit. | ||
10 | |||
11 | Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful | ||
12 | Soup 4 working under Python 3. | ||
13 | |||
14 | Simon Willison wrote soupselect, which was used to make Beautiful Soup | ||
15 | support CSS selectors. | ||
16 | |||
17 | Sam Ruby helped with a lot of edge cases. | ||
18 | |||
19 | Jonathan Ellis was awarded the prestigous Beau Potage D'Or for his | ||
20 | work in solving the nestable tags conundrum. | ||
21 | |||
22 | An incomplete list of people have contributed patches to Beautiful | ||
23 | Soup: | ||
24 | |||
25 | Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang, | ||
26 | Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris | ||
27 | Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren, | ||
28 | Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed | ||
29 | Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko | ||
30 | Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn | ||
31 | Webster, Paul Wright, Danny Yoo | ||
32 | |||
33 | An incomplete list of people who made suggestions or found bugs or | ||
34 | found ways to break Beautiful Soup: | ||
35 | |||
36 | Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel, | ||
37 | Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes, | ||
38 | Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams, | ||
39 | warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, | ||
40 | Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed | ||
41 | Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart | ||
42 | Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de | ||
43 | Sousa Rocha, Yichun Wei, Per Vognsen | ||
diff --git a/bitbake/lib/bs4/NEWS.txt b/bitbake/lib/bs4/CHANGELOG index 88a60a2458..2701446a6d 100644 --- a/bitbake/lib/bs4/NEWS.txt +++ b/bitbake/lib/bs4/CHANGELOG | |||
@@ -1,3 +1,776 @@ | |||
1 | = 4.12.3 (20240117) | ||
2 | |||
3 | * The Beautiful Soup documentation now has a Spanish translation, thanks | ||
4 | to Carlos Romero. Delong Wang's Chinese translation has been updated | ||
5 | to cover Beautiful Soup 4.12.0. | ||
6 | |||
7 | * Fixed a regression such that if you set .hidden on a tag, the tag | ||
8 | becomes invisible but its contents are still visible. User manipulation | ||
9 | of .hidden is not a documented or supported feature, so don't do this, | ||
10 | but it wasn't too difficult to keep the old behavior working. | ||
11 | |||
12 | * Fixed a case found by Mengyuhan where html.parser giving up on | ||
13 | markup would result in an AssertionError instead of a | ||
14 | ParserRejectedMarkup exception. | ||
15 | |||
16 | * Added the correct stacklevel to instances of the XMLParsedAsHTMLWarning. | ||
17 | [bug=2034451] | ||
18 | |||
19 | * Corrected the syntax of the license definition in pyproject.toml. Patch | ||
20 | by Louis Maddox. [bug=2032848] | ||
21 | |||
22 | * Corrected a typo in a test that was causing test failures when run against | ||
23 | libxml2 2.12.1. [bug=2045481] | ||
24 | |||
25 | = 4.12.2 (20230407) | ||
26 | |||
27 | * Fixed an unhandled exception in BeautifulSoup.decode_contents | ||
28 | and methods that call it. [bug=2015545] | ||
29 | |||
30 | = 4.12.1 (20230405) | ||
31 | |||
32 | NOTE: the following things are likely to be dropped in the next | ||
33 | feature release of Beautiful Soup: | ||
34 | |||
35 | Official support for Python 3.6. | ||
36 | Inclusion of unit tests and test data in the wheel file. | ||
37 | Two scripts: demonstrate_parser_differences.py and test-all-versions. | ||
38 | |||
39 | Changes: | ||
40 | |||
41 | * This version of Beautiful Soup replaces setup.py and setup.cfg | ||
42 | with pyproject.toml. Beautiful Soup now uses tox as its test backend | ||
43 | and hatch to do builds. | ||
44 | |||
45 | * The main functional improvement in this version is a nonrecursive technique | ||
46 | for regenerating a tree. This technique is used to avoid situations where, | ||
47 | in previous versions, doing something to a very deeply nested tree | ||
48 | would overflow the Python interpreter stack: | ||
49 | |||
50 | 1. Outputting a tree as a string, e.g. with | ||
51 | BeautifulSoup.encode() [bug=1471755] | ||
52 | |||
53 | 2. Making copies of trees (copy.copy() and | ||
54 | copy.deepcopy() from the Python standard library). [bug=1709837] | ||
55 | |||
56 | 3. Pickling a BeautifulSoup object. (Note that pickling a Tag | ||
57 | object can still cause an overflow.) | ||
58 | |||
59 | * Making a copy of a BeautifulSoup object no longer parses the | ||
60 | document again, which should improve performance significantly. | ||
61 | |||
62 | * When a BeautifulSoup object is unpickled, Beautiful Soup now | ||
63 | tries to associate an appropriate TreeBuilder object with it. | ||
64 | |||
65 | * Tag.prettify() will now consistently end prettified markup with | ||
66 | a newline. | ||
67 | |||
68 | * Added unit tests for fuzz test cases created by third | ||
69 | parties. Some of these tests are skipped since they point | ||
70 | to problems outside of Beautiful Soup, but this change | ||
71 | puts them all in one convenient place. | ||
72 | |||
73 | * PageElement now implements the known_xml attribute. (This was technically | ||
74 | a bug, but it shouldn't be an issue in normal use.) [bug=2007895] | ||
75 | |||
76 | * The demonstrate_parser_differences.py script was still written in | ||
77 | Python 2. I've converted it to Python 3, but since no one has | ||
78 | mentioned this over the years, it's a sign that no one uses this | ||
79 | script and it's not serving its purpose. | ||
80 | |||
81 | = 4.12.0 (20230320) | ||
82 | |||
83 | * Introduced the .css property, which centralizes all access to | ||
84 | the Soup Sieve API. This allows Beautiful Soup to give direct | ||
85 | access to as much of Soup Sieve as makes sense, without cluttering | ||
86 | the BeautifulSoup and Tag classes with a lot of new methods. | ||
87 | |||
88 | This does mean one addition to the BeautifulSoup and Tag classes | ||
89 | (the .css property itself), so this might be a breaking change if you | ||
90 | happen to use Beautiful Soup to parse XML that includes a tag called | ||
91 | <css>. In particular, code like this will stop working in 4.12.0: | ||
92 | |||
93 | soup.css['id'] | ||
94 | |||
95 | Code like this will work just as before: | ||
96 | |||
97 | soup.find('css')['id'] | ||
98 | |||
99 | The Soup Sieve methods supported through the .css property are | ||
100 | select(), select_one(), iselect(), closest(), match(), filter(), | ||
101 | escape(), and compile(). The BeautifulSoup and Tag classes still | ||
102 | support the select() and select_one() methods; they have not been | ||
103 | deprecated, but they have been demoted to convenience methods. | ||
104 | |||
105 | [bug=2003677] | ||
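
  For illustration, a minimal sketch of the new property (the markup is
  invented for the example):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div><p class="a">x</p></div>', "html.parser")
    p = soup.css.select_one("p.a")      # same result as soup.select_one("p.a")
    ps = soup.css.select("div p")       # same result as soup.select("div p")
    quoted = soup.css.escape("p#a.b")   # direct access to soupsieve.escape()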
106 | |||
107 | * When the html.parser parser decides it can't parse a document, Beautiful | ||
108 | Soup now consistently propagates this fact by raising a | ||
109 | ParserRejectedMarkup error. [bug=2007343] | ||
110 | |||
111 | * Removed some error checking code from diagnose(), which is redundant with | ||
112 | similar (but more Pythonic) code in the BeautifulSoup constructor. | ||
113 | [bug=2007344] | ||
114 | |||
115 | * Added intersphinx references to the documentation so that other | ||
116 | projects have a target to point to when they reference Beautiful | ||
117 | Soup classes. [bug=1453370] | ||
118 | |||
119 | = 4.11.2 (20230131) | ||
120 | |||
121 | * Fixed test failures caused by nondeterministic behavior of | ||
122 | UnicodeDammit's character detection, depending on the platform setup. | ||
123 | [bug=1973072] | ||
124 | |||
125 | * Fixed another crash when overriding multi_valued_attributes and using the | ||
126 | html5lib parser. [bug=1948488] | ||
127 | |||
128 | * The HTMLFormatter and XMLFormatter constructors no longer return a | ||
129 | value. [bug=1992693] | ||
130 | |||
131 | * Tag.interesting_string_types is now propagated when a tag is | ||
132 | copied. [bug=1990400] | ||
133 | |||
134 | * Warnings now do their best to provide an appropriate stacklevel, | ||
135 | improving the usefulness of the message. [bug=1978744] | ||
136 | |||
137 | * Passing a Tag's .contents into PageElement.extend() now works the | ||
138 | same way as passing the Tag itself. | ||
139 | |||
140 | * Soup Sieve tests will be skipped if the library is not installed. | ||
141 | |||
142 | = 4.11.1 (20220408) | ||
143 | |||
144 | This release was done to ensure that the unit tests are packaged along | ||
145 | with the released source. There are no functionality changes in this | ||
146 | release, but there are a few other packaging changes: | ||
147 | |||
148 | * The Japanese and Korean translations of the documentation are included. | ||
149 | * The changelog is now packaged as CHANGELOG, and the license file is | ||
150 | packaged as LICENSE. NEWS.txt and COPYING.txt are still present, | ||
151 | but may be removed in the future. | ||
152 | * TODO.txt is no longer packaged, since a TODO is not relevant for released | ||
153 | code. | ||
154 | |||
155 | = 4.11.0 (20220407) | ||
156 | |||
157 | * Ported unit tests to use pytest. | ||
158 | |||
159 | * Added special string classes, RubyParenthesisString and RubyTextString, | ||
160 | to make it possible to treat ruby text specially in get_text() calls. | ||
161 | [bug=1941980] | ||
162 | |||
163 | * It's now possible to customize the way output is indented by | ||
164 | providing a value for the 'indent' argument to the Formatter | ||
165 | constructor. The 'indent' argument works very similarly to the | ||
166 | argument of the same name in the Python standard library's | ||
167 | json.dump() function. [bug=1955497] | ||
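
  A minimal sketch, with invented markup, showing two possible 'indent'
  values:

    from bs4 import BeautifulSoup
    from bs4.formatter import HTMLFormatter

    soup = BeautifulSoup("<div><p>x</p></div>", "html.parser")
    print(soup.prettify(formatter=HTMLFormatter(indent=4)))     # four spaces
    print(soup.prettify(formatter=HTMLFormatter(indent="\t")))  # one tab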
168 | |||
169 | * If the charset-normalizer Python module | ||
170 | (https://pypi.org/project/charset-normalizer/) is installed, Beautiful | ||
171 | Soup will use it to detect the character sets of incoming documents. | ||
172 | This is also the module used by newer versions of the Requests library. | ||
173 | For the sake of backwards compatibility, chardet and cchardet both take | ||
174 | precedence if installed. [bug=1955346] | ||
175 | |||
176 | * Added a workaround for an lxml bug | ||
177 | (https://bugs.launchpad.net/lxml/+bug/1948551) that causes | ||
178 | problems when parsing a Unicode string beginning with a byte-order mark. | ||
179 | [bug=1947768] | ||
180 | |||
181 | * Issue a warning when an HTML parser is used to parse a document that | ||
182 | looks like XML but not XHTML. [bug=1939121] | ||
183 | |||
184 | * Do a better job of keeping track of namespaces as an XML document is | ||
185 | parsed, so that CSS selectors that use namespaces will do the right | ||
186 | thing more often. [bug=1946243] | ||
187 | |||
188 | * Some time ago, the misleadingly named "text" argument to find-type | ||
189 | methods was renamed to the more accurate "string." But this supposed | ||
190 | "renaming" didn't make it into important places like the method | ||
191 | signatures or the docstrings. That's corrected in this | ||
192 | version. "text" still works, but will give a DeprecationWarning. | ||
193 | [bug=1947038] | ||
194 | |||
195 | * Fixed a crash when pickling a BeautifulSoup object that has no | ||
196 | tree builder. [bug=1934003] | ||
197 | |||
198 | * Fixed a crash when overriding multi_valued_attributes and using the | ||
199 | html5lib parser. [bug=1948488] | ||
200 | |||
201 | * Standardized the wording of the MarkupResemblesLocatorWarning | ||
202 | warnings to omit untrusted input and make the warnings less | ||
203 | judgmental about what you ought to be doing. [bug=1955450] | ||
204 | |||
205 | * Removed support for the iconv_codec library, which doesn't seem | ||
206 | to exist anymore and was never put up on PyPI. (The closest | ||
207 | replacement on PyPI, iconv_codecs, is GPL-licensed, so we can't use | ||
208 | it--it's also quite old.) | ||
209 | |||
210 | = 4.10.0 (20210907) | ||
211 | |||
212 | * This is the first release of Beautiful Soup to only support Python | ||
213 | 3. I dropped Python 2 support to maintain support for newer versions | ||
214 | (58 and up) of setuptools. See: | ||
215 | https://github.com/pypa/setuptools/issues/2769 [bug=1942919] | ||
216 | |||
217 | * The behavior of methods like .get_text() and .strings now differs | ||
218 | depending on the type of tag. The change is visible with HTML tags | ||
219 | like <script>, <style>, and <template>. Starting in 4.9.0, methods | ||
220 | like get_text() returned no results on such tags, because the | ||
221 | contents of those tags are not considered 'text' within the document | ||
222 | as a whole. | ||
223 | |||
224 | But a user who calls script.get_text() is working from a different | ||
225 | definition of 'text' than a user who calls div.get_text()--otherwise | ||
226 | there would be no need to call script.get_text() at all. In 4.10.0, | ||
227 | the contents of (e.g.) a <script> tag are considered 'text' during a | ||
228 | get_text() call on the tag itself, but not considered 'text' during | ||
229 | a get_text() call on the tag's parent. | ||
230 | |||
231 | Because of this change, calling get_text() on each child of a tag | ||
232 | may now return a different result than calling get_text() on the tag | ||
233 | itself. That's because different tags now have different | ||
234 | understandings of what counts as 'text'. [bug=1906226] [bug=1868861] | ||
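
  A short sketch of the difference (markup invented for the example):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div>text<script>var x = 1;</script></div>",
                         "html.parser")
    soup.div.get_text()     # 'text' -- script contents aren't 'text' here
    soup.script.get_text()  # 'var x = 1;' -- but they are here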
235 | |||
236 | * NavigableString and its subclasses now implement the get_text() | ||
237 | method, as well as the properties .strings and | ||
238 | .stripped_strings. These methods will either return the string | ||
239 | itself, or nothing, so the only reason to use this is when iterating | ||
240 | over a list of mixed Tag and NavigableString objects. [bug=1904309] | ||
241 | |||
242 | * The 'html5' formatter now treats attributes whose values are the | ||
243 | empty string as HTML boolean attributes. Previously (and in other | ||
244 | formatters), an attribute value had to be set to None to be treated as | ||
245 | a boolean attribute. In a future release, I plan to also give this | ||
246 | behavior to the 'html' formatter. Patch by Isaac Muse. [bug=1915424] | ||
247 | |||
248 | * The 'replace_with()' method now takes a variable number of arguments, | ||
249 | and can be used to replace a single element with a sequence of elements. | ||
250 | Patch by Bill Chandos. [rev=605] | ||
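
  For example (tag names invented):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p><b>old</b></p>", "html.parser")
    em = soup.new_tag("em")
    em.string = "new"
    soup.b.replace_with("brand ", em)   # <p>brand <em>new</em></p>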
251 | |||
252 | * Corrected output when the namespace prefix associated with a | ||
253 | namespaced attribute is the empty string, as opposed to | ||
254 | None. [bug=1915583] | ||
255 | |||
256 | * Improved performance when processing tags, speeding up overall | ||
257 | tree construction by 2%. Patch by Morotti. [bug=1899358] | ||
258 | |||
259 | * Corrected the use of special string container classes in cases where | ||
260 | a single tag may contain strings with different containers, such as | ||
261 | the <template> tag, which may contain both TemplateString objects | ||
262 | and Comment objects. [bug=1913406] | ||
263 | |||
264 | * The html.parser tree builder can now handle named entities | ||
265 | found in the HTML5 spec in much the same way that the html5lib | ||
266 | tree builder does. Note that the lxml HTML tree builder doesn't handle | ||
267 | named entities this way. [bug=1924908] | ||
268 | |||
269 | * Added a second way to specify encodings to UnicodeDammit and | ||
270 | EncodingDetector, based on the order of precedence defined in the | ||
271 | HTML5 spec, starting at: | ||
272 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
273 | |||
274 | Encodings in 'known_definite_encodings' are tried first, then | ||
275 | byte-order-mark sniffing is run, then encodings in 'user_encodings' | ||
276 | are tried. The old argument, 'override_encodings', is now a | ||
277 | deprecated alias for 'known_definite_encodings'. | ||
278 | |||
279 | This changes the default behavior of the html.parser and lxml tree | ||
280 | builders, in a way that may slightly improve encoding | ||
281 | detection but will probably have no effect. [bug=1889014] | ||
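
  A hedged sketch of the two tiers (the byte string is invented):

    from bs4 import UnicodeDammit

    data = "Sacré bleu!".encode("utf8")
    dammit = UnicodeDammit(
        data,
        known_definite_encodings=["utf8"],   # tried before BOM sniffing
        user_encodings=["latin-1"],          # tried after BOM sniffing
    )
    dammit.unicode_markup      # 'Sacré bleu!'
    dammit.original_encoding   # the encoding that succeeded ('utf8')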
282 | |||
283 | * Improve the warning issued when a directory name (as opposed to | ||
284 | the name of a regular file) is passed as markup into the BeautifulSoup | ||
285 | constructor. [bug=1913628] | ||
286 | |||
287 | = 4.9.3 (20201003) | ||
288 | |||
289 | This is the final release of Beautiful Soup to support Python | ||
290 | 2. Beautiful Soup's official support for Python 2 ended on January 1, | ||
291 | 2021. In the Launchpad Git repository, the final revision to support | ||
292 | Python 2 was revision 70f546b1e689a70e2f103795efce6d261a3dadf7; it is | ||
293 | tagged as "python2". | ||
294 | |||
295 | * Implemented a significant performance optimization to the process of | ||
296 | searching the parse tree. Patch by Morotti. [bug=1898212] | ||
297 | |||
298 | = 4.9.2 (20200926) | ||
299 | |||
300 | * Fixed a bug that caused too many tags to be popped from the tag | ||
301 | stack during tree building, when encountering a closing tag that had | ||
302 | no matching opening tag. [bug=1880420] | ||
303 | |||
304 | * Fixed a bug that inconsistently moved elements over when passing | ||
305 | a Tag, rather than a list, into Tag.extend(). [bug=1885710] | ||
306 | |||
307 | * Specify the soupsieve dependency in a way that complies with | ||
308 | PEP 508. Patch by Mike Nerone. [bug=1893696] | ||
309 | |||
310 | * Change the signatures for BeautifulSoup.insert_before and insert_after | ||
311 | (which are not implemented) to match PageElement.insert_before and | ||
312 | insert_after, quieting warnings in some IDEs. [bug=1897120] | ||
313 | |||
314 | = 4.9.1 (20200517) | ||
315 | |||
316 | * Added a keyword argument 'on_duplicate_attribute' to the | ||
317 | BeautifulSoupHTMLParser constructor (used by the html.parser tree | ||
318 | builder) which lets you customize the handling of markup that | ||
319 | contains the same attribute more than once, as in: | ||
320 | <a href="url1" href="url2"> [bug=1878209] | ||
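
  A minimal sketch; besides 'ignore' you can pass 'replace' (use the
  last value) or a callable:

    from bs4 import BeautifulSoup

    markup = '<a href="url1" href="url2">'
    soup = BeautifulSoup(markup, "html.parser",
                         on_duplicate_attribute="ignore")
    soup.a["href"]   # 'url1' -- the first value wins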
321 | |||
322 | * Added a distinct subclass, GuessedAtParserWarning, for the warning | ||
323 | issued when BeautifulSoup is instantiated without a parser being | ||
324 | specified. [bug=1873787] | ||
325 | |||
326 | * Added a distinct subclass, MarkupResemblesLocatorWarning, for the | ||
327 | warning issued when BeautifulSoup is instantiated with 'markup' that | ||
328 | actually seems to be a URL or the path to a file on | ||
329 | disk. [bug=1873787] | ||
330 | |||
331 | * The new NavigableString subclasses (Stylesheet, Script, and | ||
332 | TemplateString) can now be imported directly from the bs4 package. | ||
333 | |||
334 | * If you encode a document with a Python-specific encoding like | ||
335 | 'unicode_escape', that encoding is no longer mentioned in the final | ||
336 | XML or HTML document. Instead, encoding information is omitted or | ||
337 | left blank. [bug=1874955] | ||
338 | |||
339 | * Fixed test failures when run against soupsieve 2.0. Patch by Tomáš | ||
340 | Chvátal. [bug=1872279] | ||
341 | |||
342 | = 4.9.0 (20200405) | ||
343 | |||
344 | * Added PageElement.decomposed, a new property which lets you | ||
345 | check whether you've already called decompose() on a Tag or | ||
346 | NavigableString. | ||
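
  For example:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>x</p>", "html.parser")
    p = soup.p
    p.decomposed   # False
    p.decompose()
    p.decomposed   # True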
347 | |||
348 | * Embedded CSS and JavaScript are now stored in distinct Stylesheet and | ||
349 | Script objects, which are ignored by methods like get_text() since most | ||
350 | people don't consider this sort of content to be 'text'. This | ||
351 | feature is not supported by the html5lib treebuilder. [bug=1868861] | ||
352 | |||
353 | * Added a Russian translation by 'authoress' to the repository. | ||
354 | |||
355 | * Fixed an unhandled exception when formatting a Tag that had been | ||
356 | decomposed. [bug=1857767] | ||
357 | |||
358 | * Fixed a bug that happened when passing a Unicode filename containing | ||
359 | non-ASCII characters as markup into Beautiful Soup, on a system that | ||
360 | allows Unicode filenames. [bug=1866717] | ||
361 | |||
362 | * Added a performance optimization to PageElement.extract(). Patch by | ||
363 | Arthur Darcet. | ||
364 | |||
365 | = 4.8.2 (20191224) | ||
366 | |||
367 | * Added Python docstrings to all public methods of the most commonly | ||
368 | used classes. | ||
369 | |||
370 | * Added a Chinese translation by Deron Wang and a Brazilian Portuguese | ||
371 | translation by Cezar Peixeiro to the repository. | ||
372 | |||
373 | * Fixed two deprecation warnings. Patches by Colin | ||
374 | Watson and Nicholas Neumann. [bug=1847592] [bug=1855301] | ||
375 | |||
376 | * The html.parser tree builder now correctly handles DOCTYPEs that are | ||
377 | not uppercase. [bug=1848401] | ||
378 | |||
379 | * PageElement.select() now returns a ResultSet rather than a regular | ||
380 | list, making it consistent with methods like find_all(). | ||
381 | |||
382 | = 4.8.1 (20191006) | ||
383 | |||
384 | * When the html.parser or html5lib parsers are in use, Beautiful Soup | ||
385 | will, by default, record the position in the original document where | ||
386 | each tag was encountered. This includes line number (Tag.sourceline) | ||
387 | and position within a line (Tag.sourcepos). Based on code by Chris | ||
388 | Mayo. [bug=1742921] | ||
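
  For example (markup invented; with html.parser, sourceline is 1-based
  and sourcepos is 0-based):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>one</p>\n<p>two</p>", "html.parser")
    second = soup.find_all("p")[1]
    second.sourceline, second.sourcepos   # (2, 0)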
389 | |||
390 | * When instantiating a BeautifulSoup object, it's now possible to | ||
391 | provide a dictionary ('element_classes') of the classes you'd like to be | ||
392 | instantiated instead of Tag, NavigableString, etc. | ||
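
  A sketch of the idea (the Tag subclass is hypothetical):

    from bs4 import BeautifulSoup, Tag

    class MyTag(Tag):
        """A Tag subclass with custom behavior."""

    soup = BeautifulSoup("<p>x</p>", "html.parser",
                         element_classes={Tag: MyTag})
    type(soup.p)   # <class '...MyTag'>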
393 | |||
394 | * Fixed the definition of the default XML namespace when using | ||
395 | lxml 4.4. Patch by Isaac Muse. [bug=1840141] | ||
396 | |||
397 | * Fixed a crash when pretty-printing tags that were not created | ||
398 | during initial parsing. [bug=1838903] | ||
399 | |||
400 | * Copying a Tag preserves information that was originally obtained from | ||
401 | the TreeBuilder used to build the original Tag. [bug=1838903] | ||
402 | |||
403 | * Raise an explanatory exception when the underlying parser | ||
404 | completely rejects the incoming markup. [bug=1838877] | ||
405 | |||
406 | * Avoid a crash when trying to detect the declared encoding of a | ||
407 | Unicode document. [bug=1838877] | ||
408 | |||
409 | * Avoid a crash when unpickling certain parse trees generated | ||
410 | using html5lib on Python 3. [bug=1843545] | ||
411 | |||
412 | = 4.8.0 (20190720, "One Small Soup") | ||
413 | |||
414 | This release focuses on making it easier to customize Beautiful Soup's | ||
415 | input mechanism (the TreeBuilder) and output mechanism (the Formatter). | ||
416 | |||
417 | * You can customize the TreeBuilder object by passing keyword | ||
418 | arguments into the BeautifulSoup constructor. Those keyword | ||
419 | arguments will be passed along into the TreeBuilder constructor. | ||
420 | |||
421 | The main reason to do this right now is to change which | ||
422 | attributes are treated as multi-valued attributes (the way 'class' | ||
423 | is treated by default). You can do this with the | ||
424 | 'multi_valued_attributes' argument. [bug=1832978] | ||
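
  For instance (markup invented; passing None turns the feature off
  entirely):

    from bs4 import BeautifulSoup

    markup = '<a class="foo bar">'
    soup = BeautifulSoup(markup, "html.parser")
    soup.a["class"]   # ['foo', 'bar']

    soup = BeautifulSoup(markup, "html.parser",
                         multi_valued_attributes=None)
    soup.a["class"]   # 'foo bar'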
425 | |||
426 | * The role of Formatter objects has been greatly expanded. The Formatter | ||
427 | class now controls the following: | ||
428 | |||
429 | - The function to call to perform entity substitution. (This was | ||
430 | previously Formatter's only job.) | ||
431 | - Which tags should be treated as containing CDATA and have their | ||
432 | contents exempt from entity substitution. | ||
433 | - The order in which a tag's attributes are output. [bug=1812422] | ||
434 | - Whether or not to put a '/' inside a void element, e.g. '<br/>' vs '<br>' | ||
435 | |||
436 | All preexisting code should work as before. | ||
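
  A sketch of the expanded role (the subclass name is invented;
  attribute order is controlled by overriding Formatter.attributes()):

    from bs4 import BeautifulSoup
    from bs4.formatter import HTMLFormatter

    class UnsortedAttributes(HTMLFormatter):
        """Emit attributes in the order they appear in the document."""
        def attributes(self, tag):
            for k, v in tag.attrs.items():
                yield k, v

    soup = BeautifulSoup('<p z="1" a="2">x<br></p>', "html.parser")
    soup.p.encode(formatter=UnsortedAttributes())
    # b'<p z="1" a="2">x<br/></p>'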
437 | |||
438 | * Added a new method to the API, Tag.smooth(), which consolidates | ||
439 | multiple adjacent NavigableString elements. [bug=1697296] | ||
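
  For example:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>a</p>", "html.parser")
    soup.p.append("b")
    soup.p.contents   # ['a', 'b'] -- two adjacent strings
    soup.p.smooth()
    soup.p.contents   # ['ab']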
440 | |||
441 | * &apos; (which is valid in XML, XHTML, and HTML 5, but not HTML 4) is always | ||
442 | recognized as a named entity and converted to a single quote. [bug=1818721] | ||
443 | |||
444 | = 4.7.1 (20190106) | ||
445 | |||
446 | * Fixed a significant performance problem introduced in 4.7.0. [bug=1810617] | ||
447 | |||
448 | * Fixed an incorrectly raised exception when inserting a tag before or | ||
449 | after an identical tag. [bug=1810692] | ||
450 | |||
451 | * Beautiful Soup will no longer try to keep track of namespaces that | ||
452 | are not defined with a prefix; this can confuse soupsieve. [bug=1810680] | ||
453 | |||
454 | * Tried even harder to avoid the deprecation warning originally fixed in | ||
455 | 4.6.1. [bug=1778909] | ||
456 | |||
457 | = 4.7.0 (20181231) | ||
458 | |||
459 | * Beautiful Soup's CSS Selector implementation has been replaced by a | ||
460 | dependency on Isaac Muse's SoupSieve project (the soupsieve package | ||
461 | on PyPI). The good news is that SoupSieve has a much more robust and | ||
462 | complete implementation of CSS selectors, resolving a large number | ||
463 | of longstanding issues. The bad news is that from this point onward, | ||
464 | SoupSieve must be installed if you want to use the select() method. | ||
465 | |||
466 | You don't have to change anything if you installed Beautiful Soup | ||
467 | through pip (SoupSieve will be automatically installed when you | ||
468 | upgrade Beautiful Soup) or if you don't use CSS selectors from | ||
469 | within Beautiful Soup. | ||
470 | |||
471 | SoupSieve documentation: https://facelessuser.github.io/soupsieve/ | ||
472 | |||
473 | * Added the PageElement.extend() method, which works like list.extend(). | ||
474 | [bug=1514970] | ||
475 | |||
476 | * PageElement.insert_before() and insert_after() now take a variable | ||
477 | number of arguments. [bug=1514970] | ||
478 | |||
479 | * Fix a number of problems with the tree builder that caused | ||
480 | trees that were superficially okay, but which fell apart when bits | ||
481 | were extracted. Patch by Isaac Muse. [bug=1782928,1809910] | ||
482 | |||
483 | * Fixed a problem with the tree builder in which elements that | ||
484 | contained no content (such as empty comments and all-whitespace | ||
485 | elements) were not being treated as part of the tree. Patch by Isaac | ||
486 | Muse. [bug=1798699] | ||
487 | |||
488 | * Fixed a problem with multi-valued attributes where the value | ||
489 | contained whitespace. Thanks to Jens Svalgaard for the | ||
490 | fix. [bug=1787453] | ||
491 | |||
492 | * Clarified ambiguous license statements in the source code. Beautiful | ||
493 | Soup is released under the MIT license, and has been since 4.4.0. | ||
494 | |||
495 | * This file has been renamed from NEWS.txt to CHANGELOG. | ||
496 | |||
497 | = 4.6.3 (20180812) | ||
498 | |||
499 | * Exactly the same as 4.6.2. Re-released to make the README file | ||
500 | render properly on PyPI. | ||
501 | |||
502 | = 4.6.2 (20180812) | ||
503 | |||
504 | * Fix an exception when a custom formatter was asked to format a void | ||
505 | element. [bug=1784408] | ||
506 | |||
507 | = 4.6.1 (20180728) | ||
508 | |||
509 | * Stop data loss when encountering an empty numeric entity, and | ||
510 | possibly in other cases. Thanks to tos.kamiya for the fix. [bug=1698503] | ||
511 | |||
512 | * Preserve XML namespaces introduced inside an XML document, not just | ||
513 | the ones introduced at the top level. [bug=1718787] | ||
514 | |||
515 | * Added a new formatter, "html5", which represents void elements | ||
516 | as "<element>" rather than "<element/>". [bug=1716272] | ||
517 | |||
518 | * Fixed a problem where the html.parser tree builder interpreted | ||
519 | a string like "&foo " as the character entity "&foo;" [bug=1728706] | ||
520 | |||
521 | * Correctly handle invalid HTML numeric character entities like &#147; | ||
522 | which reference code points that are not Unicode code points. Note | ||
523 | that this is only fixed when Beautiful Soup is used with the | ||
524 | html.parser parser -- html5lib already worked and I couldn't fix it | ||
525 | with lxml. [bug=1782933] | ||
526 | |||
527 | * Improved the warning given when no parser is specified. [bug=1780571] | ||
528 | |||
529 | * When markup contains duplicate elements, a select() call that | ||
530 | includes multiple match clauses will match all relevant | ||
531 | elements. [bug=1770596] | ||
532 | |||
533 | * Fixed code that was causing deprecation warnings in recent Python 3 | ||
534 | versions. Includes a patch from Ville Skyttä. [bug=1778909] [bug=1689496] | ||
535 | |||
536 | * Fixed a Windows crash in diagnose() when checking whether a long | ||
537 | markup string is a filename. [bug=1737121] | ||
538 | |||
539 | * Stopped HTMLParser from raising an exception in very rare cases of | ||
540 | bad markup. [bug=1708831] | ||
541 | |||
542 | * Fixed a bug where find_all() was not working when asked to find a | ||
543 | tag with a namespaced name in an XML document that was parsed as | ||
544 | HTML. [bug=1723783] | ||
545 | |||
546 | * You can get finer control over formatting by subclassing | ||
547 | bs4.element.Formatter and passing a Formatter instance into (e.g.) | ||
548 | encode(). [bug=1716272] | ||
549 | |||
550 | * You can pass a dictionary of `attrs` into | ||
551 | BeautifulSoup.new_tag. This makes it possible to create a tag with | ||
552 | an attribute like 'name' that would otherwise be masked by another | ||
553 | argument of new_tag. [bug=1779276] | ||
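
  For example (the tag and attribute names are invented):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("", "html.parser")
    tag = soup.new_tag("input", attrs={"name": "username"})
    # <input name="username"/>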
554 | |||
555 | * Clarified the deprecation warning when accessing tag.fooTag, to cover | ||
556 | the possibility that you might really have been looking for a tag | ||
557 | called 'fooTag'. | ||
558 | |||
559 | = 4.6.0 (20170507) = | ||
560 | |||
561 | * Added the `Tag.get_attribute_list` method, which acts like `Tag.get` for | ||
562 | getting the value of an attribute, but which always returns a list, | ||
563 | whether or not the attribute is a multi-value attribute. [bug=1678589] | ||
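
  For example (markup invented):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p id="para" class="a b">x</p>', "html.parser")
    soup.p.get_attribute_list("id")      # ['para']
    soup.p.get_attribute_list("class")   # ['a', 'b']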
564 | |||
565 | * It's now possible to use a tag's namespace prefix when searching, | ||
566 | e.g. soup.find('namespace:tag') [bug=1655332] | ||
567 | |||
568 | * Improved the handling of empty-element tags like <br> when using the | ||
569 | html.parser parser. [bug=1676935] | ||
570 | |||
571 | * HTML parsers treat all HTML4 and HTML5 empty element tags (aka void | ||
572 | element tags) correctly. [bug=1656909] | ||
573 | |||
574 | * Namespace prefix is preserved when an XML tag is copied. Thanks | ||
575 | to Vikas for a patch and test. [bug=1685172] | ||
576 | |||
577 | = 4.5.3 (20170102) = | ||
578 | |||
579 | * Fixed foster parenting when html5lib is the tree builder. Thanks to | ||
580 | Geoffrey Sneddon for a patch and test. | ||
581 | |||
582 | * Fixed yet another problem that caused the html5lib tree builder to | ||
583 | create a disconnected parse tree. [bug=1629825] | ||
584 | |||
585 | = 4.5.2 (20170102) = | ||
586 | |||
587 | * Apart from the version number, this release is identical to | ||
588 | 4.5.3. Due to user error, it could not be completely uploaded to | ||
589 | PyPI. Use 4.5.3 instead. | ||
590 | |||
591 | = 4.5.1 (20160802) = | ||
592 | |||
593 | * Fixed a crash when passing Unicode markup that contained a | ||
594 | processing instruction into the lxml HTML parser on Python | ||
595 | 3. [bug=1608048] | ||
596 | |||
597 | = 4.5.0 (20160719) = | ||
598 | |||
599 | * Beautiful Soup is no longer compatible with Python 2.6. This | ||
600 | actually happened a few releases ago, but it's now official. | ||
601 | |||
602 | * Beautiful Soup will now work with versions of html5lib greater than | ||
603 | 0.99999999. [bug=1603299] | ||
604 | |||
605 | * If a search against each individual value of a multi-valued | ||
606 | attribute fails, the search will be run one final time against the | ||
607 | complete attribute value considered as a single string. That is, if | ||
608 | a tag has class="foo bar" and neither "foo" nor "bar" matches, but | ||
609 | "foo bar" does, the tag is now considered a match. | ||
610 | |||
611 | This happened in previous versions, but only when the value being | ||
612 | searched for was a string. Now it also works when that value is | ||
613 | a regular expression, a list of strings, etc. [bug=1476868] | ||
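
  For example (markup invented; both searches now match):

    import re
    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p class="foo bar">x</p>', "html.parser")
    soup.find_all("p", class_="foo bar")            # whole-string match
    soup.find_all("p", class_=re.compile("foo b"))  # now works too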
614 | |||
615 | * Fixed a bug that deranged the tree when a whitespace element was | ||
616 | reparented into a tag that contained an identical whitespace | ||
617 | element. [bug=1505351] | ||
618 | |||
619 | * Added support for CSS selector values that contain quoted spaces, | ||
620 | such as tag[style="display: foo"]. [bug=1540588] | ||
621 | |||
622 | * Corrected handling of XML processing instructions. [bug=1504393] | ||
623 | |||
624 | * Corrected an encoding error that happened when a BeautifulSoup | ||
625 | object was copied. [bug=1554439] | ||
626 | |||
627 | * The contents of <textarea> tags will no longer be modified when the | ||
628 | tree is prettified. [bug=1555829] | ||
629 | |||
630 | * When a BeautifulSoup object is pickled but its tree builder cannot | ||
631 | be pickled, its .builder attribute is set to None instead of being | ||
632 | destroyed. This avoids a performance problem once the object is | ||
633 | unpickled. [bug=1523629] | ||
634 | |||
635 | * Specify the file and line number when warning about a | ||
636 | BeautifulSoup object being instantiated without a parser being | ||
637 | specified. [bug=1574647] | ||
638 | |||
639 | * The `limit` argument to `select()` now works correctly, though it's | ||
640 | not implemented very efficiently. [bug=1520530] | ||
641 | |||
642 | * Fixed a Python 3 BytesWarning when a URL was passed in as though it | ||
643 | were markup. Thanks to James Salter for a patch and | ||
644 | test. [bug=1533762] | ||
645 | |||
646 | * We don't run the check for a filename passed in as markup if the | ||
647 | 'filename' contains a less-than character; the less-than character | ||
648 | indicates it's most likely a very small document. [bug=1577864] | ||
649 | |||
650 | = 4.4.1 (20150928) = | ||
651 | |||
652 | * Fixed a bug that deranged the tree when part of it was | ||
653 | removed. Thanks to Eric Weiser for the patch and John Wiseman for a | ||
654 | test. [bug=1481520] | ||
655 | |||
656 | * Fixed a parse bug with the html5lib tree-builder. Thanks to Roel | ||
657 | Kramer for the patch. [bug=1483781] | ||
658 | |||
659 | * Improved the implementation of CSS selector grouping. Thanks to | ||
660 | Orangain for the patch. [bug=1484543] | ||
661 | |||
662 | * Fixed the test_detect_utf8 test so that it works when chardet is | ||
663 | installed. [bug=1471359] | ||
664 | |||
665 | * Corrected the output of Declaration objects. [bug=1477847] | ||
666 | |||
667 | |||
668 | = 4.4.0 (20150703) = | ||
669 | |||
670 | Especially important changes: | ||
671 | |||
672 | * Added a warning when you instantiate a BeautifulSoup object without | ||
673 | explicitly naming a parser. [bug=1398866] | ||
674 | |||
675 | * __repr__ now returns an ASCII bytestring in Python 2, and a Unicode | ||
676 | string in Python 3, instead of a UTF8-encoded bytestring in both | ||
677 | versions. In Python 3, __str__ now returns a Unicode string instead | ||
678 | of a bytestring. [bug=1420131] | ||
679 | |||
680 | * The `text` argument to the find_* methods is now called `string`, | ||
681 | which is more accurate. `text` still works, but `string` is the | ||
682 | argument described in the documentation. `text` may eventually | ||
683 | change its meaning, but not for a very long time. [bug=1366856] | ||
684 | |||
685 | * Changed the way soup objects work under copy.copy(). Copying a | ||
686 | NavigableString or a Tag will give you a new object of the same | ||
687 | type that's equal to the old one but not connected to the parse | ||
688 | tree. Patch by Martijn Pieters. [bug=1307490] | ||
689 | |||
690 | * Started using a standard MIT license. [bug=1294662] | ||
691 | |||
692 | * Added a Chinese translation of the documentation by Delong .w. | ||
693 | |||
694 | New features: | ||
695 | |||
696 | * Introduced the select_one() method, which uses a CSS selector but | ||
697 | only returns the first match, instead of a list of | ||
698 | matches. [bug=1349367] | ||
699 | |||
700 | * You can now create a Tag object without specifying a | ||
701 | TreeBuilder. Patch by Martijn Pieters. [bug=1307471] | ||
702 | |||
703 | * You can now create a NavigableString or a subclass just by invoking | ||
704 | the constructor. [bug=1294315] | ||
705 | |||
706 | * Added an `exclude_encodings` argument to UnicodeDammit and to the | ||
707 | Beautiful Soup constructor, which lets you prohibit the detection of | ||
708 | an encoding that you know is wrong. [bug=1469408] | ||
709 | |||
710 | * The select() method now supports selector grouping. Patch by | ||
711 | Francisco Canas. [bug=1191917] | ||
712 | |||
713 | Bug fixes: | ||
714 | |||
715 | * Fixed yet another problem that caused the html5lib tree builder to | ||
716 | create a disconnected parse tree. [bug=1237763] | ||
717 | |||
718 | * Force object_was_parsed() to keep the tree intact even when an element | ||
719 | from later in the document is moved into place. [bug=1430633] | ||
720 | |||
721 | * Fixed yet another bug that caused a disconnected tree when html5lib | ||
722 | copied an element from one part of the tree to another. [bug=1270611] | ||
723 | |||
724 | * Fixed a bug where Element.extract() could create an infinite loop in | ||
725 | the remaining tree. | ||
726 | |||
727 | * The select() method can now find tags whose names contain | ||
728 | dashes. Patch by Francisco Canas. [bug=1276211] | ||
729 | |||
730 | * The select() method can now find tags with attributes whose names | ||
731 | contain dashes. Patch by Marek Kapolka. [bug=1304007] | ||
732 | |||
733 | * Improved the lxml tree builder's handling of processing | ||
734 | instructions. [bug=1294645] | ||
735 | |||
736 | * Restored the helpful syntax error that happens when you try to | ||
737 | import the Python 2 edition of Beautiful Soup under Python | ||
738 | 3. [bug=1213387] | ||
739 | |||
740 | * In Python 3.4 and above, set the new convert_charrefs argument to | ||
741 | the html.parser constructor to avoid a warning and future | ||
742 | failures. Patch by Stefano Revera. [bug=1375721] | ||
743 | |||
744 | * The warning when you pass in a filename or URL as markup will now be | ||
745 | displayed correctly even if the filename or URL is a Unicode | ||
746 | string. [bug=1268888] | ||
747 | |||
748 | * If the initial <html> tag contains a CDATA list attribute such as | ||
749 | 'class', the html5lib tree builder will now turn its value into a | ||
750 | list, as it would with any other tag. [bug=1296481] | ||
751 | |||
752 | * Fixed an import error in Python 3.5 caused by the removal of the | ||
753 | HTMLParseError class. [bug=1420063] | ||
754 | |||
755 | * Improved docstring for encode_contents() and | ||
756 | decode_contents(). [bug=1441543] | ||
757 | |||
758 | * Fixed a crash in Unicode, Dammit's encoding detector when the name | ||
759 | of the encoding itself contained invalid bytes. [bug=1360913] | ||
760 | |||
761 | * Improved the exception raised when you call .unwrap() or | ||
762 | .replace_with() on an element that's not attached to a tree. | ||
763 | |||
764 | * Raise a NotImplementedError whenever an unsupported CSS pseudoclass | ||
765 | is used in select(). Previously some cases did not result in a | ||
766 | NotImplementedError. | ||
767 | |||
768 | * It's now possible to pickle a BeautifulSoup object no matter which | ||
769 | tree builder was used to create it. However, the only tree builder | ||
770 | that survives the pickling process is the HTMLParserTreeBuilder | ||
771 | ('html.parser'). If you unpickle a BeautifulSoup object created with | ||
772 | some other tree builder, soup.builder will be None. [bug=1231545] | ||
773 | |||
1 | = 4.3.2 (20131002) = | 774 | = 4.3.2 (20131002) = |
2 | 775 | ||
3 | * Fixed a bug in which short Unicode input was improperly encoded to | 776 | * Fixed a bug in which short Unicode input was improperly encoded to |
@@ -331,7 +1104,7 @@ | |||
331 | * Renamed Tag.nsprefix to Tag.prefix, for consistency with | 1104 | * Renamed Tag.nsprefix to Tag.prefix, for consistency with |
332 | NamespacedAttribute. | 1105 | NamespacedAttribute. |
333 | 1106 | ||
334 | * Fixed a test failure that occured on Python 3.x when chardet was | 1107 | * Fixed a test failure that occurred on Python 3.x when chardet was |
335 | installed. | 1108 | installed. |
336 | 1109 | ||
337 | * Made prettify() return Unicode by default, so it will look nice on | 1110 | * Made prettify() return Unicode by default, so it will look nice on |
@@ -365,7 +1138,7 @@ | |||
365 | 1138 | ||
366 | * Restored compatibility with Python 2.6. | 1139 | * Restored compatibility with Python 2.6. |
367 | 1140 | ||
368 | * The install process no longer installs docs or auxillary text files. | 1141 | * The install process no longer installs docs or auxiliary text files. |
369 | 1142 | ||
370 | * It's now possible to deepcopy a BeautifulSoup object created with | 1143 | * It's now possible to deepcopy a BeautifulSoup object created with |
371 | Python's built-in HTML parser. | 1144 | Python's built-in HTML parser. |
@@ -604,7 +1377,7 @@ Added an import that makes BS work in Python 2.3. | |||
604 | Fixed a UnicodeDecodeError when unpickling documents that contain | 1377 | Fixed a UnicodeDecodeError when unpickling documents that contain |
605 | non-ASCII characters. | 1378 | non-ASCII characters. |
606 | 1379 | ||
607 | Fixed a TypeError that occured in some circumstances when a tag | 1380 | Fixed a TypeError that occurred in some circumstances when a tag |
608 | contained no text. | 1381 | contained no text. |
609 | 1382 | ||
610 | Jump through hoops to avoid the use of chardet, which can be extremely | 1383 | Jump through hoops to avoid the use of chardet, which can be extremely |
diff --git a/bitbake/lib/bs4/COPYING.txt b/bitbake/lib/bs4/LICENSE index d668d13f04..08e3a9cf8c 100644 --- a/bitbake/lib/bs4/COPYING.txt +++ b/bitbake/lib/bs4/LICENSE | |||
@@ -1,6 +1,6 @@ | |||
1 | Beautiful Soup is made available under the MIT license: | 1 | Beautiful Soup is made available under the MIT license: |
2 | 2 | ||
3 | Copyright (c) 2004-2012 Leonard Richardson | 3 | Copyright (c) Leonard Richardson |
4 | 4 | ||
5 | Permission is hereby granted, free of charge, to any person obtaining | 5 | Permission is hereby granted, free of charge, to any person obtaining |
6 | a copy of this software and associated documentation files (the | 6 | a copy of this software and associated documentation files (the |
@@ -20,7 +20,12 @@ Beautiful Soup is made available under the MIT license: | |||
20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN | 20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN |
21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN | 21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN |
22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | 22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
23 | SOFTWARE, DAMMIT. | 23 | SOFTWARE. |
24 | 24 | ||
25 | Beautiful Soup incorporates code from the html5lib library, which is | 25 | Beautiful Soup incorporates code from the html5lib library, which is |
26 | also made available under the MIT license. | 26 | also made available under the MIT license. Copyright (c) James Graham |
27 | and other contributors | ||
28 | |||
29 | Beautiful Soup has an optional dependency on the soupsieve library, | ||
30 | which is also made available under the MIT license. Copyright (c) | ||
31 | Isaac Muse | ||
diff --git a/bitbake/lib/bs4/__init__.py b/bitbake/lib/bs4/__init__.py index e35725b86e..725203d94a 100644 --- a/bitbake/lib/bs4/__init__.py +++ b/bitbake/lib/bs4/__init__.py | |||
@@ -1,65 +1,99 @@ | |||
1 | """Beautiful Soup | 1 | """Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend". |
2 | Elixir and Tonic | 2 | |
3 | "The Screen-Scraper's Friend" | ||
4 | http://www.crummy.com/software/BeautifulSoup/ | 3 | http://www.crummy.com/software/BeautifulSoup/ |
5 | 4 | ||
6 | Beautiful Soup uses a pluggable XML or HTML parser to parse a | 5 | Beautiful Soup uses a pluggable XML or HTML parser to parse a |
7 | (possibly invalid) document into a tree representation. Beautiful Soup | 6 | (possibly invalid) document into a tree representation. Beautiful Soup |
8 | provides provides methods and Pythonic idioms that make it easy to | 7 | provides methods and Pythonic idioms that make it easy to navigate, |
9 | navigate, search, and modify the parse tree. | 8 | search, and modify the parse tree. |
10 | 9 | ||
11 | Beautiful Soup works with Python 2.6 and up. It works better if lxml | 10 | Beautiful Soup works with Python 3.6 and up. It works better if lxml |
12 | and/or html5lib is installed. | 11 | and/or html5lib is installed. |
13 | 12 | ||
14 | For more than you ever wanted to know about Beautiful Soup, see the | 13 | For more than you ever wanted to know about Beautiful Soup, see the |
15 | documentation: | 14 | documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ |
16 | http://www.crummy.com/software/BeautifulSoup/bs4/doc/ | ||
17 | """ | 15 | """ |
18 | 16 | ||
19 | __author__ = "Leonard Richardson (leonardr@segfault.org)" | 17 | __author__ = "Leonard Richardson (leonardr@segfault.org)" |
20 | __version__ = "4.4.1" | 18 | __version__ = "4.12.3" |
21 | __copyright__ = "Copyright (c) 2004-2015 Leonard Richardson" | 19 | __copyright__ = "Copyright (c) 2004-2024 Leonard Richardson" |
20 | # Use of this source code is governed by the MIT license. | ||
22 | __license__ = "MIT" | 21 | __license__ = "MIT" |
23 | 22 | ||
24 | __all__ = ['BeautifulSoup'] | 23 | __all__ = ['BeautifulSoup'] |
25 | 24 | ||
25 | from collections import Counter | ||
26 | import os | 26 | import os |
27 | import re | 27 | import re |
28 | import sys | ||
29 | import traceback | ||
28 | import warnings | 30 | import warnings |
29 | 31 | ||
30 | from .builder import builder_registry, ParserRejectedMarkup | 32 | # The very first thing we do is give a useful error if someone is |
33 | # running this code under Python 2. | ||
34 | if sys.version_info.major < 3: | ||
35 | raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. The final version of Beautiful Soup to support Python 2 was 4.9.3.') | ||
36 | |||
37 | from .builder import ( | ||
38 | builder_registry, | ||
39 | ParserRejectedMarkup, | ||
40 | XMLParsedAsHTMLWarning, | ||
41 | HTMLParserTreeBuilder | ||
42 | ) | ||
31 | from .dammit import UnicodeDammit | 43 | from .dammit import UnicodeDammit |
32 | from .element import ( | 44 | from .element import ( |
33 | CData, | 45 | CData, |
34 | Comment, | 46 | Comment, |
47 | CSS, | ||
35 | DEFAULT_OUTPUT_ENCODING, | 48 | DEFAULT_OUTPUT_ENCODING, |
36 | Declaration, | 49 | Declaration, |
37 | Doctype, | 50 | Doctype, |
38 | NavigableString, | 51 | NavigableString, |
39 | PageElement, | 52 | PageElement, |
40 | ProcessingInstruction, | 53 | ProcessingInstruction, |
54 | PYTHON_SPECIFIC_ENCODINGS, | ||
41 | ResultSet, | 55 | ResultSet, |
56 | Script, | ||
57 | Stylesheet, | ||
42 | SoupStrainer, | 58 | SoupStrainer, |
43 | Tag, | 59 | Tag, |
60 | TemplateString, | ||
44 | ) | 61 | ) |
45 | 62 | ||
46 | # The very first thing we do is give a useful error if someone is | 63 | # Define some custom warnings. |
47 | # running this code under Python 3 without converting it. | 64 | class GuessedAtParserWarning(UserWarning): |
48 | 'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' | 65 | """The warning issued when BeautifulSoup has to guess what parser to |
66 | use -- probably because no parser was specified in the constructor. | ||
67 | """ | ||
49 | 68 | ||
50 | class BeautifulSoup(Tag): | 69 | class MarkupResemblesLocatorWarning(UserWarning): |
70 | """The warning issued when BeautifulSoup is given 'markup' that | ||
71 | actually looks like a resource locator -- a URL or a path to a file | ||
72 | on disk. | ||
51 | """ | 73 | """ |
52 | This class defines the basic interface called by the tree builders. | ||
53 | 74 | ||
54 | These methods will be called by the parser: | 75 | |
55 | reset() | 76 | class BeautifulSoup(Tag): |
56 | feed(markup) | 77 | """A data structure representing a parsed HTML or XML document. |
78 | |||
79 | Most of the methods you'll call on a BeautifulSoup object are inherited from | ||
80 | PageElement or Tag. | ||
81 | |||
82 | Internally, this class defines the basic interface called by the | ||
83 | tree builders when converting an HTML/XML document into a data | ||
84 | structure. The interface abstracts away the differences between | ||
85 | parsers. To write a new tree builder, you'll need to understand | ||
86 | these methods as a whole. | ||
87 | |||
88 | These methods will be called by the BeautifulSoup constructor: | ||
89 | * reset() | ||
90 | * feed(markup) | ||
57 | 91 | ||
58 | The tree builder may call these methods from its feed() implementation: | 92 | The tree builder may call these methods from its feed() implementation: |
59 | handle_starttag(name, attrs) # See note about return value | 93 | * handle_starttag(name, attrs) # See note about return value |
60 | handle_endtag(name) | 94 | * handle_endtag(name) |
61 | handle_data(data) # Appends to the current data node | 95 | * handle_data(data) # Appends to the current data node |
62 | endData(containerClass=NavigableString) # Ends the current data node | 96 | * endData(containerClass) # Ends the current data node |
63 | 97 | ||
64 | No matter how complicated the underlying parser is, you should be | 98 | No matter how complicated the underlying parser is, you should be |
65 | able to build a tree using 'start tag' events, 'end tag' events, | 99 | able to build a tree using 'start tag' events, 'end tag' events, |
@@ -69,24 +103,77 @@ class BeautifulSoup(Tag): | |||
69 | like HTML's <br> tag), call handle_starttag and then | 103 | like HTML's <br> tag), call handle_starttag and then |
70 | handle_endtag. | 104 | handle_endtag. |
71 | """ | 105 | """ |
106 | |||
107 | # Since BeautifulSoup subclasses Tag, it's possible to treat it as | ||
108 | # a Tag with a .name. This name makes it clear the BeautifulSoup | ||
109 | # object isn't a real markup tag. | ||
72 | ROOT_TAG_NAME = '[document]' | 110 | ROOT_TAG_NAME = '[document]' |
73 | 111 | ||
74 | # If the end-user gives no indication which tree builder they | 112 | # If the end-user gives no indication which tree builder they |
75 | # want, look for one with these features. | 113 | # want, look for one with these features. |
76 | DEFAULT_BUILDER_FEATURES = ['html', 'fast'] | 114 | DEFAULT_BUILDER_FEATURES = ['html', 'fast'] |
77 | 115 | ||
116 | # A string containing all ASCII whitespace characters, used in | ||
117 | # endData() to detect data chunks that seem 'empty'. | ||
78 | ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' | 118 | ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' |
79 | 119 | ||
80 | NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n" | 120 | NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n" |
81 | 121 | ||
82 | def __init__(self, markup="", features=None, builder=None, | 122 | def __init__(self, markup="", features=None, builder=None, |
83 | parse_only=None, from_encoding=None, exclude_encodings=None, | 123 | parse_only=None, from_encoding=None, exclude_encodings=None, |
84 | **kwargs): | 124 | element_classes=None, **kwargs): |
85 | """The Soup object is initialized as the 'root tag', and the | 125 | """Constructor. |
86 | provided markup (which can be a string or a file-like object) | 126 | |
87 | is fed into the underlying parser.""" | 127 | :param markup: A string or a file-like object representing |
88 | 128 | markup to be parsed. | |
129 | |||
130 | :param features: Desirable features of the parser to be | ||
131 | used. This may be the name of a specific parser ("lxml", | ||
132 | "lxml-xml", "html.parser", or "html5lib") or it may be the | ||
133 | type of markup to be used ("html", "html5", "xml"). It's | ||
134 | recommended that you name a specific parser, so that | ||
135 | Beautiful Soup gives you the same results across platforms | ||
136 | and virtual environments. | ||
137 | |||
138 | :param builder: A TreeBuilder subclass to instantiate (or | ||
139 | instance to use) instead of looking one up based on | ||
140 | `features`. You only need to use this if you've implemented a | ||
141 | custom TreeBuilder. | ||
142 | |||
143 | :param parse_only: A SoupStrainer. Only parts of the document | ||
144 | matching the SoupStrainer will be considered. This is useful | ||
145 | when parsing part of a document that would otherwise be too | ||
146 | large to fit into memory. | ||
147 | |||
148 | :param from_encoding: A string indicating the encoding of the | ||
149 | document to be parsed. Pass this in if Beautiful Soup is | ||
150 | guessing wrongly about the document's encoding. | ||
151 | |||
152 | :param exclude_encodings: A list of strings indicating | ||
153 | encodings known to be wrong. Pass this in if you don't know | ||
154 | the document's encoding but you know Beautiful Soup's guess is | ||
155 | wrong. | ||
156 | |||
157 | :param element_classes: A dictionary mapping BeautifulSoup | ||
158 | classes like Tag and NavigableString, to other classes you'd | ||
159 | like to be instantiated instead as the parse tree is | ||
160 | built. This is useful for subclassing Tag or NavigableString | ||
161 | to modify default behavior. | ||
162 | |||
163 | :param kwargs: For backwards compatibility purposes, the | ||
164 | constructor accepts certain keyword arguments used in | ||
165 | Beautiful Soup 3. None of these arguments do anything in | ||
166 | Beautiful Soup 4; they will result in a warning and then be | ||
167 | ignored. | ||
168 | |||
169 | Apart from this, any keyword arguments passed into the | ||
170 | BeautifulSoup constructor are propagated to the TreeBuilder | ||
171 | constructor. This makes it possible to configure a | ||
172 | TreeBuilder by passing in arguments, not just by saying which | ||
173 | one to use. | ||
174 | """ | ||
89 | if 'convertEntities' in kwargs: | 175 | if 'convertEntities' in kwargs: |
176 | del kwargs['convertEntities'] | ||
90 | warnings.warn( | 177 | warnings.warn( |
91 | "BS4 does not respect the convertEntities argument to the " | 178 | "BS4 does not respect the convertEntities argument to the " |
92 | "BeautifulSoup constructor. Entities are always converted " | 179 | "BeautifulSoup constructor. Entities are always converted " |
@@ -125,10 +212,10 @@ class BeautifulSoup(Tag): | |||
125 | if old_name in kwargs: | 212 | if old_name in kwargs: |
126 | warnings.warn( | 213 | warnings.warn( |
127 | 'The "%s" argument to the BeautifulSoup constructor ' | 214 | 'The "%s" argument to the BeautifulSoup constructor ' |
128 | 'has been renamed to "%s."' % (old_name, new_name)) | 215 | 'has been renamed to "%s."' % (old_name, new_name), |
129 | value = kwargs[old_name] | 216 | DeprecationWarning, stacklevel=3 |
130 | del kwargs[old_name] | 217 | ) |
131 | return value | 218 | return kwargs.pop(old_name) |
132 | return None | 219 | return None |
133 | 220 | ||
134 | parse_only = parse_only or deprecated_argument( | 221 | parse_only = parse_only or deprecated_argument( |
@@ -137,13 +224,23 @@ class BeautifulSoup(Tag): | |||
137 | from_encoding = from_encoding or deprecated_argument( | 224 | from_encoding = from_encoding or deprecated_argument( |
138 | "fromEncoding", "from_encoding") | 225 | "fromEncoding", "from_encoding") |
139 | 226 | ||
140 | if len(kwargs) > 0: | 227 | if from_encoding and isinstance(markup, str): |
141 | arg = list(kwargs.keys()).pop() | 228 | warnings.warn("You provided Unicode markup but also provided a value for from_encoding. Your from_encoding will be ignored.") |
142 | raise TypeError( | 229 | from_encoding = None |
143 | "__init__() got an unexpected keyword argument '%s'" % arg) | 230 | |
144 | 231 | self.element_classes = element_classes or dict() | |
145 | if builder is None: | 232 | |
146 | original_features = features | 233 | # We need this information to track whether or not the builder |
234 | # was specified well enough that we can omit the 'you need to | ||
235 | # specify a parser' warning. | ||
236 | original_builder = builder | ||
237 | original_features = features | ||
238 | |||
239 | if isinstance(builder, type): | ||
240 | # A builder class was passed in; it needs to be instantiated. | ||
241 | builder_class = builder | ||
242 | builder = None | ||
243 | elif builder is None: | ||
147 | if isinstance(features, str): | 244 | if isinstance(features, str): |
148 | features = [features] | 245 | features = [features] |
149 | if features is None or len(features) == 0: | 246 | if features is None or len(features) == 0: |
@@ -154,85 +251,227 @@ class BeautifulSoup(Tag): | |||
154 | "Couldn't find a tree builder with the features you " | 251 | "Couldn't find a tree builder with the features you " |
155 | "requested: %s. Do you need to install a parser library?" | 252 | "requested: %s. Do you need to install a parser library?" |
156 | % ",".join(features)) | 253 | % ",".join(features)) |
157 | builder = builder_class() | 254 | |
158 | if not (original_features == builder.NAME or | 255 | # At this point either we have a TreeBuilder instance in |
159 | original_features in builder.ALTERNATE_NAMES): | 256 | # builder, or we have a builder_class that we can instantiate |
257 | # with the remaining **kwargs. | ||
258 | if builder is None: | ||
259 | builder = builder_class(**kwargs) | ||
260 | if not original_builder and not ( | ||
261 | original_features == builder.NAME or | ||
262 | original_features in builder.ALTERNATE_NAMES | ||
263 | ) and markup: | ||
264 | # The user did not tell us which TreeBuilder to use, | ||
265 | # and we had to guess. Issue a warning. | ||
160 | if builder.is_xml: | 266 | if builder.is_xml: |
161 | markup_type = "XML" | 267 | markup_type = "XML" |
162 | else: | 268 | else: |
163 | markup_type = "HTML" | 269 | markup_type = "HTML" |
164 | warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict( | ||
165 | parser=builder.NAME, | ||
166 | markup_type=markup_type)) | ||
167 | 270 | ||
271 | # This code adapted from warnings.py so that we get the same line | ||
272 | # of code as our warnings.warn() call gets, even if the answer is wrong | ||
273 | # (as it may be in a multithreading situation). | ||
274 | caller = None | ||
275 | try: | ||
276 | caller = sys._getframe(1) | ||
277 | except ValueError: | ||
278 | pass | ||
279 | if caller: | ||
280 | globals = caller.f_globals | ||
281 | line_number = caller.f_lineno | ||
282 | else: | ||
283 | globals = sys.__dict__ | ||
284 | line_number = 1 | ||
285 | filename = globals.get('__file__') | ||
286 | if filename: | ||
287 | fnl = filename.lower() | ||
288 | if fnl.endswith((".pyc", ".pyo")): | ||
289 | filename = filename[:-1] | ||
290 | if filename: | ||
291 | # If there is no filename at all, the user is most likely in a REPL, | ||
292 | # and the warning is not necessary. | ||
293 | values = dict( | ||
294 | filename=filename, | ||
295 | line_number=line_number, | ||
296 | parser=builder.NAME, | ||
297 | markup_type=markup_type | ||
298 | ) | ||
299 | warnings.warn( | ||
300 | self.NO_PARSER_SPECIFIED_WARNING % values, | ||
301 | GuessedAtParserWarning, stacklevel=2 | ||
302 | ) | ||
303 | else: | ||
304 | if kwargs: | ||
305 | warnings.warn("Keyword arguments to the BeautifulSoup constructor will be ignored. These would normally be passed into the TreeBuilder constructor, but a TreeBuilder instance was passed in as `builder`.") | ||
306 | |||
168 | self.builder = builder | 307 | self.builder = builder |
169 | self.is_xml = builder.is_xml | 308 | self.is_xml = builder.is_xml |
170 | self.builder.soup = self | 309 | self.known_xml = self.is_xml |
171 | 310 | self._namespaces = dict() | |
172 | self.parse_only = parse_only | 311 | self.parse_only = parse_only |
173 | 312 | ||
174 | if hasattr(markup, 'read'): # It's a file-type object. | 313 | if hasattr(markup, 'read'): # It's a file-type object. |
175 | markup = markup.read() | 314 | markup = markup.read() |
176 | elif len(markup) <= 256: | 315 | elif len(markup) <= 256 and ( |
177 | # Print out warnings for a couple beginner problems | 316 | (isinstance(markup, bytes) and not b'<' in markup) |
317 | or (isinstance(markup, str) and not '<' in markup) | ||
318 | ): | ||
319 | # Issue warnings for a couple beginner problems | ||
178 | # involving passing non-markup to Beautiful Soup. | 320 | # involving passing non-markup to Beautiful Soup. |
179 | # Beautiful Soup will still parse the input as markup, | 321 | # Beautiful Soup will still parse the input as markup, |
180 | # just in case that's what the user really wants. | 322 | # since that is sometimes the intended behavior. |
181 | if (isinstance(markup, str) | 323 | if not self._markup_is_url(markup): |
182 | and not os.path.supports_unicode_filenames): | 324 | self._markup_resembles_filename(markup) |
183 | possible_filename = markup.encode("utf8") | ||
184 | else: | ||
185 | possible_filename = markup | ||
186 | is_file = False | ||
187 | try: | ||
188 | is_file = os.path.exists(possible_filename) | ||
189 | except Exception as e: | ||
190 | # This is almost certainly a problem involving | ||
191 | # characters not valid in filenames on this | ||
192 | # system. Just let it go. | ||
193 | pass | ||
194 | if is_file: | ||
195 | if isinstance(markup, str): | ||
196 | markup = markup.encode("utf8") | ||
197 | warnings.warn( | ||
198 | '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup) | ||
199 | if markup[:5] == "http:" or markup[:6] == "https:": | ||
200 | # TODO: This is ugly but I couldn't get it to work in | ||
201 | # Python 3 otherwise. | ||
202 | if ((isinstance(markup, bytes) and not b' ' in markup) | ||
203 | or (isinstance(markup, str) and not ' ' in markup)): | ||
204 | if isinstance(markup, str): | ||
205 | markup = markup.encode("utf8") | ||
206 | warnings.warn( | ||
207 | '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup) | ||
208 | 325 | ||
326 | rejections = [] | ||
327 | success = False | ||
209 | for (self.markup, self.original_encoding, self.declared_html_encoding, | 328 | for (self.markup, self.original_encoding, self.declared_html_encoding, |
210 | self.contains_replacement_characters) in ( | 329 | self.contains_replacement_characters) in ( |
211 | self.builder.prepare_markup( | 330 | self.builder.prepare_markup( |
212 | markup, from_encoding, exclude_encodings=exclude_encodings)): | 331 | markup, from_encoding, exclude_encodings=exclude_encodings)): |
213 | self.reset() | 332 | self.reset() |
333 | self.builder.initialize_soup(self) | ||
214 | try: | 334 | try: |
215 | self._feed() | 335 | self._feed() |
336 | success = True | ||
216 | break | 337 | break |
217 | except ParserRejectedMarkup: | 338 | except ParserRejectedMarkup as e: |
339 | rejections.append(e) | ||
218 | pass | 340 | pass |
219 | 341 | ||
342 | if not success: | ||
343 | other_exceptions = [str(e) for e in rejections] | ||
344 | raise ParserRejectedMarkup( | ||
345 | "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.\n\nOriginal exception(s) from parser:\n " + "\n ".join(other_exceptions) | ||
346 | ) | ||
347 | |||
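Every strategy yielded by prepare_markup() is tried before the aggregated ParserRejectedMarkup is raised, so callers see all of the per-strategy failures at once. A hedged sketch of how calling code might react; parse_or_none is a hypothetical helper, and whether a given parser actually rejects a given input varies:

    from bs4 import BeautifulSoup
    from bs4.builder import ParserRejectedMarkup

    def parse_or_none(markup):
        # If every encoding strategy fails, fall back gracefully
        # instead of letting the aggregated exception propagate.
        try:
            return BeautifulSoup(markup, "html.parser")
        except ParserRejectedMarkup:
            return None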
220 | # Clear out the markup and remove the builder's circular | 348 | # Clear out the markup and remove the builder's circular |
221 | # reference to this object. | 349 | # reference to this object. |
222 | self.markup = None | 350 | self.markup = None |
223 | self.builder.soup = None | 351 | self.builder.soup = None |
224 | 352 | ||
225 | def __copy__(self): | 353 | def _clone(self): |
226 | return type(self)(self.encode(), builder=self.builder) | 354 | """Create a new BeautifulSoup object with the same TreeBuilder, |
355 | but not associated with any markup. | ||
356 | |||
357 | This is the first step of the deepcopy process. | ||
358 | """ | ||
359 | clone = type(self)("", None, self.builder) | ||
227 | 360 | ||
361 | # Keep track of the encoding of the original document, | ||
362 | # since we won't be parsing it again. | ||
363 | clone.original_encoding = self.original_encoding | ||
364 | return clone | ||
365 | |||
228 | def __getstate__(self): | 366 | def __getstate__(self): |
229 | # Frequently a tree builder can't be pickled. | 367 | # Frequently a tree builder can't be pickled. |
230 | d = dict(self.__dict__) | 368 | d = dict(self.__dict__) |
231 | if 'builder' in d and not self.builder.picklable: | 369 | if 'builder' in d and d['builder'] is not None and not self.builder.picklable: |
232 | del d['builder'] | 370 | d['builder'] = type(self.builder) |
371 | # Store the contents as a Unicode string. | ||
372 | d['contents'] = [] | ||
373 | d['markup'] = self.decode() | ||
374 | |||
375 | # If _most_recent_element is present, it's a Tag object left | ||
376 | # over from initial parse. It might not be picklable and we | ||
377 | # don't need it. | ||
378 | if '_most_recent_element' in d: | ||
379 | del d['_most_recent_element'] | ||
233 | return d | 380 | return d |
234 | 381 | ||
382 | def __setstate__(self, state): | ||
383 | # If necessary, restore the TreeBuilder by looking it up. | ||
384 | self.__dict__ = state | ||
385 | if isinstance(self.builder, type): | ||
386 | self.builder = self.builder() | ||
387 | elif not self.builder: | ||
388 | # We don't know which builder was used to build this | ||
389 | # parse tree, so use a default we know is always available. | ||
390 | self.builder = HTMLParserTreeBuilder() | ||
391 | self.builder.soup = self | ||
392 | self.reset() | ||
393 | self._feed() | ||
394 | return state | ||
395 | |||
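Taken together, __getstate__ and __setstate__ mean a pickled soup stores its markup as decoded text and re-parses it on load. A minimal sketch, assuming the picklable "html.parser" builder:

    import pickle
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>Hi</p>", "html.parser")
    # The markup travels as a string; unpickling re-runs _feed().
    restored = pickle.loads(pickle.dumps(soup))
    print(restored.p.string)  # Hi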
396 | |||
397 | @classmethod | ||
398 | def _decode_markup(cls, markup): | ||
399 | """Ensure `markup` is bytes so it's safe to send into warnings.warn. | ||
400 | |||
401 | TODO: warnings.warn had this problem back in 2010 but it might not | ||
402 | anymore. | ||
403 | """ | ||
404 | if isinstance(markup, bytes): | ||
405 | decoded = markup.decode('utf-8', 'replace') | ||
406 | else: | ||
407 | decoded = markup | ||
408 | return decoded | ||
409 | |||
410 | @classmethod | ||
411 | def _markup_is_url(cls, markup): | ||
412 | """Error-handling method to raise a warning if incoming markup looks | ||
413 | like a URL. | ||
414 | |||
415 | :param markup: A string. | ||
416 | :return: Whether or not the markup resembles a URL | ||
417 | closely enough to justify a warning. | ||
418 | """ | ||
419 | if isinstance(markup, bytes): | ||
420 | space = b' ' | ||
421 | cant_start_with = (b"http:", b"https:") | ||
422 | elif isinstance(markup, str): | ||
423 | space = ' ' | ||
424 | cant_start_with = ("http:", "https:") | ||
425 | else: | ||
426 | return False | ||
427 | |||
428 | if any(markup.startswith(prefix) for prefix in cant_start_with): | ||
429 | if space not in markup: | ||
430 | warnings.warn( | ||
431 | 'The input looks more like a URL than markup. You may want to use' | ||
432 | ' an HTTP client like requests to get the document behind' | ||
433 | ' the URL, and feed that document to Beautiful Soup.', | ||
434 | MarkupResemblesLocatorWarning, | ||
435 | stacklevel=3 | ||
436 | ) | ||
437 | return True | ||
438 | return False | ||
439 | |||
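In other words, a short string that starts with http(s): and contains no spaces triggers MarkupResemblesLocatorWarning rather than being fetched. A sketch of the intended pattern using only the standard library; the URL is illustrative:

    import urllib.request
    from bs4 import BeautifulSoup

    # Beautiful Soup is not an HTTP client: fetch the body first,
    # then hand the bytes to the parser.
    with urllib.request.urlopen("https://example.com/") as response:
        soup = BeautifulSoup(response.read(), "html.parser")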
440 | @classmethod | ||
441 | def _markup_resembles_filename(cls, markup): | ||
442 | """Error-handling method to raise a warning if incoming markup | ||
443 | resembles a filename. | ||
444 | |||
445 | :param markup: A bytestring or string. | ||
446 | :return: Whether or not the markup resembles a filename | ||
447 | closely enough to justify a warning. | ||
448 | """ | ||
449 | path_characters = '/\\' | ||
450 | extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt'] | ||
451 | if isinstance(markup, bytes): | ||
452 | path_characters = path_characters.encode("utf8") | ||
453 | extensions = [x.encode('utf8') for x in extensions] | ||
454 | filelike = False | ||
455 | if any(x in markup for x in path_characters): | ||
456 | filelike = True | ||
457 | else: | ||
458 | lower = markup.lower() | ||
459 | if any(lower.endswith(ext) for ext in extensions): | ||
460 | filelike = True | ||
461 | if filelike: | ||
462 | warnings.warn( | ||
463 | 'The input looks more like a filename than markup. You may' | ||
464 | ' want to open this file and pass the filehandle into' | ||
465 | ' Beautiful Soup.', | ||
466 | MarkupResemblesLocatorWarning, stacklevel=3 | ||
467 | ) | ||
468 | return True | ||
469 | return False | ||
470 | |||
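The companion check above fires on path separators or a markup-like file extension. The intended fix is to open the file and pass the handle in; a sketch, with a hypothetical filename:

    from bs4 import BeautifulSoup

    # Passing the bare string "index.html" would trip
    # MarkupResemblesLocatorWarning; a file handle parses the contents.
    with open("index.html", encoding="utf-8") as handle:
        soup = BeautifulSoup(handle, "html.parser")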
235 | def _feed(self): | 471 | def _feed(self): |
472 | """Internal method that parses previously set markup, creating a large | ||
473 | number of Tag and NavigableString objects. | ||
474 | """ | ||
236 | # Convert the document to Unicode. | 475 | # Convert the document to Unicode. |
237 | self.builder.reset() | 476 | self.builder.reset() |
238 | 477 | ||
@@ -243,48 +482,111 @@ class BeautifulSoup(Tag): | |||
243 | self.popTag() | 482 | self.popTag() |
244 | 483 | ||
245 | def reset(self): | 484 | def reset(self): |
485 | """Reset this object to a state as though it had never parsed any | ||
486 | markup. | ||
487 | """ | ||
246 | Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) | 488 | Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) |
247 | self.hidden = 1 | 489 | self.hidden = 1 |
248 | self.builder.reset() | 490 | self.builder.reset() |
249 | self.current_data = [] | 491 | self.current_data = [] |
250 | self.currentTag = None | 492 | self.currentTag = None |
251 | self.tagStack = [] | 493 | self.tagStack = [] |
494 | self.open_tag_counter = Counter() | ||
252 | self.preserve_whitespace_tag_stack = [] | 495 | self.preserve_whitespace_tag_stack = [] |
496 | self.string_container_stack = [] | ||
497 | self._most_recent_element = None | ||
253 | self.pushTag(self) | 498 | self.pushTag(self) |
254 | 499 | ||
255 | def new_tag(self, name, namespace=None, nsprefix=None, **attrs): | 500 | def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, |
256 | """Create a new tag associated with this soup.""" | 501 | sourceline=None, sourcepos=None, **kwattrs): |
257 | return Tag(None, self.builder, name, namespace, nsprefix, attrs) | 502 | """Create a new Tag associated with this BeautifulSoup object. |
503 | |||
504 | :param name: The name of the new Tag. | ||
505 | :param namespace: The URI of the new Tag's XML namespace, if any. | ||
506 | :param nsprefix: The prefix for the new Tag's XML namespace, if any. | ||
507 | :param attrs: A dictionary of this Tag's attribute values; can | ||
508 | be used instead of `kwattrs` for attributes like 'class' | ||
509 | that are reserved words in Python. | ||
510 | :param sourceline: The line number where this tag was | ||
511 | (purportedly) found in its source document. | ||
512 | :param sourcepos: The character position within `sourceline` where this | ||
513 | tag was (purportedly) found. | ||
514 | :param kwattrs: Keyword arguments for the new Tag's attribute values. | ||
258 | 515 | ||
259 | def new_string(self, s, subclass=NavigableString): | 516 | """ |
260 | """Create a new NavigableString associated with this soup.""" | 517 | kwattrs.update(attrs) |
261 | return subclass(s) | 518 | return self.element_classes.get(Tag, Tag)( |
519 | None, self.builder, name, namespace, nsprefix, kwattrs, | ||
520 | sourceline=sourceline, sourcepos=sourcepos | ||
521 | ) | ||
522 | |||
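Because kwattrs.update(attrs) merges the two dictionaries, attribute names that are Python reserved words go through attrs while everything else can be a plain keyword argument. A short usage sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<div></div>", "html.parser")
    # 'class' is a reserved word, so it goes through the attrs dict.
    link = soup.new_tag("a", href="https://example.com",
                        attrs={"class": "external"})
    link.string = "example"
    soup.div.append(link)
    print(soup.div)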
523 | def string_container(self, base_class=None): | ||
524 | container = base_class or NavigableString | ||
525 | |||
526 | # There may be a general override of NavigableString. | ||
527 | container = self.element_classes.get( | ||
528 | container, container | ||
529 | ) | ||
530 | |||
531 | # On top of that, we may be inside a tag that needs a special | ||
532 | # container class. | ||
533 | if self.string_container_stack and container is NavigableString: | ||
534 | container = self.builder.string_containers.get( | ||
535 | self.string_container_stack[-1].name, container | ||
536 | ) | ||
537 | return container | ||
538 | |||
539 | def new_string(self, s, subclass=None): | ||
540 | """Create a new NavigableString associated with this BeautifulSoup | ||
541 | object. | ||
542 | """ | ||
543 | container = self.string_container(subclass) | ||
544 | return container(s) | ||
262 | 545 | ||
263 | def insert_before(self, successor): | 546 | def insert_before(self, *args): |
547 | """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement | ||
548 | it because there is nothing before or after it in the parse tree. | ||
549 | """ | ||
264 | raise NotImplementedError("BeautifulSoup objects don't support insert_before().") | 550 | raise NotImplementedError("BeautifulSoup objects don't support insert_before().") |
265 | 551 | ||
266 | def insert_after(self, successor): | 552 | def insert_after(self, *args): |
553 | """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement | ||
554 | it because there is nothing before or after it in the parse tree. | ||
555 | """ | ||
267 | raise NotImplementedError("BeautifulSoup objects don't support insert_after().") | 556 | raise NotImplementedError("BeautifulSoup objects don't support insert_after().") |
268 | 557 | ||
269 | def popTag(self): | 558 | def popTag(self): |
559 | """Internal method called by _popToTag when a tag is closed.""" | ||
270 | tag = self.tagStack.pop() | 560 | tag = self.tagStack.pop() |
561 | if tag.name in self.open_tag_counter: | ||
562 | self.open_tag_counter[tag.name] -= 1 | ||
271 | if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: | 563 | if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: |
272 | self.preserve_whitespace_tag_stack.pop() | 564 | self.preserve_whitespace_tag_stack.pop() |
273 | #print "Pop", tag.name | 565 | if self.string_container_stack and tag == self.string_container_stack[-1]: |
566 | self.string_container_stack.pop() | ||
567 | #print("Pop", tag.name) | ||
274 | if self.tagStack: | 568 | if self.tagStack: |
275 | self.currentTag = self.tagStack[-1] | 569 | self.currentTag = self.tagStack[-1] |
276 | return self.currentTag | 570 | return self.currentTag |
277 | 571 | ||
278 | def pushTag(self, tag): | 572 | def pushTag(self, tag): |
279 | #print "Push", tag.name | 573 | """Internal method called by handle_starttag when a tag is opened.""" |
280 | if self.currentTag: | 574 | #print("Push", tag.name) |
575 | if self.currentTag is not None: | ||
281 | self.currentTag.contents.append(tag) | 576 | self.currentTag.contents.append(tag) |
282 | self.tagStack.append(tag) | 577 | self.tagStack.append(tag) |
283 | self.currentTag = self.tagStack[-1] | 578 | self.currentTag = self.tagStack[-1] |
579 | if tag.name != self.ROOT_TAG_NAME: | ||
580 | self.open_tag_counter[tag.name] += 1 | ||
284 | if tag.name in self.builder.preserve_whitespace_tags: | 581 | if tag.name in self.builder.preserve_whitespace_tags: |
285 | self.preserve_whitespace_tag_stack.append(tag) | 582 | self.preserve_whitespace_tag_stack.append(tag) |
583 | if tag.name in self.builder.string_containers: | ||
584 | self.string_container_stack.append(tag) | ||
286 | 585 | ||
287 | def endData(self, containerClass=NavigableString): | 586 | def endData(self, containerClass=None): |
587 | """Method called by the TreeBuilder when the end of a data segment | ||
588 | occurs. | ||
589 | """ | ||
288 | if self.current_data: | 590 | if self.current_data: |
289 | current_data = ''.join(self.current_data) | 591 | current_data = ''.join(self.current_data) |
290 | # If whitespace is not preserved, and this string contains | 592 | # If whitespace is not preserved, and this string contains |
@@ -311,61 +613,93 @@ class BeautifulSoup(Tag): | |||
311 | not self.parse_only.search(current_data)): | 613 | not self.parse_only.search(current_data)): |
312 | return | 614 | return |
313 | 615 | ||
616 | containerClass = self.string_container(containerClass) | ||
314 | o = containerClass(current_data) | 617 | o = containerClass(current_data) |
315 | self.object_was_parsed(o) | 618 | self.object_was_parsed(o) |
316 | 619 | ||
317 | def object_was_parsed(self, o, parent=None, most_recent_element=None): | 620 | def object_was_parsed(self, o, parent=None, most_recent_element=None): |
318 | """Add an object to the parse tree.""" | 621 | """Method called by the TreeBuilder to integrate an object into the parse tree.""" |
319 | parent = parent or self.currentTag | 622 | if parent is None: |
320 | previous_element = most_recent_element or self._most_recent_element | 623 | parent = self.currentTag |
624 | if most_recent_element is not None: | ||
625 | previous_element = most_recent_element | ||
626 | else: | ||
627 | previous_element = self._most_recent_element | ||
321 | 628 | ||
322 | next_element = previous_sibling = next_sibling = None | 629 | next_element = previous_sibling = next_sibling = None |
323 | if isinstance(o, Tag): | 630 | if isinstance(o, Tag): |
324 | next_element = o.next_element | 631 | next_element = o.next_element |
325 | next_sibling = o.next_sibling | 632 | next_sibling = o.next_sibling |
326 | previous_sibling = o.previous_sibling | 633 | previous_sibling = o.previous_sibling |
327 | if not previous_element: | 634 | if previous_element is None: |
328 | previous_element = o.previous_element | 635 | previous_element = o.previous_element |
329 | 636 | ||
637 | fix = parent.next_element is not None | ||
638 | |||
330 | o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) | 639 | o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) |
331 | 640 | ||
332 | self._most_recent_element = o | 641 | self._most_recent_element = o |
333 | parent.contents.append(o) | 642 | parent.contents.append(o) |
334 | 643 | ||
335 | if parent.next_sibling: | 644 | # Check if we are inserting into an already parsed node. |
336 | # This node is being inserted into an element that has | 645 | if fix: |
337 | # already been parsed. Deal with any dangling references. | 646 | self._linkage_fixer(parent) |
338 | index = parent.contents.index(o) | 647 | |
339 | if index == 0: | 648 | def _linkage_fixer(self, el): |
340 | previous_element = parent | 649 | """Make sure linkage of this fragment is sound.""" |
341 | previous_sibling = None | 650 | |
342 | else: | 651 | first = el.contents[0] |
343 | previous_element = previous_sibling = parent.contents[index-1] | 652 | child = el.contents[-1] |
344 | if index == len(parent.contents)-1: | 653 | descendant = child |
345 | next_element = parent.next_sibling | 654 | |
346 | next_sibling = None | 655 | if child is first and el.parent is not None: |
347 | else: | 656 | # Parent should be linked to first child |
348 | next_element = next_sibling = parent.contents[index+1] | 657 | el.next_element = child |
349 | 658 | # We are no longer linked to whatever this element is | |
350 | o.previous_element = previous_element | 659 | prev_el = child.previous_element |
351 | if previous_element: | 660 | if prev_el is not None and prev_el is not el: |
352 | previous_element.next_element = o | 661 | prev_el.next_element = None |
353 | o.next_element = next_element | 662 | # First child should be linked to the parent, and no previous siblings. |
354 | if next_element: | 663 | child.previous_element = el |
355 | next_element.previous_element = o | 664 | child.previous_sibling = None |
356 | o.next_sibling = next_sibling | 665 | |
357 | if next_sibling: | 666 | # We have no sibling as we've been appended as the last. |
358 | next_sibling.previous_sibling = o | 667 | child.next_sibling = None |
359 | o.previous_sibling = previous_sibling | 668 | |
360 | if previous_sibling: | 669 | # This index is a tag, dig deeper for a "last descendant" |
361 | previous_sibling.next_sibling = o | 670 | if isinstance(child, Tag) and child.contents: |
671 | descendant = child._last_descendant(False) | ||
672 | |||
673 | # As the final step, link last descendant. It should be linked | ||
674 | # to the parent's next sibling (if found), else walk up the chain | ||
675 | # and find a parent with a sibling. It should have no next sibling. | ||
676 | descendant.next_element = None | ||
677 | descendant.next_sibling = None | ||
678 | target = el | ||
679 | while True: | ||
680 | if target is None: | ||
681 | break | ||
682 | elif target.next_sibling is not None: | ||
683 | descendant.next_element = target.next_sibling | ||
684 | target.next_sibling.previous_element = child | ||
685 | break | ||
686 | target = target.parent | ||
362 | 687 | ||
363 | def _popToTag(self, name, nsprefix=None, inclusivePop=True): | 688 | def _popToTag(self, name, nsprefix=None, inclusivePop=True): |
364 | """Pops the tag stack up to and including the most recent | 689 | """Pops the tag stack up to and including the most recent |
365 | instance of the given tag. If inclusivePop is false, pops the tag | 690 | instance of the given tag. |
366 | stack up to but *not* including the most recent instqance of | 691 | |
367 | the given tag.""" | 692 | If there are no open tags with the given name, nothing will be |
368 | #print "Popping to %s" % name | 693 | popped. |
694 | |||
695 | :param name: Pop up to the most recent tag with this name. | ||
696 | :param nsprefix: The namespace prefix that goes with `name`. | ||
697 | :param inclusivePop: If this is false, pops the tag stack up | ||
698 | to but *not* including the most recent instance of the | ||
699 | given tag. | ||
700 | |||
701 | """ | ||
702 | #print("Popping to %s" % name) | ||
369 | if name == self.ROOT_TAG_NAME: | 703 | if name == self.ROOT_TAG_NAME: |
370 | # The BeautifulSoup object itself can never be popped. | 704 | # The BeautifulSoup object itself can never be popped. |
371 | return | 705 | return |
@@ -374,6 +708,8 @@ class BeautifulSoup(Tag): | |||
374 | 708 | ||
375 | stack_size = len(self.tagStack) | 709 | stack_size = len(self.tagStack) |
376 | for i in range(stack_size - 1, 0, -1): | 710 | for i in range(stack_size - 1, 0, -1): |
711 | if not self.open_tag_counter.get(name): | ||
712 | break | ||
377 | t = self.tagStack[i] | 713 | t = self.tagStack[i] |
378 | if (name == t.name and nsprefix == t.prefix): | 714 | if (name == t.name and nsprefix == t.prefix): |
379 | if inclusivePop: | 715 | if inclusivePop: |
@@ -383,16 +719,26 @@ class BeautifulSoup(Tag): | |||
383 | 719 | ||
384 | return most_recently_popped | 720 | return most_recently_popped |
385 | 721 | ||
386 | def handle_starttag(self, name, namespace, nsprefix, attrs): | 722 | def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None, |
387 | """Push a start tag on to the stack. | 723 | sourcepos=None, namespaces=None): |
388 | 724 | """Called by the tree builder when a new tag is encountered. | |
389 | If this method returns None, the tag was rejected by the | 725 | |
390 | SoupStrainer. You should proceed as if the tag had not occured | 726 | :param name: Name of the tag. |
727 | :param nsprefix: Namespace prefix for the tag. | ||
728 | :param attrs: A dictionary of attribute values. | ||
729 | :param sourceline: The line number where this tag was found in its | ||
730 | source document. | ||
731 | :param sourcepos: The character position within `sourceline` where this | ||
732 | tag was found. | ||
733 | :param namespaces: A dictionary of all namespace prefix mappings | ||
734 | currently in scope in the document. | ||
735 | |||
736 | If this method returns None, the tag was rejected by an active | ||
737 | SoupStrainer. You should proceed as if the tag had not occurred | ||
391 | in the document. For instance, if this was a self-closing tag, | 738 | in the document. For instance, if this was a self-closing tag, |
392 | don't call handle_endtag. | 739 | don't call handle_endtag. |
393 | """ | 740 | """ |
394 | 741 | # print("Start tag %s: %s" % (name, attrs)) | |
395 | # print "Start tag %s: %s" % (name, attrs) | ||
396 | self.endData() | 742 | self.endData() |
397 | 743 | ||
398 | if (self.parse_only and len(self.tagStack) <= 1 | 744 | if (self.parse_only and len(self.tagStack) <= 1 |
@@ -400,34 +746,54 @@ class BeautifulSoup(Tag): | |||
400 | or not self.parse_only.search_tag(name, attrs))): | 746 | or not self.parse_only.search_tag(name, attrs))): |
401 | return None | 747 | return None |
402 | 748 | ||
403 | tag = Tag(self, self.builder, name, namespace, nsprefix, attrs, | 749 | tag = self.element_classes.get(Tag, Tag)( |
404 | self.currentTag, self._most_recent_element) | 750 | self, self.builder, name, namespace, nsprefix, attrs, |
751 | self.currentTag, self._most_recent_element, | ||
752 | sourceline=sourceline, sourcepos=sourcepos, | ||
753 | namespaces=namespaces | ||
754 | ) | ||
405 | if tag is None: | 755 | if tag is None: |
406 | return tag | 756 | return tag |
407 | if self._most_recent_element: | 757 | if self._most_recent_element is not None: |
408 | self._most_recent_element.next_element = tag | 758 | self._most_recent_element.next_element = tag |
409 | self._most_recent_element = tag | 759 | self._most_recent_element = tag |
410 | self.pushTag(tag) | 760 | self.pushTag(tag) |
411 | return tag | 761 | return tag |
412 | 762 | ||
413 | def handle_endtag(self, name, nsprefix=None): | 763 | def handle_endtag(self, name, nsprefix=None): |
414 | #print "End tag: " + name | 764 | """Called by the tree builder when an ending tag is encountered. |
765 | |||
766 | :param name: Name of the tag. | ||
767 | :param nsprefix: Namespace prefix for the tag. | ||
768 | """ | ||
769 | #print("End tag: " + name) | ||
415 | self.endData() | 770 | self.endData() |
416 | self._popToTag(name, nsprefix) | 771 | self._popToTag(name, nsprefix) |
417 | 772 | ||
418 | def handle_data(self, data): | 773 | def handle_data(self, data): |
774 | """Called by the tree builder when a chunk of textual data is encountered.""" | ||
419 | self.current_data.append(data) | 775 | self.current_data.append(data) |
420 | 776 | ||
421 | def decode(self, pretty_print=False, | 777 | def decode(self, pretty_print=False, |
422 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, | 778 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, |
423 | formatter="minimal"): | 779 | formatter="minimal", iterator=None): |
424 | """Returns a string or Unicode representation of this document. | 780 | """Returns a string or Unicode representation of the parse tree |
425 | To get Unicode, pass None for encoding.""" | 781 | as an HTML or XML document. |
426 | 782 | ||
783 | :param pretty_print: If this is True, indentation will be used to | ||
784 | make the document more readable. | ||
785 | :param eventual_encoding: The encoding of the final document. | ||
786 | If this is None, the document will be a Unicode string. | ||
787 | """ | ||
427 | if self.is_xml: | 788 | if self.is_xml: |
428 | # Print the XML declaration | 789 | # Print the XML declaration |
429 | encoding_part = '' | 790 | encoding_part = '' |
430 | if eventual_encoding is not None: | 791 | if eventual_encoding in PYTHON_SPECIFIC_ENCODINGS: |
792 | # This is a special Python encoding; it can't actually | ||
793 | # go into an XML document because it means nothing | ||
794 | # outside of Python. | ||
795 | eventual_encoding = None | ||
796 | if eventual_encoding is not None: | ||
431 | encoding_part = ' encoding="%s"' % eventual_encoding | 797 | encoding_part = ' encoding="%s"' % eventual_encoding |
432 | prefix = '<?xml version="1.0"%s?>\n' % encoding_part | 798 | prefix = '<?xml version="1.0"%s?>\n' % encoding_part |
433 | else: | 799 | else: |
@@ -437,9 +803,9 @@ class BeautifulSoup(Tag): | |||
437 | else: | 803 | else: |
438 | indent_level = 0 | 804 | indent_level = 0 |
439 | return prefix + super(BeautifulSoup, self).decode( | 805 | return prefix + super(BeautifulSoup, self).decode( |
440 | indent_level, eventual_encoding, formatter) | 806 | indent_level, eventual_encoding, formatter, iterator) |
441 | 807 | ||
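The PYTHON_SPECIFIC_ENCODINGS check keeps codecs that only mean something to Python out of the XML declaration. A sketch of the observable behavior; parsing with features="xml" assumes lxml is installed:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<root/>", "xml")
    print(soup.decode(eventual_encoding="utf-8"))
    # <?xml version="1.0" encoding="utf-8"?> ...
    print(soup.decode(eventual_encoding=None))
    # <?xml version="1.0"?> ... (no encoding attribute at all)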
442 | # Alias to make it easier to type import: 'from bs4 import _soup' | 808 | # Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup' |
443 | _s = BeautifulSoup | 809 | _s = BeautifulSoup |
444 | _soup = BeautifulSoup | 810 | _soup = BeautifulSoup |
445 | 811 | ||
@@ -450,19 +816,24 @@ class BeautifulStoneSoup(BeautifulSoup): | |||
450 | kwargs['features'] = 'xml' | 816 | kwargs['features'] = 'xml' |
451 | warnings.warn( | 817 | warnings.warn( |
452 | 'The BeautifulStoneSoup class is deprecated. Instead of using ' | 818 | 'The BeautifulStoneSoup class is deprecated. Instead of using ' |
453 | 'it, pass features="xml" into the BeautifulSoup constructor.') | 819 | 'it, pass features="xml" into the BeautifulSoup constructor.', |
820 | DeprecationWarning, stacklevel=2 | ||
821 | ) | ||
454 | super(BeautifulStoneSoup, self).__init__(*args, **kwargs) | 822 | super(BeautifulStoneSoup, self).__init__(*args, **kwargs) |
455 | 823 | ||
456 | 824 | ||
457 | class StopParsing(Exception): | 825 | class StopParsing(Exception): |
826 | """Exception raised by a TreeBuilder if it's unable to continue parsing.""" | ||
458 | pass | 827 | pass |
459 | 828 | ||
460 | class FeatureNotFound(ValueError): | 829 | class FeatureNotFound(ValueError): |
830 | """Exception raised by the BeautifulSoup constructor if no parser with the | ||
831 | requested features is found. | ||
832 | """ | ||
461 | pass | 833 | pass |
462 | 834 | ||
463 | 835 | ||
464 | #By default, act as an HTML pretty-printer. | 836 | #If this file is run as a script, act as an HTML pretty-printer. |
465 | if __name__ == '__main__': | 837 | if __name__ == '__main__': |
466 | import sys | ||
467 | soup = BeautifulSoup(sys.stdin) | 838 | soup = BeautifulSoup(sys.stdin) |
468 | print(soup.prettify()) | 839 | print(soup.prettify()) |
diff --git a/bitbake/lib/bs4/builder/__init__.py b/bitbake/lib/bs4/builder/__init__.py index 6ccd4d23d6..ffb31fc25e 100644 --- a/bitbake/lib/bs4/builder/__init__.py +++ b/bitbake/lib/bs4/builder/__init__.py | |||
@@ -1,11 +1,21 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | from collections import defaultdict | 4 | from collections import defaultdict |
2 | import itertools | 5 | import itertools |
6 | import re | ||
7 | import warnings | ||
3 | import sys | 8 | import sys |
4 | from bs4.element import ( | 9 | from bs4.element import ( |
5 | CharsetMetaAttributeValue, | 10 | CharsetMetaAttributeValue, |
6 | ContentMetaAttributeValue, | 11 | ContentMetaAttributeValue, |
7 | whitespace_re | 12 | RubyParenthesisString, |
8 | ) | 13 | RubyTextString, |
14 | Stylesheet, | ||
15 | Script, | ||
16 | TemplateString, | ||
17 | nonwhitespace_re | ||
18 | ) | ||
9 | 19 | ||
10 | __all__ = [ | 20 | __all__ = [ |
11 | 'HTMLTreeBuilder', | 21 | 'HTMLTreeBuilder', |
@@ -22,20 +32,41 @@ XML = 'xml' | |||
22 | HTML = 'html' | 32 | HTML = 'html' |
23 | HTML_5 = 'html5' | 33 | HTML_5 = 'html5' |
24 | 34 | ||
35 | class XMLParsedAsHTMLWarning(UserWarning): | ||
36 | """The warning issued when an HTML parser is used to parse | ||
37 | XML that is not XHTML. | ||
38 | """ | ||
39 | MESSAGE = """It looks like you're parsing an XML document using an HTML parser. If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor.""" | ||
40 | |||
25 | 41 | ||
26 | class TreeBuilderRegistry(object): | 42 | class TreeBuilderRegistry(object): |
27 | 43 | """A way of looking up TreeBuilder subclasses by their name or by desired | |
44 | features. | ||
45 | """ | ||
46 | |||
28 | def __init__(self): | 47 | def __init__(self): |
29 | self.builders_for_feature = defaultdict(list) | 48 | self.builders_for_feature = defaultdict(list) |
30 | self.builders = [] | 49 | self.builders = [] |
31 | 50 | ||
32 | def register(self, treebuilder_class): | 51 | def register(self, treebuilder_class): |
33 | """Register a treebuilder based on its advertised features.""" | 52 | """Register a treebuilder based on its advertised features. |
53 | |||
54 | :param treebuilder_class: A subclass of TreeBuilder; its .features | ||
55 | attribute should list its features. | ||
56 | """ | ||
34 | for feature in treebuilder_class.features: | 57 | for feature in treebuilder_class.features: |
35 | self.builders_for_feature[feature].insert(0, treebuilder_class) | 58 | self.builders_for_feature[feature].insert(0, treebuilder_class) |
36 | self.builders.insert(0, treebuilder_class) | 59 | self.builders.insert(0, treebuilder_class) |
37 | 60 | ||
38 | def lookup(self, *features): | 61 | def lookup(self, *features): |
62 | """Look up a TreeBuilder subclass with the desired features. | ||
63 | |||
64 | :param features: A list of features to look for. If none are | ||
65 | provided, the most recently registered TreeBuilder subclass | ||
66 | will be used. | ||
67 | :return: A TreeBuilder subclass, or None if there's no | ||
68 | registered subclass with all the requested features. | ||
69 | """ | ||
39 | if len(self.builders) == 0: | 70 | if len(self.builders) == 0: |
40 | # There are no builders at all. | 71 | # There are no builders at all. |
41 | return None | 72 | return None |
@@ -78,7 +109,7 @@ class TreeBuilderRegistry(object): | |||
78 | builder_registry = TreeBuilderRegistry() | 109 | builder_registry = TreeBuilderRegistry() |
79 | 110 | ||
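Since register() inserts at position 0, the most recently registered builder wins ties for a feature. A minimal lookup sketch against the module-level registry:

    from bs4.builder import builder_registry

    # Returns a TreeBuilder subclass, or None if no registered
    # builder advertises every requested feature.
    cls = builder_registry.lookup("html")
    print(cls.NAME if cls else "no builder found")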
80 | class TreeBuilder(object): | 111 | class TreeBuilder(object): |
81 | """Turn a document into a Beautiful Soup object tree.""" | 112 | """Turn a textual document into a Beautiful Soup object tree.""" |
82 | 113 | ||
83 | NAME = "[Unknown tree builder]" | 114 | NAME = "[Unknown tree builder]" |
84 | ALTERNATE_NAMES = [] | 115 | ALTERNATE_NAMES = [] |
@@ -86,19 +117,89 @@ class TreeBuilder(object): | |||
86 | 117 | ||
87 | is_xml = False | 118 | is_xml = False |
88 | picklable = False | 119 | picklable = False |
89 | preserve_whitespace_tags = set() | ||
90 | empty_element_tags = None # A tag will be considered an empty-element | 120 | empty_element_tags = None # A tag will be considered an empty-element |
91 | # tag when and only when it has no contents. | 121 | # tag when and only when it has no contents. |
92 | 122 | ||
93 | # A value for these tag/attribute combinations is a space- or | 123 | # A value for these tag/attribute combinations is a space- or |
94 | # comma-separated list of CDATA, rather than a single CDATA. | 124 | # comma-separated list of CDATA, rather than a single CDATA. |
95 | cdata_list_attributes = {} | 125 | DEFAULT_CDATA_LIST_ATTRIBUTES = defaultdict(list) |
96 | 126 | ||
97 | 127 | # Whitespace should be preserved inside these tags. | |
98 | def __init__(self): | 128 | DEFAULT_PRESERVE_WHITESPACE_TAGS = set() |
129 | |||
130 | # The textual contents of tags with these names should be | ||
131 | # instantiated with some class other than NavigableString. | ||
132 | DEFAULT_STRING_CONTAINERS = {} | ||
133 | |||
134 | USE_DEFAULT = object() | ||
135 | |||
136 | # Most parsers don't keep track of line numbers. | ||
137 | TRACKS_LINE_NUMBERS = False | ||
138 | |||
139 | def __init__(self, multi_valued_attributes=USE_DEFAULT, | ||
140 | preserve_whitespace_tags=USE_DEFAULT, | ||
141 | store_line_numbers=USE_DEFAULT, | ||
142 | string_containers=USE_DEFAULT, | ||
143 | ): | ||
144 | """Constructor. | ||
145 | |||
146 | :param multi_valued_attributes: If this is set to None, the | ||
147 | TreeBuilder will not turn any values for attributes like | ||
148 | 'class' into lists. Setting this to a dictionary will | ||
149 | customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES | ||
150 | for an example. | ||
151 | |||
152 | Internally, these are called "CDATA list attributes", but that | ||
153 | probably doesn't make sense to an end-user, so the argument name | ||
154 | is `multi_valued_attributes`. | ||
155 | |||
156 | :param preserve_whitespace_tags: A list of tags to treat | ||
157 | the way <pre> tags are treated in HTML. Tags in this list | ||
158 | are immune from pretty-printing; their contents will always be | ||
159 | output as-is. | ||
160 | |||
161 | :param string_containers: A dictionary mapping tag names to | ||
162 | the classes that should be instantiated to contain the textual | ||
163 | contents of those tags. The default is to use NavigableString | ||
164 | for every tag, no matter what the name. You can override the | ||
165 | default by changing DEFAULT_STRING_CONTAINERS. | ||
166 | |||
167 | :param store_line_numbers: If the parser keeps track of the | ||
168 | line numbers and positions of the original markup, that | ||
169 | information will, by default, be stored in each corresponding | ||
170 | `Tag` object. You can turn this off by passing | ||
171 | store_line_numbers=False. If the parser you're using doesn't | ||
172 | keep track of this information, then setting store_line_numbers=True | ||
173 | will do nothing. | ||
174 | """ | ||
99 | self.soup = None | 175 | self.soup = None |
100 | 176 | if multi_valued_attributes is self.USE_DEFAULT: | |
177 | multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES | ||
178 | self.cdata_list_attributes = multi_valued_attributes | ||
179 | if preserve_whitespace_tags is self.USE_DEFAULT: | ||
180 | preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS | ||
181 | self.preserve_whitespace_tags = preserve_whitespace_tags | ||
182 | if store_line_numbers is self.USE_DEFAULT: | ||
183 | store_line_numbers = self.TRACKS_LINE_NUMBERS | ||
184 | self.store_line_numbers = store_line_numbers | ||
185 | if string_containers is self.USE_DEFAULT: | ||
186 | string_containers = self.DEFAULT_STRING_CONTAINERS | ||
187 | self.string_containers = string_containers | ||
188 | |||
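These constructor knobs are normally reached through the BeautifulSoup constructor, which forwards unrecognized keyword arguments to the TreeBuilder. A sketch of the multi_valued_attributes switch:

    from bs4 import BeautifulSoup

    # With the default builder, class="a b" becomes ['a', 'b'];
    # passing None keeps every attribute value a plain string.
    soup = BeautifulSoup('<p class="a b"></p>', "html.parser",
                         multi_valued_attributes=None)
    print(soup.p["class"])  # 'a b'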
189 | def initialize_soup(self, soup): | ||
190 | """The BeautifulSoup object has been initialized and is now | ||
191 | being associated with the TreeBuilder. | ||
192 | |||
193 | :param soup: A BeautifulSoup object. | ||
194 | """ | ||
195 | self.soup = soup | ||
196 | |||
101 | def reset(self): | 197 | def reset(self): |
198 | """Do any work necessary to reset the underlying parser | ||
199 | for a new document. | ||
200 | |||
201 | By default, this does nothing. | ||
202 | """ | ||
102 | pass | 203 | pass |
103 | 204 | ||
104 | def can_be_empty_element(self, tag_name): | 205 | def can_be_empty_element(self, tag_name): |
@@ -110,24 +211,58 @@ class TreeBuilder(object): | |||
110 | For instance: an HTMLBuilder does not consider a <p> tag to be | 211 | For instance: an HTMLBuilder does not consider a <p> tag to be |
111 | an empty-element tag (it's not in | 212 | an empty-element tag (it's not in |
112 | HTMLBuilder.empty_element_tags). This means an empty <p> tag | 213 | HTMLBuilder.empty_element_tags). This means an empty <p> tag |
113 | will be presented as "<p></p>", not "<p />". | 214 | will be presented as "<p></p>", not "<p/>" or "<p>". |
114 | 215 | ||
115 | The default implementation has no opinion about which tags are | 216 | The default implementation has no opinion about which tags are |
116 | empty-element tags, so a tag will be presented as an | 217 | empty-element tags, so a tag will be presented as an |
117 | empty-element tag if and only if it has no contents. | 218 | empty-element tag if and only if it has no children. |
118 | "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will | 219 | "<foo></foo>" will become "<foo/>", and "<foo>bar</foo>" will |
119 | be left alone. | 220 | be left alone. |
221 | |||
222 | :param tag_name: The name of a markup tag. | ||
120 | """ | 223 | """ |
121 | if self.empty_element_tags is None: | 224 | if self.empty_element_tags is None: |
122 | return True | 225 | return True |
123 | return tag_name in self.empty_element_tags | 226 | return tag_name in self.empty_element_tags |
124 | 227 | ||
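The effect is easiest to see on output: a tag in empty_element_tags collapses when childless, while other empty tags keep an explicit close tag. A quick sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p></p><br/>", "html.parser")
    # <br> is an empty-element tag; <p> is not, so it stays "<p></p>".
    print(soup.decode())  # <p></p><br/>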
125 | def feed(self, markup): | 228 | def feed(self, markup): |
229 | """Run some incoming markup through some parsing process, | ||
230 | populating the `BeautifulSoup` object in self.soup. | ||
231 | |||
232 | This method is not implemented in TreeBuilder; it must be | ||
233 | implemented in subclasses. | ||
234 | |||
235 | :return: None. | ||
236 | """ | ||
126 | raise NotImplementedError() | 237 | raise NotImplementedError() |
127 | 238 | ||
128 | def prepare_markup(self, markup, user_specified_encoding=None, | 239 | def prepare_markup(self, markup, user_specified_encoding=None, |
129 | document_declared_encoding=None): | 240 | document_declared_encoding=None, exclude_encodings=None): |
130 | return markup, None, None, False | 241 | """Run any preliminary steps necessary to make incoming markup |
242 | acceptable to the parser. | ||
243 | |||
244 | :param markup: Some markup -- probably a bytestring. | ||
245 | :param user_specified_encoding: The user asked to try this encoding. | ||
246 | :param document_declared_encoding: The markup itself claims to be | ||
247 | in this encoding. NOTE: This argument is not used by the | ||
248 | calling code and can probably be removed. | ||
249 | :param exclude_encodings: The user asked _not_ to try any of | ||
250 | these encodings. | ||
251 | |||
252 | :yield: A series of 4-tuples: | ||
253 | (markup, encoding, declared encoding, | ||
254 | has undergone character replacement) | ||
255 | |||
256 | Each 4-tuple represents a strategy for converting the | ||
257 | document to Unicode and parsing it. Each strategy will be tried | ||
258 | in turn. | ||
259 | |||
260 | By default, the only strategy is to parse the markup | ||
261 | as-is. See `LXMLTreeBuilderForXML` and | ||
262 | `HTMLParserTreeBuilder` for implementations that take into | ||
263 | account the quirks of particular parsers. | ||
264 | """ | ||
265 | yield markup, None, None, False | ||
131 | 266 | ||
132 | def test_fragment_to_document(self, fragment): | 267 | def test_fragment_to_document(self, fragment): |
133 | """Wrap an HTML fragment to make it look like a document. | 268 | """Wrap an HTML fragment to make it look like a document. |
@@ -139,16 +274,36 @@ class TreeBuilder(object): | |||
139 | results against other HTML fragments. | 274 | results against other HTML fragments. |
140 | 275 | ||
141 | This method should not be used outside of tests. | 276 | This method should not be used outside of tests. |
277 | |||
278 | :param fragment: A string -- fragment of HTML. | ||
279 | :return: A string -- a full HTML document. | ||
142 | """ | 280 | """ |
143 | return fragment | 281 | return fragment |
144 | 282 | ||
145 | def set_up_substitutions(self, tag): | 283 | def set_up_substitutions(self, tag): |
284 | """Set up any substitutions that will need to be performed on | ||
285 | a `Tag` when it's output as a string. | ||
286 | |||
287 | By default, this does nothing. See `HTMLTreeBuilder` for a | ||
288 | case where this is used. | ||
289 | |||
290 | :param tag: A `Tag` | ||
291 | :return: Whether or not a substitution was performed. | ||
292 | """ | ||
146 | return False | 293 | return False |
147 | 294 | ||
148 | def _replace_cdata_list_attribute_values(self, tag_name, attrs): | 295 | def _replace_cdata_list_attribute_values(self, tag_name, attrs): |
149 | """Replaces class="foo bar" with class=["foo", "bar"] | 296 | """When an attribute value is associated with a tag that can |
297 | have multiple values for that attribute, convert the string | ||
298 | value to a list of strings. | ||
150 | 299 | ||
151 | Modifies its input in place. | 300 | Basically, replaces class="foo bar" with class=["foo", "bar"] |
301 | |||
302 | NOTE: This method modifies its input in place. | ||
303 | |||
304 | :param tag_name: The name of a tag. | ||
305 | :param attrs: A dictionary containing the tag's attributes. | ||
306 | Any appropriate attribute values will be modified in place. | ||
152 | """ | 307 | """ |
153 | if not attrs: | 308 | if not attrs: |
154 | return attrs | 309 | return attrs |
@@ -163,7 +318,7 @@ class TreeBuilder(object): | |||
163 | # values. Split it into a list. | 318 | # values. Split it into a list. |
164 | value = attrs[attr] | 319 | value = attrs[attr] |
165 | if isinstance(value, str): | 320 | if isinstance(value, str): |
166 | values = whitespace_re.split(value) | 321 | values = nonwhitespace_re.findall(value) |
167 | else: | 322 | else: |
168 | # html5lib sometimes calls setAttributes twice | 323 | # html5lib sometimes calls setAttributes twice |
169 | # for the same tag when rearranging the parse | 324 | # for the same tag when rearranging the parse |
@@ -174,9 +329,13 @@ class TreeBuilder(object): | |||
174 | values = value | 329 | values = value |
175 | attrs[attr] = values | 330 | attrs[attr] = values |
176 | return attrs | 331 | return attrs |
177 | 332 | ||
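Concretely, the in-place rewrite means multi-valued attributes come back as lists while single-valued ones stay strings. A sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p class="foo bar" id="x"></p>', "html.parser")
    print(soup.p["class"])  # ['foo', 'bar'] -- a CDATA list attribute
    print(soup.p["id"])     # 'x' -- single-valued, left as a string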
178 | class SAXTreeBuilder(TreeBuilder): | 333 | class SAXTreeBuilder(TreeBuilder): |
179 | """A Beautiful Soup treebuilder that listens for SAX events.""" | 334 | """A Beautiful Soup treebuilder that listens for SAX events. |
335 | |||
336 | This is not currently used for anything, but it demonstrates | ||
337 | how a simple TreeBuilder would work. | ||
338 | """ | ||
180 | 339 | ||
181 | def feed(self, markup): | 340 | def feed(self, markup): |
182 | raise NotImplementedError() | 341 | raise NotImplementedError() |
@@ -186,11 +345,11 @@ class SAXTreeBuilder(TreeBuilder): | |||
186 | 345 | ||
187 | def startElement(self, name, attrs): | 346 | def startElement(self, name, attrs): |
188 | attrs = dict((key[1], value) for key, value in list(attrs.items())) | 347 | attrs = dict((key[1], value) for key, value in list(attrs.items())) |
189 | #print "Start %s, %r" % (name, attrs) | 348 | #print("Start %s, %r" % (name, attrs)) |
190 | self.soup.handle_starttag(name, attrs) | 349 | self.soup.handle_starttag(name, attrs) |
191 | 350 | ||
192 | def endElement(self, name): | 351 | def endElement(self, name): |
193 | #print "End %s" % name | 352 | #print("End %s" % name) |
194 | self.soup.handle_endtag(name) | 353 | self.soup.handle_endtag(name) |
195 | 354 | ||
196 | def startElementNS(self, nsTuple, nodeName, attrs): | 355 | def startElementNS(self, nsTuple, nodeName, attrs): |
@@ -227,10 +386,44 @@ class HTMLTreeBuilder(TreeBuilder): | |||
227 | Such as which tags are empty-element tags. | 386 | Such as which tags are empty-element tags. |
228 | """ | 387 | """ |
229 | 388 | ||
230 | preserve_whitespace_tags = set(['pre', 'textarea']) | 389 | empty_element_tags = set([ |
231 | empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta', | 390 | # These are from HTML5. |
232 | 'spacer', 'link', 'frame', 'base']) | 391 | 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr', |
233 | 392 | ||
393 | # These are from earlier versions of HTML and are removed in HTML5. | ||
394 | 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer' | ||
395 | ]) | ||
396 | |||
397 | # The HTML standard defines these as block-level elements. Beautiful | ||
398 | # Soup does not treat these elements differently from other elements, | ||
399 | # but it may do so eventually, and this information is available if | ||
400 | # you need to use it. | ||
401 | block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"]) | ||
402 | |||
403 | # These HTML tags need special treatment so they can be | ||
404 | # represented by a string class other than NavigableString. | ||
405 | # | ||
406 | # For some of these tags, it's because the HTML standard defines | ||
407 | # an unusual content model for them. I made this list by going | ||
408 | # through the HTML spec | ||
409 | # (https://html.spec.whatwg.org/#metadata-content) and looking for | ||
410 | # "metadata content" elements that can contain strings. | ||
411 | # | ||
412 | # The Ruby tags (<rt> and <rp>) are here despite being normal | ||
413 | # "phrasing content" tags, because the content they contain is | ||
414 | # qualitatively different from other text in the document, and it | ||
415 | # can be useful to be able to distinguish it. | ||
416 | # | ||
417 | # TODO: Arguably <noscript> could go here but it seems | ||
418 | # qualitatively different from the other tags. | ||
419 | DEFAULT_STRING_CONTAINERS = { | ||
420 | 'rt' : RubyTextString, | ||
421 | 'rp' : RubyParenthesisString, | ||
422 | 'style': Stylesheet, | ||
423 | 'script': Script, | ||
424 | 'template': TemplateString, | ||
425 | } | ||
426 | |||
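With these defaults in place, the text inside the listed tags is instantiated as a dedicated NavigableString subclass, which makes it easy to tell script and style data apart from ordinary document text. A sketch, assuming this version of the vendored library exposes these classes:

    from bs4 import BeautifulSoup
    from bs4.element import Script, Stylesheet

    markup = "<style>p {color: red}</style><script>go()</script>"
    soup = BeautifulSoup(markup, "html.parser")
    print(isinstance(soup.style.string, Stylesheet))  # True
    print(isinstance(soup.script.string, Script))     # True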
234 | # The HTML standard defines these attributes as containing a | 427 | # The HTML standard defines these attributes as containing a |
235 | # space-separated list of values, not a single value. That is, | 428 | # space-separated list of values, not a single value. That is, |
236 | # class="foo bar" means that the 'class' attribute has two values, | 429 | # class="foo bar" means that the 'class' attribute has two values, |
@@ -238,7 +431,7 @@ class HTMLTreeBuilder(TreeBuilder): | |||
238 | # encounter one of these attributes, we will parse its value into | 431 | # encounter one of these attributes, we will parse its value into |
239 | # a list of values if possible. Upon output, the list will be | 432 | # a list of values if possible. Upon output, the list will be |
240 | # converted back into a string. | 433 | # converted back into a string. |
241 | cdata_list_attributes = { | 434 | DEFAULT_CDATA_LIST_ATTRIBUTES = { |
242 | "*" : ['class', 'accesskey', 'dropzone'], | 435 | "*" : ['class', 'accesskey', 'dropzone'], |
243 | "a" : ['rel', 'rev'], | 436 | "a" : ['rel', 'rev'], |
244 | "link" : ['rel', 'rev'], | 437 | "link" : ['rel', 'rev'], |
@@ -255,7 +448,19 @@ class HTMLTreeBuilder(TreeBuilder): | |||
255 | "output" : ["for"], | 448 | "output" : ["for"], |
256 | } | 449 | } |
257 | 450 | ||
451 | DEFAULT_PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea']) | ||
452 | |||
258 | def set_up_substitutions(self, tag): | 453 | def set_up_substitutions(self, tag): |
454 | """Replace the declared encoding in a <meta> tag with a placeholder, | ||
455 | to be substituted when the tag is output to a string. | ||
456 | |||
457 | An HTML document may come in to Beautiful Soup as one | ||
458 | encoding, but exit in a different encoding, and the <meta> tag | ||
459 | needs to be changed to reflect this. | ||
460 | |||
461 | :param tag: A `Tag` | ||
462 | :return: Whether or not a substitution was performed. | ||
463 | """ | ||
259 | # We are only interested in <meta> tags | 464 | # We are only interested in <meta> tags |
260 | if tag.name != 'meta': | 465 | if tag.name != 'meta': |
261 | return False | 466 | return False |
@@ -288,10 +493,107 @@ class HTMLTreeBuilder(TreeBuilder): | |||
288 | 493 | ||
289 | return (meta_encoding is not None) | 494 | return (meta_encoding is not None) |
290 | 495 | ||
496 | class DetectsXMLParsedAsHTML(object): | ||
497 | """A mixin class for any class (a TreeBuilder, or some class used by a | ||
498 | TreeBuilder) that's in a position to detect whether an XML | ||
499 | document is being incorrectly parsed as HTML, and issue an | ||
500 | appropriate warning. | ||
501 | |||
502 | This requires being able to observe an incoming processing | ||
503 | instruction that might be an XML declaration, and also able to | ||
504 | observe tags as they're opened. If you can't do that for a given | ||
505 | TreeBuilder, there's a less reliable implementation based on | ||
506 | examining the raw markup. | ||
507 | """ | ||
508 | |||
509 | # Regular expression for seeing if markup has an <html> tag. | ||
510 | LOOKS_LIKE_HTML = re.compile("<[^ +]html", re.I) | ||
511 | LOOKS_LIKE_HTML_B = re.compile(b"<[^ +]html", re.I) | ||
512 | |||
513 | XML_PREFIX = '<?xml' | ||
514 | XML_PREFIX_B = b'<?xml' | ||
515 | |||
516 | @classmethod | ||
517 | def warn_if_markup_looks_like_xml(cls, markup, stacklevel=3): | ||
518 | """Perform a check on some markup to see if it looks like XML | ||
519 | that's not XHTML. If so, issue a warning. | ||
520 | |||
521 | This is much less reliable than doing the check while parsing, | ||
522 | but some of the tree builders can't do that. | ||
523 | |||
524 | :param stacklevel: The stacklevel of the code calling this | ||
525 | function. | ||
526 | |||
527 | :return: True if the markup looks like non-XHTML XML, False | ||
528 | otherwise. | ||
529 | |||
530 | """ | ||
531 | if isinstance(markup, bytes): | ||
532 | prefix = cls.XML_PREFIX_B | ||
533 | looks_like_html = cls.LOOKS_LIKE_HTML_B | ||
534 | else: | ||
535 | prefix = cls.XML_PREFIX | ||
536 | looks_like_html = cls.LOOKS_LIKE_HTML | ||
537 | |||
538 | if (markup is not None | ||
539 | and markup.startswith(prefix) | ||
540 | and not looks_like_html.search(markup[:500]) | ||
541 | ): | ||
542 | cls._warn(stacklevel=stacklevel+2) | ||
543 | return True | ||
544 | return False | ||
545 | |||
546 | @classmethod | ||
547 | def _warn(cls, stacklevel=5): | ||
548 | """Issue a warning about XML being parsed as HTML.""" | ||
549 | warnings.warn( | ||
550 | XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning, | ||
551 | stacklevel=stacklevel | ||
552 | ) | ||
553 | |||
554 | def _initialize_xml_detector(self): | ||
555 | """Call this method before parsing a document.""" | ||
556 | self._first_processing_instruction = None | ||
557 | self._root_tag = None | ||
558 | |||
559 | def _document_might_be_xml(self, processing_instruction): | ||
560 | """Call this method when encountering an XML declaration, or a | ||
561 | "processing instruction" that might be an XML declaration. | ||
562 | """ | ||
563 | if (self._first_processing_instruction is not None | ||
564 | or self._root_tag is not None): | ||
565 | # The document has already started. Don't bother checking | ||
566 | # anymore. | ||
567 | return | ||
568 | |||
569 | self._first_processing_instruction = processing_instruction | ||
570 | |||
571 | # We won't know until we encounter the first tag whether or | ||
572 | # not this is actually a problem. | ||
573 | |||
574 | def _root_tag_encountered(self, name): | ||
575 | """Call this when you encounter the document's root tag. | ||
576 | |||
577 | This is where we actually check whether an XML document is | ||
578 | being incorrectly parsed as HTML, and issue the warning. | ||
579 | """ | ||
580 | if self._root_tag is not None: | ||
581 | # This method was incorrectly called multiple times. Do | ||
582 | # nothing. | ||
583 | return | ||
584 | |||
585 | self._root_tag = name | ||
586 | if (name != 'html' and self._first_processing_instruction is not None | ||
587 | and self._first_processing_instruction.lower().startswith('xml ')): | ||
588 | # We encountered an XML declaration and then a tag other | ||
589 | # than 'html'. This is a reliable indicator that a | ||
590 | # non-XHTML XML document is being parsed as HTML. | ||
591 | self._warn() | ||
592 | |||
593 | |||
291 | def register_treebuilders_from(module): | 594 | def register_treebuilders_from(module): |
292 | """Copy TreeBuilders from the given module into this module.""" | 595 | """Copy TreeBuilders from the given module into this module.""" |
293 | # I'm fairly sure this is not the best way to do this. | 596 | this_module = sys.modules[__name__] |
294 | this_module = sys.modules['bs4.builder'] | ||
295 | for name in module.__all__: | 597 | for name in module.__all__: |
296 | obj = getattr(module, name) | 598 | obj = getattr(module, name) |
297 | 599 | ||
@@ -302,12 +604,22 @@ def register_treebuilders_from(module): | |||
302 | this_module.builder_registry.register(obj) | 604 | this_module.builder_registry.register(obj) |
303 | 605 | ||
304 | class ParserRejectedMarkup(Exception): | 606 | class ParserRejectedMarkup(Exception): |
305 | pass | 607 | """An Exception to be raised when the underlying parser simply |
306 | 608 | refuses to parse the given markup. | |
609 | """ | ||
610 | def __init__(self, message_or_exception): | ||
611 | """Explain why the parser rejected the given markup, either | ||
612 | with a textual explanation or another exception. | ||
613 | """ | ||
614 | if isinstance(message_or_exception, Exception): | ||
615 | e = message_or_exception | ||
616 | message_or_exception = "%s: %s" % (e.__class__.__name__, str(e)) | ||
617 | super(ParserRejectedMarkup, self).__init__(message_or_exception) | ||
618 | |||
307 | # Builders are registered in reverse order of priority, so that custom | 619 | # Builders are registered in reverse order of priority, so that custom |
308 | # builder registrations will take precedence. In general, we want lxml | 620 | # builder registrations will take precedence. In general, we want lxml |
309 | # to take precedence over html5lib, because it's faster. And we only | 621 | # to take precedence over html5lib, because it's faster. And we only |
310 | # want to use HTMLParser as a last result. | 622 | # want to use HTMLParser as a last resort. |
311 | from . import _htmlparser | 623 | from . import _htmlparser |
312 | register_treebuilders_from(_htmlparser) | 624 | register_treebuilders_from(_htmlparser) |
313 | try: | 625 | try: |
diff --git a/bitbake/lib/bs4/builder/_html5lib.py b/bitbake/lib/bs4/builder/_html5lib.py index 9e9216ef9c..7c46a85118 100644 --- a/bitbake/lib/bs4/builder/_html5lib.py +++ b/bitbake/lib/bs4/builder/_html5lib.py | |||
@@ -1,9 +1,14 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | __all__ = [ | 4 | __all__ = [ |
2 | 'HTML5TreeBuilder', | 5 | 'HTML5TreeBuilder', |
3 | ] | 6 | ] |
4 | 7 | ||
5 | import warnings | 8 | import warnings |
9 | import re | ||
6 | from bs4.builder import ( | 10 | from bs4.builder import ( |
11 | DetectsXMLParsedAsHTML, | ||
7 | PERMISSIVE, | 12 | PERMISSIVE, |
8 | HTML, | 13 | HTML, |
9 | HTML_5, | 14 | HTML_5, |
@@ -11,17 +16,13 @@ from bs4.builder import ( | |||
11 | ) | 16 | ) |
12 | from bs4.element import ( | 17 | from bs4.element import ( |
13 | NamespacedAttribute, | 18 | NamespacedAttribute, |
14 | whitespace_re, | 19 | nonwhitespace_re, |
15 | ) | 20 | ) |
16 | import html5lib | 21 | import html5lib |
17 | try: | 22 | from html5lib.constants import ( |
18 | # html5lib >= 0.99999999/1.0b9 | 23 | namespaces, |
19 | from html5lib.treebuilders import base as treebuildersbase | 24 | prefixes, |
20 | except ImportError: | 25 | ) |
21 | # html5lib <= 0.9999999/1.0b8 | ||
22 | from html5lib.treebuilders import _base as treebuildersbase | ||
23 | from html5lib.constants import namespaces | ||
24 | |||
25 | from bs4.element import ( | 26 | from bs4.element import ( |
26 | Comment, | 27 | Comment, |
27 | Doctype, | 28 | Doctype, |
@@ -29,13 +30,37 @@ from bs4.element import ( | |||
29 | Tag, | 30 | Tag, |
30 | ) | 31 | ) |
31 | 32 | ||
33 | try: | ||
34 | # Pre-0.99999999 | ||
35 | from html5lib.treebuilders import _base as treebuilder_base | ||
36 | new_html5lib = False | ||
37 | except ImportError as e: | ||
38 | # 0.99999999 and up | ||
39 | from html5lib.treebuilders import base as treebuilder_base | ||
40 | new_html5lib = True | ||
41 | |||
32 | class HTML5TreeBuilder(HTMLTreeBuilder): | 42 | class HTML5TreeBuilder(HTMLTreeBuilder): |
33 | """Use html5lib to build a tree.""" | 43 | """Use html5lib to build a tree. |
44 | |||
45 | Note that this TreeBuilder does not support some features common | ||
46 | to HTML TreeBuilders. Some of these features could theoretically | ||
47 | be implemented, but at the very least it's quite difficult, | ||
48 | because html5lib moves the parse tree around as it's being built. | ||
49 | |||
50 | * This TreeBuilder doesn't use different subclasses of NavigableString | ||
51 | based on the name of the tag in which the string was found. | ||
52 | |||
53 | * You can't use a SoupStrainer to parse only part of a document. | ||
54 | """ | ||
34 | 55 | ||
35 | NAME = "html5lib" | 56 | NAME = "html5lib" |
36 | 57 | ||
37 | features = [NAME, PERMISSIVE, HTML_5, HTML] | 58 | features = [NAME, PERMISSIVE, HTML_5, HTML] |
38 | 59 | ||
60 | # html5lib can tell us which line number and position in the | ||
61 | # original file is the source of an element. | ||
62 | TRACKS_LINE_NUMBERS = True | ||
63 | |||
39 | def prepare_markup(self, markup, user_specified_encoding, | 64 | def prepare_markup(self, markup, user_specified_encoding, |
40 | document_declared_encoding=None, exclude_encodings=None): | 65 | document_declared_encoding=None, exclude_encodings=None): |
41 | # Store the user-specified encoding for use later on. | 66 | # Store the user-specified encoding for use later on. |
@@ -45,27 +70,56 @@ class HTML5TreeBuilder(HTMLTreeBuilder): | |||
45 | # ATM because the html5lib TreeBuilder doesn't use | 70 | # ATM because the html5lib TreeBuilder doesn't use |
46 | # UnicodeDammit. | 71 | # UnicodeDammit. |
47 | if exclude_encodings: | 72 | if exclude_encodings: |
48 | warnings.warn("You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.") | 73 | warnings.warn( |
74 | "You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.", | ||
75 | stacklevel=3 | ||
76 | ) | ||
77 | |||
78 | # html5lib only parses HTML, so if it's given XML that's worth | ||
79 | # noting. | ||
80 | DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( | ||
81 | markup, stacklevel=3 | ||
82 | ) | ||
83 | |||
49 | yield (markup, None, None, False) | 84 | yield (markup, None, None, False) |
50 | 85 | ||
51 | # These methods are defined by Beautiful Soup. | 86 | # These methods are defined by Beautiful Soup. |
52 | def feed(self, markup): | 87 | def feed(self, markup): |
53 | if self.soup.parse_only is not None: | 88 | if self.soup.parse_only is not None: |
54 | warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.") | 89 | warnings.warn( |
90 | "You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.", | ||
91 | stacklevel=4 | ||
92 | ) | ||
55 | parser = html5lib.HTMLParser(tree=self.create_treebuilder) | 93 | parser = html5lib.HTMLParser(tree=self.create_treebuilder) |
56 | doc = parser.parse(markup, encoding=self.user_specified_encoding) | 94 | self.underlying_builder.parser = parser |
57 | 95 | extra_kwargs = dict() | |
96 | if not isinstance(markup, str): | ||
97 | if new_html5lib: | ||
98 | extra_kwargs['override_encoding'] = self.user_specified_encoding | ||
99 | else: | ||
100 | extra_kwargs['encoding'] = self.user_specified_encoding | ||
101 | doc = parser.parse(markup, **extra_kwargs) | ||
102 | |||
58 | # Set the character encoding detected by the tokenizer. | 103 | # Set the character encoding detected by the tokenizer. |
59 | if isinstance(markup, str): | 104 | if isinstance(markup, str): |
60 | # We need to special-case this because html5lib sets | 105 | # We need to special-case this because html5lib sets |
61 | # charEncoding to UTF-8 if it gets Unicode input. | 106 | # charEncoding to UTF-8 if it gets Unicode input. |
62 | doc.original_encoding = None | 107 | doc.original_encoding = None |
63 | else: | 108 | else: |
64 | doc.original_encoding = parser.tokenizer.stream.charEncoding[0] | 109 | original_encoding = parser.tokenizer.stream.charEncoding[0] |
65 | 110 | if not isinstance(original_encoding, str): | |
111 | # In 0.99999999 and up, the encoding is an html5lib | ||
112 | # Encoding object. We want to use a string for compatibility | ||
113 | # with other tree builders. | ||
114 | original_encoding = original_encoding.name | ||
115 | doc.original_encoding = original_encoding | ||
116 | self.underlying_builder.parser = None | ||
117 | |||
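The encoding keyword accepted by html5lib's parse() changed names across releases, which is what the new_html5lib flag feature-detects. A hedged sketch of the same dance (assumes html5lib is importable; on 1.x the keyword is override_encoding, on the old 0.9999999 line it was encoding):

    import html5lib

    try:
        from html5lib.treebuilders import base  # 0.99999999 and up
        encoding_kwarg = 'override_encoding'
    except ImportError:
        encoding_kwarg = 'encoding'             # older releases

    parser = html5lib.HTMLParser()
    # Only bytestrings need the hint; Unicode input is parsed as-is.
    doc = parser.parse(b"<p>caf\xe9</p>", **{encoding_kwarg: 'windows-1252'})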
66 | def create_treebuilder(self, namespaceHTMLElements): | 118 | def create_treebuilder(self, namespaceHTMLElements): |
67 | self.underlying_builder = TreeBuilderForHtml5lib( | 119 | self.underlying_builder = TreeBuilderForHtml5lib( |
68 | self.soup, namespaceHTMLElements) | 120 | namespaceHTMLElements, self.soup, |
121 | store_line_numbers=self.store_line_numbers | ||
122 | ) | ||
69 | return self.underlying_builder | 123 | return self.underlying_builder |
70 | 124 | ||
71 | def test_fragment_to_document(self, fragment): | 125 | def test_fragment_to_document(self, fragment): |
@@ -73,12 +127,30 @@ class HTML5TreeBuilder(HTMLTreeBuilder): | |||
73 | return '<html><head></head><body>%s</body></html>' % fragment | 127 | return '<html><head></head><body>%s</body></html>' % fragment |
74 | 128 | ||
75 | 129 | ||
76 | class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | 130 | class TreeBuilderForHtml5lib(treebuilder_base.TreeBuilder): |
77 | 131 | ||
78 | def __init__(self, soup, namespaceHTMLElements): | 132 | def __init__(self, namespaceHTMLElements, soup=None, |
79 | self.soup = soup | 133 | store_line_numbers=True, **kwargs): |
134 | if soup: | ||
135 | self.soup = soup | ||
136 | else: | ||
137 | from bs4 import BeautifulSoup | ||
138 | # TODO: Why is the parser 'html.parser' here? To avoid an | ||
139 | # infinite loop? | ||
140 | self.soup = BeautifulSoup( | ||
141 | "", "html.parser", store_line_numbers=store_line_numbers, | ||
142 | **kwargs | ||
143 | ) | ||
144 | # TODO: What are **kwargs exactly? Should they be passed in | ||
145 | # here in addition to/instead of being passed to the BeautifulSoup | ||
146 | # constructor? | ||
80 | super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements) | 147 | super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements) |
81 | 148 | ||
149 | # This will be set later to an html5lib.html5parser.HTMLParser | ||
150 | # object, which we can use to track the current line number. | ||
151 | self.parser = None | ||
152 | self.store_line_numbers = store_line_numbers | ||
153 | |||
82 | def documentClass(self): | 154 | def documentClass(self): |
83 | self.soup.reset() | 155 | self.soup.reset() |
84 | return Element(self.soup, self.soup, None) | 156 | return Element(self.soup, self.soup, None) |
@@ -92,14 +164,26 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | |||
92 | self.soup.object_was_parsed(doctype) | 164 | self.soup.object_was_parsed(doctype) |
93 | 165 | ||
94 | def elementClass(self, name, namespace): | 166 | def elementClass(self, name, namespace): |
95 | tag = self.soup.new_tag(name, namespace) | 167 | kwargs = {} |
168 | if self.parser and self.store_line_numbers: | ||
169 | # This represents the point immediately after the end of the | ||
170 | # tag. We don't know when the tag started, but we do know | ||
171 | # where it ended -- the character just before this one. | ||
172 | sourceline, sourcepos = self.parser.tokenizer.stream.position() | ||
173 | kwargs['sourceline'] = sourceline | ||
174 | kwargs['sourcepos'] = sourcepos-1 | ||
175 | tag = self.soup.new_tag(name, namespace, **kwargs) | ||
176 | |||
96 | return Element(tag, self.soup, namespace) | 177 | return Element(tag, self.soup, namespace) |
97 | 178 | ||
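Line-number tracking surfaces on every Tag as .sourceline and .sourcepos. A small usage sketch; note that, per the comment above, html5lib reports the position at which the tag ended rather than where it began:

    from bs4 import BeautifulSoup

    markup = "<html>\n<body>\n<p>hello</p>\n</body>\n</html>"
    soup = BeautifulSoup(markup, "html5lib")
    p = soup.find("p")
    print(p.sourceline, p.sourcepos)  # where html5lib saw the <p> tag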
98 | def commentClass(self, data): | 179 | def commentClass(self, data): |
99 | return TextNode(Comment(data), self.soup) | 180 | return TextNode(Comment(data), self.soup) |
100 | 181 | ||
101 | def fragmentClass(self): | 182 | def fragmentClass(self): |
102 | self.soup = BeautifulSoup("") | 183 | from bs4 import BeautifulSoup |
184 | # TODO: Why is the parser 'html.parser' here? To avoid an | ||
185 | # infinite loop? | ||
186 | self.soup = BeautifulSoup("", "html.parser") | ||
103 | self.soup.name = "[document_fragment]" | 187 | self.soup.name = "[document_fragment]" |
104 | return Element(self.soup, self.soup, None) | 188 | return Element(self.soup, self.soup, None) |
105 | 189 | ||
@@ -111,7 +195,57 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | |||
111 | return self.soup | 195 | return self.soup |
112 | 196 | ||
113 | def getFragment(self): | 197 | def getFragment(self): |
114 | return treebuildersbase.TreeBuilder.getFragment(self).element | 198 | return treebuilder_base.TreeBuilder.getFragment(self).element |
199 | |||
200 | def testSerializer(self, element): | ||
201 | from bs4 import BeautifulSoup | ||
202 | rv = [] | ||
203 | doctype_re = re.compile(r'^(.*?)(?: PUBLIC "(.*?)"(?: "(.*?)")?| SYSTEM "(.*?)")?$') | ||
204 | |||
205 | def serializeElement(element, indent=0): | ||
206 | if isinstance(element, BeautifulSoup): | ||
207 | pass | ||
208 | if isinstance(element, Doctype): | ||
209 | m = doctype_re.match(element) | ||
210 | if m: | ||
211 | name = m.group(1) | ||
212 | if m.lastindex > 1: | ||
213 | publicId = m.group(2) or "" | ||
214 | systemId = m.group(3) or m.group(4) or "" | ||
215 | rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" % | ||
216 | (' ' * indent, name, publicId, systemId)) | ||
217 | else: | ||
218 | rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, name)) | ||
219 | else: | ||
220 | rv.append("|%s<!DOCTYPE >" % (' ' * indent,)) | ||
221 | elif isinstance(element, Comment): | ||
222 | rv.append("|%s<!-- %s -->" % (' ' * indent, element)) | ||
223 | elif isinstance(element, NavigableString): | ||
224 | rv.append("|%s\"%s\"" % (' ' * indent, element)) | ||
225 | else: | ||
226 | if element.namespace: | ||
227 | name = "%s %s" % (prefixes[element.namespace], | ||
228 | element.name) | ||
229 | else: | ||
230 | name = element.name | ||
231 | rv.append("|%s<%s>" % (' ' * indent, name)) | ||
232 | if element.attrs: | ||
233 | attributes = [] | ||
234 | for name, value in list(element.attrs.items()): | ||
235 | if isinstance(name, NamespacedAttribute): | ||
236 | name = "%s %s" % (prefixes[name.namespace], name.name) | ||
237 | if isinstance(value, list): | ||
238 | value = " ".join(value) | ||
239 | attributes.append((name, value)) | ||
240 | |||
241 | for name, value in sorted(attributes): | ||
242 | rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) | ||
243 | indent += 2 | ||
244 | for child in element.children: | ||
245 | serializeElement(child, indent) | ||
246 | serializeElement(element, 0) | ||
247 | |||
248 | return "\n".join(rv) | ||
115 | 249 | ||
116 | class AttrList(object): | 250 | class AttrList(object): |
117 | def __init__(self, element): | 251 | def __init__(self, element): |
@@ -122,14 +256,14 @@ class AttrList(object): | |||
122 | def __setitem__(self, name, value): | 256 | def __setitem__(self, name, value): |
123 | # If this attribute is a multi-valued attribute for this element, | 257 | # If this attribute is a multi-valued attribute for this element, |
124 | # turn its value into a list. | 258 | # turn its value into a list. |
125 | list_attr = HTML5TreeBuilder.cdata_list_attributes | 259 | list_attr = self.element.cdata_list_attributes or {} |
126 | if (name in list_attr['*'] | 260 | if (name in list_attr.get('*', []) |
127 | or (self.element.name in list_attr | 261 | or (self.element.name in list_attr |
128 | and name in list_attr[self.element.name])): | 262 | and name in list_attr.get(self.element.name, []))): |
129 | # A node that is being cloned may have already undergone | 263 | # A node that is being cloned may have already undergone |
130 | # this procedure. | 264 | # this procedure. |
131 | if not isinstance(value, list): | 265 | if not isinstance(value, list): |
132 | value = whitespace_re.split(value) | 266 | value = nonwhitespace_re.findall(value) |
133 | self.element[name] = value | 267 | self.element[name] = value |
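The switch from whitespace_re.split() to nonwhitespace_re.findall() matters when an attribute value carries leading or trailing whitespace: splitting yields empty strings at the ends, while findall yields only the real tokens. A quick demonstration; the regular expressions are reconstructed here, bs4 defines its own in bs4.element:

    import re

    whitespace_re = re.compile(r"\s+")
    nonwhitespace_re = re.compile(r"\S+")

    value = " btn  btn-primary "
    print(whitespace_re.split(value))       # ['', 'btn', 'btn-primary', '']
    print(nonwhitespace_re.findall(value))  # ['btn', 'btn-primary']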
134 | def items(self): | 268 | def items(self): |
135 | return list(self.attrs.items()) | 269 | return list(self.attrs.items()) |
@@ -143,9 +277,9 @@ class AttrList(object): | |||
143 | return name in list(self.attrs.keys()) | 277 | return name in list(self.attrs.keys()) |
144 | 278 | ||
145 | 279 | ||
146 | class Element(treebuildersbase.Node): | 280 | class Element(treebuilder_base.Node): |
147 | def __init__(self, element, soup, namespace): | 281 | def __init__(self, element, soup, namespace): |
148 | treebuildersbase.Node.__init__(self, element.name) | 282 | treebuilder_base.Node.__init__(self, element.name) |
149 | self.element = element | 283 | self.element = element |
150 | self.soup = soup | 284 | self.soup = soup |
151 | self.namespace = namespace | 285 | self.namespace = namespace |
@@ -164,13 +298,15 @@ class Element(treebuildersbase.Node): | |||
164 | child = node | 298 | child = node |
165 | elif node.element.__class__ == NavigableString: | 299 | elif node.element.__class__ == NavigableString: |
166 | string_child = child = node.element | 300 | string_child = child = node.element |
301 | node.parent = self | ||
167 | else: | 302 | else: |
168 | child = node.element | 303 | child = node.element |
304 | node.parent = self | ||
169 | 305 | ||
170 | if not isinstance(child, str) and child.parent is not None: | 306 | if not isinstance(child, str) and child.parent is not None: |
171 | node.element.extract() | 307 | node.element.extract() |
172 | 308 | ||
173 | if (string_child and self.element.contents | 309 | if (string_child is not None and self.element.contents |
174 | and self.element.contents[-1].__class__ == NavigableString): | 310 | and self.element.contents[-1].__class__ == NavigableString): |
175 | # We are appending a string onto another string. | 311 | # We are appending a string onto another string. |
176 | # TODO This has O(n^2) performance, for input like | 312 | # TODO This has O(n^2) performance, for input like |
@@ -203,12 +339,12 @@ class Element(treebuildersbase.Node): | |||
203 | most_recent_element=most_recent_element) | 339 | most_recent_element=most_recent_element) |
204 | 340 | ||
205 | def getAttributes(self): | 341 | def getAttributes(self): |
342 | if isinstance(self.element, Comment): | ||
343 | return {} | ||
206 | return AttrList(self.element) | 344 | return AttrList(self.element) |
207 | 345 | ||
208 | def setAttributes(self, attributes): | 346 | def setAttributes(self, attributes): |
209 | |||
210 | if attributes is not None and len(attributes) > 0: | 347 | if attributes is not None and len(attributes) > 0: |
211 | |||
212 | converted_attributes = [] | 348 | converted_attributes = [] |
213 | for name, value in list(attributes.items()): | 349 | for name, value in list(attributes.items()): |
214 | if isinstance(name, tuple): | 350 | if isinstance(name, tuple): |
@@ -230,11 +366,11 @@ class Element(treebuildersbase.Node): | |||
230 | attributes = property(getAttributes, setAttributes) | 366 | attributes = property(getAttributes, setAttributes) |
231 | 367 | ||
232 | def insertText(self, data, insertBefore=None): | 368 | def insertText(self, data, insertBefore=None): |
369 | text = TextNode(self.soup.new_string(data), self.soup) | ||
233 | if insertBefore: | 370 | if insertBefore: |
234 | text = TextNode(self.soup.new_string(data), self.soup) | 371 | self.insertBefore(text, insertBefore) |
235 | self.insertBefore(data, insertBefore) | ||
236 | else: | 372 | else: |
237 | self.appendChild(data) | 373 | self.appendChild(text) |
238 | 374 | ||
239 | def insertBefore(self, node, refNode): | 375 | def insertBefore(self, node, refNode): |
240 | index = self.element.index(refNode.element) | 376 | index = self.element.index(refNode.element) |
@@ -253,9 +389,10 @@ class Element(treebuildersbase.Node): | |||
253 | 389 | ||
254 | def reparentChildren(self, new_parent): | 390 | def reparentChildren(self, new_parent): |
255 | """Move all of this tag's children into another tag.""" | 391 | """Move all of this tag's children into another tag.""" |
256 | # print "MOVE", self.element.contents | 392 | # print("MOVE", self.element.contents) |
257 | # print "FROM", self.element | 393 | # print("FROM", self.element) |
258 | # print "TO", new_parent.element | 394 | # print("TO", new_parent.element) |
395 | |||
259 | element = self.element | 396 | element = self.element |
260 | new_parent_element = new_parent.element | 397 | new_parent_element = new_parent.element |
261 | # Determine what this tag's next_element will be once all the children | 398 | # Determine what this tag's next_element will be once all the children |
@@ -274,29 +411,35 @@ class Element(treebuildersbase.Node): | |||
274 | new_parents_last_descendant_next_element = new_parent_element.next_element | 411 | new_parents_last_descendant_next_element = new_parent_element.next_element |
275 | 412 | ||
276 | to_append = element.contents | 413 | to_append = element.contents |
277 | append_after = new_parent_element.contents | ||
278 | if len(to_append) > 0: | 414 | if len(to_append) > 0: |
279 | # Set the first child's previous_element and previous_sibling | 415 | # Set the first child's previous_element and previous_sibling |
280 | # to elements within the new parent | 416 | # to elements within the new parent |
281 | first_child = to_append[0] | 417 | first_child = to_append[0] |
282 | if new_parents_last_descendant: | 418 | if new_parents_last_descendant is not None: |
283 | first_child.previous_element = new_parents_last_descendant | 419 | first_child.previous_element = new_parents_last_descendant |
284 | else: | 420 | else: |
285 | first_child.previous_element = new_parent_element | 421 | first_child.previous_element = new_parent_element |
286 | first_child.previous_sibling = new_parents_last_child | 422 | first_child.previous_sibling = new_parents_last_child |
287 | if new_parents_last_descendant: | 423 | if new_parents_last_descendant is not None: |
288 | new_parents_last_descendant.next_element = first_child | 424 | new_parents_last_descendant.next_element = first_child |
289 | else: | 425 | else: |
290 | new_parent_element.next_element = first_child | 426 | new_parent_element.next_element = first_child |
291 | if new_parents_last_child: | 427 | if new_parents_last_child is not None: |
292 | new_parents_last_child.next_sibling = first_child | 428 | new_parents_last_child.next_sibling = first_child |
293 | 429 | ||
294 | # Fix the last child's next_element and next_sibling | 430 | # Find the very last element being moved. It is now the |
295 | last_child = to_append[-1] | 431 | # parent's last descendant. It has no .next_sibling and |
296 | last_child.next_element = new_parents_last_descendant_next_element | 432 | # its .next_element is whatever the previous last |
297 | if new_parents_last_descendant_next_element: | 433 | # descendant had. |
298 | new_parents_last_descendant_next_element.previous_element = last_child | 434 | last_childs_last_descendant = to_append[-1]._last_descendant(False, True) |
299 | last_child.next_sibling = None | 435 | |
436 | last_childs_last_descendant.next_element = new_parents_last_descendant_next_element | ||
437 | if new_parents_last_descendant_next_element is not None: | ||
438 | # TODO: This code has no test coverage and I'm not sure | ||
439 | # how to get html5lib to go through this path, but it's | ||
440 | # just the other side of the previous line. | ||
441 | new_parents_last_descendant_next_element.previous_element = last_childs_last_descendant | ||
442 | last_childs_last_descendant.next_sibling = None | ||
300 | 443 | ||
301 | for child in to_append: | 444 | for child in to_append: |
302 | child.parent = new_parent_element | 445 | child.parent = new_parent_element |
@@ -306,9 +449,9 @@ class Element(treebuildersbase.Node): | |||
306 | element.contents = [] | 449 | element.contents = [] |
307 | element.next_element = final_next_element | 450 | element.next_element = final_next_element |
308 | 451 | ||
309 | # print "DONE WITH MOVE" | 452 | # print("DONE WITH MOVE") |
310 | # print "FROM", self.element | 453 | # print("FROM", self.element) |
311 | # print "TO", new_parent_element | 454 | # print("TO", new_parent_element) |
312 | 455 | ||
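All of this pointer surgery keeps Beautiful Soup's document-order linked list intact: every node carries .next_element/.previous_element, and traversal APIs depend on the chain having no gaps after a reparent. An illustration of the invariant, using the pure-Python builder:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a><b>one</b><c>two</c></a>", "html.parser")
    node = soup.a
    while node is not None:
        print(repr(node))         # document order: <a>, <b>, 'one', <c>, 'two'
        node = node.next_element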
313 | def cloneNode(self): | 456 | def cloneNode(self): |
314 | tag = self.soup.new_tag(self.element.name, self.namespace) | 457 | tag = self.soup.new_tag(self.element.name, self.namespace) |
@@ -321,7 +464,7 @@ class Element(treebuildersbase.Node): | |||
321 | return self.element.contents | 464 | return self.element.contents |
322 | 465 | ||
323 | def getNameTuple(self): | 466 | def getNameTuple(self): |
324 | if self.namespace is None: | 467 | if self.namespace is None: |
325 | return namespaces["html"], self.name | 468 | return namespaces["html"], self.name |
326 | else: | 469 | else: |
327 | return self.namespace, self.name | 470 | return self.namespace, self.name |
@@ -330,7 +473,7 @@ class Element(treebuildersbase.Node): | |||
330 | 473 | ||
331 | class TextNode(Element): | 474 | class TextNode(Element): |
332 | def __init__(self, element, soup): | 475 | def __init__(self, element, soup): |
333 | treebuildersbase.Node.__init__(self, None) | 476 | treebuilder_base.Node.__init__(self, None) |
334 | self.element = element | 477 | self.element = element |
335 | self.soup = soup | 478 | self.soup = soup |
336 | 479 | ||
diff --git a/bitbake/lib/bs4/builder/_htmlparser.py b/bitbake/lib/bs4/builder/_htmlparser.py index bb0a63f2f3..3cc187f892 100644 --- a/bitbake/lib/bs4/builder/_htmlparser.py +++ b/bitbake/lib/bs4/builder/_htmlparser.py | |||
@@ -1,35 +1,18 @@ | |||
1 | # encoding: utf-8 | ||
1 | """Use the HTMLParser library to parse HTML files that aren't too bad.""" | 2 | """Use the HTMLParser library to parse HTML files that aren't too bad.""" |
2 | 3 | ||
4 | # Use of this source code is governed by the MIT license. | ||
5 | __license__ = "MIT" | ||
6 | |||
3 | __all__ = [ | 7 | __all__ = [ |
4 | 'HTMLParserTreeBuilder', | 8 | 'HTMLParserTreeBuilder', |
5 | ] | 9 | ] |
6 | 10 | ||
7 | from html.parser import HTMLParser | 11 | from html.parser import HTMLParser |
8 | 12 | ||
9 | try: | ||
10 | from html.parser import HTMLParseError | ||
11 | except ImportError as e: | ||
12 | # HTMLParseError is removed in Python 3.5. Since it can never be | ||
13 | # thrown in 3.5, we can just define our own class as a placeholder. | ||
14 | class HTMLParseError(Exception): | ||
15 | pass | ||
16 | |||
17 | import sys | 13 | import sys |
18 | import warnings | 14 | import warnings |
19 | 15 | ||
20 | # Starting in Python 3.2, the HTMLParser constructor takes a 'strict' | ||
21 | # argument, which we'd like to set to False. Unfortunately, | ||
22 | # http://bugs.python.org/issue13273 makes strict=True a better bet | ||
23 | # before Python 3.2.3. | ||
24 | # | ||
25 | # At the end of this file, we monkeypatch HTMLParser so that | ||
26 | # strict=True works well on Python 3.2.2. | ||
27 | major, minor, release = sys.version_info[:3] | ||
28 | CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3 | ||
29 | CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3 | ||
30 | CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4 | ||
31 | |||
32 | |||
33 | from bs4.element import ( | 16 | from bs4.element import ( |
34 | CData, | 17 | CData, |
35 | Comment, | 18 | Comment, |
@@ -40,6 +23,8 @@ from bs4.element import ( | |||
40 | from bs4.dammit import EntitySubstitution, UnicodeDammit | 23 | from bs4.dammit import EntitySubstitution, UnicodeDammit |
41 | 24 | ||
42 | from bs4.builder import ( | 25 | from bs4.builder import ( |
26 | DetectsXMLParsedAsHTML, | ||
27 | ParserRejectedMarkup, | ||
43 | HTML, | 28 | HTML, |
44 | HTMLTreeBuilder, | 29 | HTMLTreeBuilder, |
45 | STRICT, | 30 | STRICT, |
@@ -48,8 +33,84 @@ from bs4.builder import ( | |||
48 | 33 | ||
49 | HTMLPARSER = 'html.parser' | 34 | HTMLPARSER = 'html.parser' |
50 | 35 | ||
51 | class BeautifulSoupHTMLParser(HTMLParser): | 36 | class BeautifulSoupHTMLParser(HTMLParser, DetectsXMLParsedAsHTML): |
52 | def handle_starttag(self, name, attrs): | 37 | """A subclass of the Python standard library's HTMLParser class, which |
38 | listens for HTMLParser events and translates them into calls | ||
39 | to Beautiful Soup's tree construction API. | ||
40 | """ | ||
41 | |||
42 | # Strategies for handling duplicate attributes | ||
43 | IGNORE = 'ignore' | ||
44 | REPLACE = 'replace' | ||
45 | |||
46 | def __init__(self, *args, **kwargs): | ||
47 | """Constructor. | ||
48 | |||
49 | :param on_duplicate_attribute: A strategy for what to do if a | ||
50 | tag includes the same attribute more than once. Accepted | ||
51 | values are: REPLACE (replace earlier values with later | ||
52 | ones, the default), IGNORE (keep the earliest value | ||
53 | encountered), or a callable. A callable must take three | ||
54 | arguments: the dictionary of attributes already processed, | ||
55 | the name of the duplicate attribute, and the most recent value | ||
56 | encountered. | ||
57 | """ | ||
58 | self.on_duplicate_attribute = kwargs.pop( | ||
59 | 'on_duplicate_attribute', self.REPLACE | ||
60 | ) | ||
61 | HTMLParser.__init__(self, *args, **kwargs) | ||
62 | |||
63 | # Keep a list of empty-element tags that were encountered | ||
64 | # without an explicit closing tag. If we encounter a closing tag | ||
65 | # of this type, we'll associate it with one of those entries. | ||
66 | # | ||
67 | # This isn't a stack because we don't care about the | ||
68 | # order. It's a list of closing tags we've already handled and | ||
69 | # will ignore, assuming they ever show up. | ||
70 | self.already_closed_empty_element = [] | ||
71 | |||
72 | self._initialize_xml_detector() | ||
73 | |||
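The on_duplicate_attribute strategy is reachable from the public BeautifulSoup constructor whenever the html.parser builder is selected. A hedged usage sketch:

    from bs4 import BeautifulSoup

    markup = '<a class="first" class="second"></a>'

    # Default (REPLACE): the later value wins.
    print(BeautifulSoup(markup, "html.parser").a["class"])  # ['second']

    # IGNORE: keep the earliest value encountered.
    soup = BeautifulSoup(markup, "html.parser",
                         on_duplicate_attribute="ignore")
    print(soup.a["class"])                                  # ['first']

    # A callable can merge values instead of picking one.
    def accumulate(attrs, key, value):
        if not isinstance(attrs[key], list):
            attrs[key] = [attrs[key]]
        attrs[key].append(value)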
74 | def error(self, message): | ||
75 | # NOTE: This method is required so long as Python 3.9 is | ||
76 | # supported. The corresponding code was removed from HTMLParser | ||
77 | # in 3.5, but not from ParserBase until 3.10. | ||
78 | # https://github.com/python/cpython/issues/76025 | ||
79 | # | ||
80 | # The original implementation turned the error into a warning, | ||
81 | # but in every case I discovered, this made HTMLParser | ||
82 | # immediately crash with an error message that was less | ||
83 | # helpful than the warning. The new implementation makes it | ||
84 | # more clear that html.parser just can't parse this | ||
85 | # markup. The 3.10 implementation does the same, though it | ||
86 | # raises AssertionError rather than calling a method. (We | ||
87 | # catch this error and wrap it in a ParserRejectedMarkup.) | ||
88 | raise ParserRejectedMarkup(message) | ||
89 | |||
90 | def handle_startendtag(self, name, attrs): | ||
91 | """Handle an incoming empty-element tag. | ||
92 | |||
93 | This is only called when the markup looks like <tag/>. | ||
94 | |||
95 | :param name: Name of the tag. | ||
96 | :param attrs: Dictionary of the tag's attributes. | ||
97 | """ | ||
98 | # is_startend() tells handle_starttag not to close the tag | ||
99 | # just because its name matches a known empty-element tag. We | ||
100 | # know that this is an empty-element tag and we want to call | ||
101 | # handle_endtag ourselves. | ||
102 | tag = self.handle_starttag(name, attrs, handle_empty_element=False) | ||
103 | self.handle_endtag(name) | ||
104 | |||
105 | def handle_starttag(self, name, attrs, handle_empty_element=True): | ||
106 | """Handle an opening tag, e.g. '<tag>' | ||
107 | |||
108 | :param name: Name of the tag. | ||
109 | :param attrs: Dictionary of the tag's attributes. | ||
110 | :param handle_empty_element: True if this tag is known to be | ||
111 | an empty-element tag (i.e. there is not expected to be any | ||
112 | closing tag). | ||
113 | """ | ||
53 | # XXX namespace | 114 | # XXX namespace |
54 | attr_dict = {} | 115 | attr_dict = {} |
55 | for key, value in attrs: | 116 | for key, value in attrs: |
@@ -57,20 +118,78 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
57 | # for consistency with the other tree builders. | 118 | # for consistency with the other tree builders. |
58 | if value is None: | 119 | if value is None: |
59 | value = '' | 120 | value = '' |
60 | attr_dict[key] = value | 121 | if key in attr_dict: |
122 | # A single attribute shows up multiple times in this | ||
123 | # tag. How to handle it depends on the | ||
124 | # on_duplicate_attribute setting. | ||
125 | on_dupe = self.on_duplicate_attribute | ||
126 | if on_dupe == self.IGNORE: | ||
127 | pass | ||
128 | elif on_dupe in (None, self.REPLACE): | ||
129 | attr_dict[key] = value | ||
130 | else: | ||
131 | on_dupe(attr_dict, key, value) | ||
132 | else: | ||
133 | attr_dict[key] = value | ||
61 | attrvalue = '""' | 134 | attrvalue = '""' |
62 | self.soup.handle_starttag(name, None, None, attr_dict) | 135 | #print("START", name) |
63 | 136 | sourceline, sourcepos = self.getpos() | |
64 | def handle_endtag(self, name): | 137 | tag = self.soup.handle_starttag( |
65 | self.soup.handle_endtag(name) | 138 | name, None, None, attr_dict, sourceline=sourceline, |
66 | 139 | sourcepos=sourcepos | |
140 | ) | ||
141 | if tag and tag.is_empty_element and handle_empty_element: | ||
142 | # Unlike other parsers, html.parser doesn't send separate end tag | ||
143 | # events for empty-element tags. (It's handled in | ||
144 | # handle_startendtag, but only if the original markup looked like | ||
145 | # <tag/>.) | ||
146 | # | ||
147 | # So we need to call handle_endtag() ourselves. Since we | ||
148 | # know the start event is identical to the end event, we | ||
149 | # don't want handle_endtag() to cross off any previous end | ||
150 | # events for tags of this name. | ||
151 | self.handle_endtag(name, check_already_closed=False) | ||
152 | |||
153 | # But we might encounter an explicit closing tag for this tag | ||
154 | # later on. If so, we want to ignore it. | ||
155 | self.already_closed_empty_element.append(name) | ||
156 | |||
157 | if self._root_tag is None: | ||
158 | self._root_tag_encountered(name) | ||
159 | |||
160 | def handle_endtag(self, name, check_already_closed=True): | ||
161 | """Handle a closing tag, e.g. '</tag>' | ||
162 | |||
163 | :param name: A tag name. | ||
164 | :param check_already_closed: True if this tag is expected to | ||
165 | be the closing portion of an empty-element tag, | ||
166 | e.g. '<tag></tag>'. | ||
167 | """ | ||
168 | #print("END", name) | ||
169 | if check_already_closed and name in self.already_closed_empty_element: | ||
170 | # This is a redundant end tag for an empty-element tag. | ||
171 | # We've already called handle_endtag() for it, so just | ||
172 | # check it off the list. | ||
173 | #print("ALREADY CLOSED", name) | ||
174 | self.already_closed_empty_element.remove(name) | ||
175 | else: | ||
176 | self.soup.handle_endtag(name) | ||
177 | |||
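The net effect of the already_closed_empty_element bookkeeping is that the three spellings of an empty-element tag all produce the same tree, with no spurious duplicate tags. A quick check:

    from bs4 import BeautifulSoup

    for markup in ("<br>", "<br/>", "<br></br>"):
        print(BeautifulSoup(markup, "html.parser").decode())  # '<br/>' each time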
67 | def handle_data(self, data): | 178 | def handle_data(self, data): |
179 | """Handle some textual data that shows up between tags.""" | ||
68 | self.soup.handle_data(data) | 180 | self.soup.handle_data(data) |
69 | 181 | ||
70 | def handle_charref(self, name): | 182 | def handle_charref(self, name): |
71 | # XXX workaround for a bug in HTMLParser. Remove this once | 183 | """Handle a numeric character reference by converting it to the |
72 | # it's fixed in all supported versions. | 184 | corresponding Unicode character and treating it as textual |
73 | # http://bugs.python.org/issue13633 | 185 | data. |
186 | |||
187 | :param name: Character number, possibly in hexadecimal. | ||
188 | """ | ||
189 | # TODO: This was originally a workaround for a bug in | ||
190 | # HTMLParser. (http://bugs.python.org/issue13633) The bug has | ||
191 | # been fixed, but removing this code still makes some | ||
192 | # Beautiful Soup tests fail. This needs investigation. | ||
74 | if name.startswith('x'): | 193 | if name.startswith('x'): |
75 | real_name = int(name.lstrip('x'), 16) | 194 | real_name = int(name.lstrip('x'), 16) |
76 | elif name.startswith('X'): | 195 | elif name.startswith('X'): |
@@ -78,37 +197,71 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
78 | else: | 197 | else: |
79 | real_name = int(name) | 198 | real_name = int(name) |
80 | 199 | ||
81 | try: | 200 | data = None |
82 | data = chr(real_name) | 201 | if real_name < 256: |
83 | except (ValueError, OverflowError) as e: | 202 | # HTML numeric entities are supposed to reference Unicode |
84 | data = "\N{REPLACEMENT CHARACTER}" | 203 | # code points, but sometimes they reference code points in |
85 | 204 | # some other encoding (ahem, Windows-1252). E.g. &#147; | |
205 | # instead of &#8220; for LEFT DOUBLE QUOTATION MARK. This | ||
206 | # code tries to detect this situation and compensate. | ||
207 | for encoding in (self.soup.original_encoding, 'windows-1252'): | ||
208 | if not encoding: | ||
209 | continue | ||
210 | try: | ||
211 | data = bytearray([real_name]).decode(encoding) | ||
212 | except UnicodeDecodeError as e: | ||
213 | pass | ||
214 | if not data: | ||
215 | try: | ||
216 | data = chr(real_name) | ||
217 | except (ValueError, OverflowError) as e: | ||
218 | pass | ||
219 | data = data or "\N{REPLACEMENT CHARACTER}" | ||
86 | self.handle_data(data) | 220 | self.handle_data(data) |
87 | 221 | ||
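A concrete illustration of the compensation above: code point 147 names a C1 control character in Unicode, but in Windows-1252 byte 147 is the left double quotation mark, which is almost always what the document's author meant:

    print(repr(chr(147)))                                 # '\x93', a control character
    print(repr(bytearray([147]).decode("windows-1252")))  # '\u201c', i.e. a curly quote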
88 | def handle_entityref(self, name): | 222 | def handle_entityref(self, name): |
223 | """Handle a named entity reference by converting it to the | ||
224 | corresponding Unicode character(s) and treating it as textual | ||
225 | data. | ||
226 | |||
227 | :param name: Name of the entity reference. | ||
228 | """ | ||
89 | character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name) | 229 | character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name) |
90 | if character is not None: | 230 | if character is not None: |
91 | data = character | 231 | data = character |
92 | else: | 232 | else: |
93 | data = "&%s;" % name | 233 | # If this were XML, it would be ambiguous whether "&foo" |
234 | # was a character entity reference with a missing | ||
235 | # semicolon or the literal string "&foo". Since this is | ||
236 | # HTML, we have a complete list of all character entity references, | ||
237 | # and this one wasn't found, so assume it's the literal string "&foo". | ||
238 | data = "&%s" % name | ||
94 | self.handle_data(data) | 239 | self.handle_data(data) |
95 | 240 | ||
96 | def handle_comment(self, data): | 241 | def handle_comment(self, data): |
242 | """Handle an HTML comment. | ||
243 | |||
244 | :param data: The text of the comment. | ||
245 | """ | ||
97 | self.soup.endData() | 246 | self.soup.endData() |
98 | self.soup.handle_data(data) | 247 | self.soup.handle_data(data) |
99 | self.soup.endData(Comment) | 248 | self.soup.endData(Comment) |
100 | 249 | ||
101 | def handle_decl(self, data): | 250 | def handle_decl(self, data): |
251 | """Handle a DOCTYPE declaration. | ||
252 | |||
253 | :param data: The text of the declaration. | ||
254 | """ | ||
102 | self.soup.endData() | 255 | self.soup.endData() |
103 | if data.startswith("DOCTYPE "): | 256 | data = data[len("DOCTYPE "):] |
104 | data = data[len("DOCTYPE "):] | ||
105 | elif data == 'DOCTYPE': | ||
106 | # i.e. "<!DOCTYPE>" | ||
107 | data = '' | ||
108 | self.soup.handle_data(data) | 257 | self.soup.handle_data(data) |
109 | self.soup.endData(Doctype) | 258 | self.soup.endData(Doctype) |
110 | 259 | ||
111 | def unknown_decl(self, data): | 260 | def unknown_decl(self, data): |
261 | """Handle a declaration of unknown type -- probably a CDATA block. | ||
262 | |||
263 | :param data: The text of the declaration. | ||
264 | """ | ||
112 | if data.upper().startswith('CDATA['): | 265 | if data.upper().startswith('CDATA['): |
113 | cls = CData | 266 | cls = CData |
114 | data = data[len('CDATA['):] | 267 | data = data[len('CDATA['):] |
@@ -119,144 +272,116 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
119 | self.soup.endData(cls) | 272 | self.soup.endData(cls) |
120 | 273 | ||
121 | def handle_pi(self, data): | 274 | def handle_pi(self, data): |
275 | """Handle a processing instruction. | ||
276 | |||
277 | :param data: The text of the instruction. | ||
278 | """ | ||
122 | self.soup.endData() | 279 | self.soup.endData() |
123 | self.soup.handle_data(data) | 280 | self.soup.handle_data(data) |
281 | self._document_might_be_xml(data) | ||
124 | self.soup.endData(ProcessingInstruction) | 282 | self.soup.endData(ProcessingInstruction) |
125 | 283 | ||
126 | 284 | ||
127 | class HTMLParserTreeBuilder(HTMLTreeBuilder): | 285 | class HTMLParserTreeBuilder(HTMLTreeBuilder): |
128 | 286 | """A Beautiful soup `TreeBuilder` that uses the `HTMLParser` parser, | |
287 | found in the Python standard library. | ||
288 | """ | ||
129 | is_xml = False | 289 | is_xml = False |
130 | picklable = True | 290 | picklable = True |
131 | NAME = HTMLPARSER | 291 | NAME = HTMLPARSER |
132 | features = [NAME, HTML, STRICT] | 292 | features = [NAME, HTML, STRICT] |
133 | 293 | ||
134 | def __init__(self, *args, **kwargs): | 294 | # The html.parser knows which line number and position in the |
135 | if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED: | 295 | # original file is the source of an element. |
136 | kwargs['strict'] = False | 296 | TRACKS_LINE_NUMBERS = True |
137 | if CONSTRUCTOR_TAKES_CONVERT_CHARREFS: | ||
138 | kwargs['convert_charrefs'] = False | ||
139 | self.parser_args = (args, kwargs) | ||
140 | 297 | ||
298 | def __init__(self, parser_args=None, parser_kwargs=None, **kwargs): | ||
299 | """Constructor. | ||
300 | |||
301 | :param parser_args: Positional arguments to pass into | ||
302 | the BeautifulSoupHTMLParser constructor, once it's | ||
303 | invoked. | ||
304 | :param parser_kwargs: Keyword arguments to pass into | ||
305 | the BeautifulSoupHTMLParser constructor, once it's | ||
306 | invoked. | ||
307 | :param kwargs: Keyword arguments for the superclass constructor. | ||
308 | """ | ||
309 | # Some keyword arguments will be pulled out of kwargs and placed | ||
310 | # into parser_kwargs. | ||
311 | extra_parser_kwargs = dict() | ||
312 | for arg in ('on_duplicate_attribute',): | ||
313 | if arg in kwargs: | ||
314 | value = kwargs.pop(arg) | ||
315 | extra_parser_kwargs[arg] = value | ||
316 | super(HTMLParserTreeBuilder, self).__init__(**kwargs) | ||
317 | parser_args = parser_args or [] | ||
318 | parser_kwargs = parser_kwargs or {} | ||
319 | parser_kwargs.update(extra_parser_kwargs) | ||
320 | parser_kwargs['convert_charrefs'] = False | ||
321 | self.parser_args = (parser_args, parser_kwargs) | ||
322 | |||
141 | def prepare_markup(self, markup, user_specified_encoding=None, | 323 | def prepare_markup(self, markup, user_specified_encoding=None, |
142 | document_declared_encoding=None, exclude_encodings=None): | 324 | document_declared_encoding=None, exclude_encodings=None): |
143 | """ | 325 | |
144 | :return: A 4-tuple (markup, original encoding, encoding | 326 | """Run any preliminary steps necessary to make incoming markup |
145 | declared within markup, whether any characters had to be | 327 | acceptable to the parser. |
146 | replaced with REPLACEMENT CHARACTER). | 328 | |
329 | :param markup: Some markup -- probably a bytestring. | ||
330 | :param user_specified_encoding: The user asked to try this encoding. | ||
331 | :param document_declared_encoding: The markup itself claims to be | ||
332 | in this encoding. | ||
333 | :param exclude_encodings: The user asked _not_ to try any of | ||
334 | these encodings. | ||
335 | |||
336 | :yield: A series of 4-tuples: | ||
337 | (markup, encoding, declared encoding, | ||
338 | has undergone character replacement) | ||
339 | |||
340 | Each 4-tuple represents a strategy for converting the | ||
341 | document to Unicode and parsing it. Each strategy will be tried | ||
342 | in turn. | ||
147 | """ | 343 | """ |
148 | if isinstance(markup, str): | 344 | if isinstance(markup, str): |
345 | # Parse Unicode as-is. | ||
149 | yield (markup, None, None, False) | 346 | yield (markup, None, None, False) |
150 | return | 347 | return |
151 | 348 | ||
349 | # Ask UnicodeDammit to sniff the most likely encoding. | ||
350 | |||
351 | # This was provided by the end-user; treat it as a known | ||
352 | # definite encoding per the algorithm laid out in the HTML5 | ||
353 | # spec. (See the EncodingDetector class for details.) | ||
354 | known_definite_encodings = [user_specified_encoding] | ||
355 | |||
356 | # This was found in the document; treat it as a slightly lower-priority | ||
357 | # user encoding. | ||
358 | user_encodings = [document_declared_encoding] | ||
359 | |||
152 | try_encodings = [user_specified_encoding, document_declared_encoding] | 360 | try_encodings = [user_specified_encoding, document_declared_encoding] |
153 | dammit = UnicodeDammit(markup, try_encodings, is_html=True, | 361 | dammit = UnicodeDammit( |
154 | exclude_encodings=exclude_encodings) | 362 | markup, |
363 | known_definite_encodings=known_definite_encodings, | ||
364 | user_encodings=user_encodings, | ||
365 | is_html=True, | ||
366 | exclude_encodings=exclude_encodings | ||
367 | ) | ||
155 | yield (dammit.markup, dammit.original_encoding, | 368 | yield (dammit.markup, dammit.original_encoding, |
156 | dammit.declared_html_encoding, | 369 | dammit.declared_html_encoding, |
157 | dammit.contains_replacement_characters) | 370 | dammit.contains_replacement_characters) |
158 | 371 | ||
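known_definite_encodings versus user_encodings mirrors the HTML5 encoding-sniffing algorithm: an encoding the caller asserted outranks one merely declared inside the document. A hedged sketch against the vendored bs4 (this keyword API exists from 4.10 on):

    from bs4.dammit import UnicodeDammit

    data = "Caf\xe9".encode("windows-1252")
    dammit = UnicodeDammit(
        data,
        known_definite_encodings=["windows-1252"],  # caller's assertion, tried first
        user_encodings=["iso-8859-7"],              # in-document claim, lower priority
    )
    print(dammit.unicode_markup, dammit.original_encoding)  # Café windows-1252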
159 | def feed(self, markup): | 372 | def feed(self, markup): |
373 | """Run some incoming markup through some parsing process, | ||
374 | populating the `BeautifulSoup` object in self.soup. | ||
375 | """ | ||
160 | args, kwargs = self.parser_args | 376 | args, kwargs = self.parser_args |
161 | parser = BeautifulSoupHTMLParser(*args, **kwargs) | 377 | parser = BeautifulSoupHTMLParser(*args, **kwargs) |
162 | parser.soup = self.soup | 378 | parser.soup = self.soup |
163 | try: | 379 | try: |
164 | parser.feed(markup) | 380 | parser.feed(markup) |
165 | except HTMLParseError as e: | 381 | parser.close() |
166 | warnings.warn(RuntimeWarning( | 382 | except AssertionError as e: |
167 | "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help.")) | 383 | # html.parser raises AssertionError in rare cases to |
168 | raise e | 384 | # indicate a fatal problem with the markup, especially |
169 | 385 | # when there's an error in the doctype declaration. | |
170 | # Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some | 386 | raise ParserRejectedMarkup(e) |
171 | # 3.2.3 code. This ensures they don't treat markup like <p></p> as a | 387 | parser.already_closed_empty_element = [] |
172 | # string. | ||
173 | # | ||
174 | # XXX This code can be removed once most Python 3 users are on 3.2.3. | ||
175 | if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT: | ||
176 | import re | ||
177 | attrfind_tolerant = re.compile( | ||
178 | r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*' | ||
179 | r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?') | ||
180 | HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant | ||
181 | |||
182 | locatestarttagend = re.compile(r""" | ||
183 | <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name | ||
184 | (?:\s+ # whitespace before attribute name | ||
185 | (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name | ||
186 | (?:\s*=\s* # value indicator | ||
187 | (?:'[^']*' # LITA-enclosed value | ||
188 | |\"[^\"]*\" # LIT-enclosed value | ||
189 | |[^'\">\s]+ # bare value | ||
190 | ) | ||
191 | )? | ||
192 | ) | ||
193 | )* | ||
194 | \s* # trailing whitespace | ||
195 | """, re.VERBOSE) | ||
196 | BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend | ||
197 | |||
198 | from html.parser import tagfind, attrfind | ||
199 | |||
200 | def parse_starttag(self, i): | ||
201 | self.__starttag_text = None | ||
202 | endpos = self.check_for_whole_start_tag(i) | ||
203 | if endpos < 0: | ||
204 | return endpos | ||
205 | rawdata = self.rawdata | ||
206 | self.__starttag_text = rawdata[i:endpos] | ||
207 | |||
208 | # Now parse the data between i+1 and j into a tag and attrs | ||
209 | attrs = [] | ||
210 | match = tagfind.match(rawdata, i+1) | ||
211 | assert match, 'unexpected call to parse_starttag()' | ||
212 | k = match.end() | ||
213 | self.lasttag = tag = rawdata[i+1:k].lower() | ||
214 | while k < endpos: | ||
215 | if self.strict: | ||
216 | m = attrfind.match(rawdata, k) | ||
217 | else: | ||
218 | m = attrfind_tolerant.match(rawdata, k) | ||
219 | if not m: | ||
220 | break | ||
221 | attrname, rest, attrvalue = m.group(1, 2, 3) | ||
222 | if not rest: | ||
223 | attrvalue = None | ||
224 | elif attrvalue[:1] == '\'' == attrvalue[-1:] or \ | ||
225 | attrvalue[:1] == '"' == attrvalue[-1:]: | ||
226 | attrvalue = attrvalue[1:-1] | ||
227 | if attrvalue: | ||
228 | attrvalue = self.unescape(attrvalue) | ||
229 | attrs.append((attrname.lower(), attrvalue)) | ||
230 | k = m.end() | ||
231 | |||
232 | end = rawdata[k:endpos].strip() | ||
233 | if end not in (">", "/>"): | ||
234 | lineno, offset = self.getpos() | ||
235 | if "\n" in self.__starttag_text: | ||
236 | lineno = lineno + self.__starttag_text.count("\n") | ||
237 | offset = len(self.__starttag_text) \ | ||
238 | - self.__starttag_text.rfind("\n") | ||
239 | else: | ||
240 | offset = offset + len(self.__starttag_text) | ||
241 | if self.strict: | ||
242 | self.error("junk characters in start tag: %r" | ||
243 | % (rawdata[k:endpos][:20],)) | ||
244 | self.handle_data(rawdata[i:endpos]) | ||
245 | return endpos | ||
246 | if end.endswith('/>'): | ||
247 | # XHTML-style empty tag: <span attr="value" /> | ||
248 | self.handle_startendtag(tag, attrs) | ||
249 | else: | ||
250 | self.handle_starttag(tag, attrs) | ||
251 | if tag in self.CDATA_CONTENT_ELEMENTS: | ||
252 | self.set_cdata_mode(tag) | ||
253 | return endpos | ||
254 | |||
255 | def set_cdata_mode(self, elem): | ||
256 | self.cdata_elem = elem.lower() | ||
257 | self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I) | ||
258 | |||
259 | BeautifulSoupHTMLParser.parse_starttag = parse_starttag | ||
260 | BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode | ||
261 | |||
262 | CONSTRUCTOR_TAKES_STRICT = True | ||
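With the Python 3.2 monkeypatching gone, error recovery is uniform: anything the parser cannot handle surfaces as ParserRejectedMarkup, and the caller moves on to the next markup-conversion strategy yielded by prepare_markup(). A simplified sketch of that driver loop; this is illustrative, the real loop lives in BeautifulSoup.__init__:

    from bs4.builder import ParserRejectedMarkup

    def try_strategies(builder, markup):
        # Each yielded 4-tuple is one way to convert the document to
        # Unicode; the first one the parser accepts wins.
        for (converted, encoding, declared, replaced) in builder.prepare_markup(markup):
            try:
                builder.feed(converted)
                return encoding
            except ParserRejectedMarkup:
                continue
        raise ParserRejectedMarkup("every conversion strategy was rejected")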
diff --git a/bitbake/lib/bs4/builder/_lxml.py b/bitbake/lib/bs4/builder/_lxml.py index 9c6c14ee65..4f7cf74681 100644 --- a/bitbake/lib/bs4/builder/_lxml.py +++ b/bitbake/lib/bs4/builder/_lxml.py | |||
@@ -1,19 +1,28 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | __all__ = [ | 4 | __all__ = [ |
2 | 'LXMLTreeBuilderForXML', | 5 | 'LXMLTreeBuilderForXML', |
3 | 'LXMLTreeBuilder', | 6 | 'LXMLTreeBuilder', |
4 | ] | 7 | ] |
5 | 8 | ||
9 | try: | ||
10 | from collections.abc import Callable # Python 3.3+; the collections alias is gone in 3.10 | ||
11 | except ImportError as e: | ||
12 | from collections import Callable | ||
13 | |||
6 | from io import BytesIO | 14 | from io import BytesIO |
7 | from io import StringIO | 15 | from io import StringIO |
8 | import collections | ||
9 | from lxml import etree | 16 | from lxml import etree |
10 | from bs4.element import ( | 17 | from bs4.element import ( |
11 | Comment, | 18 | Comment, |
12 | Doctype, | 19 | Doctype, |
13 | NamespacedAttribute, | 20 | NamespacedAttribute, |
14 | ProcessingInstruction, | 21 | ProcessingInstruction, |
22 | XMLProcessingInstruction, | ||
15 | ) | 23 | ) |
16 | from bs4.builder import ( | 24 | from bs4.builder import ( |
25 | DetectsXMLParsedAsHTML, | ||
17 | FAST, | 26 | FAST, |
18 | HTML, | 27 | HTML, |
19 | HTMLTreeBuilder, | 28 | HTMLTreeBuilder, |
@@ -25,10 +34,15 @@ from bs4.dammit import EncodingDetector | |||
25 | 34 | ||
26 | LXML = 'lxml' | 35 | LXML = 'lxml' |
27 | 36 | ||
37 | def _invert(d): | ||
38 | "Invert a dictionary." | ||
39 | return dict((v,k) for k, v in list(d.items())) | ||
40 | |||
28 | class LXMLTreeBuilderForXML(TreeBuilder): | 41 | class LXMLTreeBuilderForXML(TreeBuilder): |
29 | DEFAULT_PARSER_CLASS = etree.XMLParser | 42 | DEFAULT_PARSER_CLASS = etree.XMLParser |
30 | 43 | ||
31 | is_xml = True | 44 | is_xml = True |
45 | processing_instruction_class = XMLProcessingInstruction | ||
32 | 46 | ||
33 | NAME = "lxml-xml" | 47 | NAME = "lxml-xml" |
34 | ALTERNATE_NAMES = ["xml"] | 48 | ALTERNATE_NAMES = ["xml"] |
@@ -40,26 +54,79 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
40 | 54 | ||
41 | # This namespace mapping is specified in the XML Namespace | 55 | # This namespace mapping is specified in the XML Namespace |
42 | # standard. | 56 | # standard. |
43 | DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"} | 57 | DEFAULT_NSMAPS = dict(xml='http://www.w3.org/XML/1998/namespace') |
58 | |||
59 | DEFAULT_NSMAPS_INVERTED = _invert(DEFAULT_NSMAPS) | ||
60 | |||
61 | # NOTE: If we parsed Element objects and looked at .sourceline, | ||
62 | # we'd be able to see the line numbers from the original document. | ||
63 | # But instead we build an XMLParser or HTMLParser object to serve | ||
64 | # as the target of parse messages, and those messages don't include | ||
65 | # line numbers. | ||
66 | # See: https://bugs.launchpad.net/lxml/+bug/1846906 | ||
67 | |||
68 | def initialize_soup(self, soup): | ||
69 | """Let the BeautifulSoup object know about the standard namespace | ||
70 | mapping. | ||
71 | |||
72 | :param soup: A `BeautifulSoup`. | ||
73 | """ | ||
74 | super(LXMLTreeBuilderForXML, self).initialize_soup(soup) | ||
75 | self._register_namespaces(self.DEFAULT_NSMAPS) | ||
76 | |||
77 | def _register_namespaces(self, mapping): | ||
78 | """Let the BeautifulSoup object know about namespaces encountered | ||
79 | while parsing the document. | ||
80 | |||
81 | This might be useful later on when creating CSS selectors. | ||
82 | |||
83 | This will track (almost) all namespaces, even ones that were | ||
84 | only in scope for part of the document. If two namespaces have | ||
85 | the same prefix, only the first one encountered will be | ||
86 | tracked. Un-prefixed namespaces are not tracked. | ||
44 | 87 | ||
88 | :param mapping: A dictionary mapping namespace prefixes to URIs. | ||
89 | """ | ||
90 | for key, value in list(mapping.items()): | ||
91 | # This is 'if key' and not 'if key is not None' because we | ||
92 | # don't track un-prefixed namespaces. Soupselect will | ||
93 | # treat an un-prefixed namespace as the default, which | ||
94 | # causes confusion in some cases. | ||
95 | if key and key not in self.soup._namespaces: | ||
96 | # Let the BeautifulSoup object know about a new namespace. | ||
97 | # If there are multiple namespaces defined with the same | ||
98 | # prefix, the first one in the document takes precedence. | ||
99 | self.soup._namespaces[key] = value | ||
100 | |||
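The prefix bookkeeping is first-one-wins and skips un-prefixed namespaces, exactly as the comments say. A tiny standalone sketch of the same rule, using plain dicts (soup._namespaces itself is internal bs4 state):

    def register_namespaces(tracked, mapping):
        for key, value in mapping.items():
            if key and key not in tracked:  # skip un-prefixed, keep the first
                tracked[key] = value

    tracked = {}
    register_namespaces(tracked, {"xml": "http://www.w3.org/XML/1998/namespace"})
    register_namespaces(tracked, {"xml": "urn:shadowed", None: "urn:default"})
    print(tracked)  # {'xml': 'http://www.w3.org/XML/1998/namespace'}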
45 | def default_parser(self, encoding): | 101 | def default_parser(self, encoding): |
46 | # This can either return a parser object or a class, which | 102 | """Find the default parser for the given encoding. |
47 | # will be instantiated with default arguments. | 103 | |
104 | :param encoding: A string. | ||
105 | :return: Either a parser object or a class, which | ||
106 | will be instantiated with default arguments. | ||
107 | """ | ||
48 | if self._default_parser is not None: | 108 | if self._default_parser is not None: |
49 | return self._default_parser | 109 | return self._default_parser |
50 | return etree.XMLParser( | 110 | return etree.XMLParser( |
51 | target=self, strip_cdata=False, recover=True, encoding=encoding) | 111 | target=self, strip_cdata=False, recover=True, encoding=encoding) |
52 | 112 | ||
53 | def parser_for(self, encoding): | 113 | def parser_for(self, encoding): |
114 | """Instantiate an appropriate parser for the given encoding. | ||
115 | |||
116 | :param encoding: A string. | ||
117 | :return: A parser object such as an `etree.XMLParser`. | ||
118 | """ | ||
54 | # Use the default parser. | 119 | # Use the default parser. |
55 | parser = self.default_parser(encoding) | 120 | parser = self.default_parser(encoding) |
56 | 121 | ||
57 | if isinstance(parser, collections.Callable): | 122 | if isinstance(parser, Callable): |
58 | # Instantiate the parser with default arguments | 123 | # Instantiate the parser with default arguments |
59 | parser = parser(target=self, strip_cdata=False, encoding=encoding) | 124 | parser = parser( |
125 | target=self, strip_cdata=False, recover=True, encoding=encoding | ||
126 | ) | ||
60 | return parser | 127 | return parser |
61 | 128 | ||
62 | def __init__(self, parser=None, empty_element_tags=None): | 129 | def __init__(self, parser=None, empty_element_tags=None, **kwargs): |
63 | # TODO: Issue a warning if parser is present but not a | 130 | # TODO: Issue a warning if parser is present but not a |
64 | # callable, since that means there's no way to create new | 131 | # callable, since that means there's no way to create new |
65 | # parsers for different encodings. | 132 | # parsers for different encodings. |
@@ -67,8 +134,10 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
67 | if empty_element_tags is not None: | 134 | if empty_element_tags is not None: |
68 | self.empty_element_tags = set(empty_element_tags) | 135 | self.empty_element_tags = set(empty_element_tags) |
69 | self.soup = None | 136 | self.soup = None |
70 | self.nsmaps = [self.DEFAULT_NSMAPS] | 137 | self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED] |
71 | 138 | self.active_namespace_prefixes = [dict(self.DEFAULT_NSMAPS)] | |
139 | super(LXMLTreeBuilderForXML, self).__init__(**kwargs) | ||
140 | |||
72 | def _getNsTag(self, tag): | 141 | def _getNsTag(self, tag): |
73 | # Split the namespace URL out of a fully-qualified lxml tag | 142 | # Split the namespace URL out of a fully-qualified lxml tag |
74 | # name. Copied from lxml's src/lxml/sax.py. | 143 | # name. Copied from lxml's src/lxml/sax.py. |
@@ -80,16 +149,51 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
80 | def prepare_markup(self, markup, user_specified_encoding=None, | 149 | def prepare_markup(self, markup, user_specified_encoding=None, |
81 | exclude_encodings=None, | 150 | exclude_encodings=None, |
82 | document_declared_encoding=None): | 151 | document_declared_encoding=None): |
83 | """ | 152 | """Run any preliminary steps necessary to make incoming markup |
84 | :yield: A series of 4-tuples. | 153 | acceptable to the parser. |
154 | |||
155 | lxml really wants to get a bytestring and convert it to | ||
156 | Unicode itself. So instead of using UnicodeDammit to convert | ||
157 | the bytestring to Unicode using different encodings, this | ||
158 | implementation uses EncodingDetector to iterate over the | ||
159 | encodings, and tell lxml to try to parse the document as each | ||
160 | one in turn. | ||
161 | |||
162 | :param markup: Some markup -- hopefully a bytestring. | ||
163 | :param user_specified_encoding: The user asked to try this encoding. | ||
164 | :param document_declared_encoding: The markup itself claims to be | ||
165 | in this encoding. | ||
166 | :param exclude_encodings: The user asked _not_ to try any of | ||
167 | these encodings. | ||
168 | |||
169 | :yield: A series of 4-tuples: | ||
85 | (markup, encoding, declared encoding, | 170 | (markup, encoding, declared encoding, |
86 | has undergone character replacement) | 171 | has undergone character replacement) |
87 | 172 | ||
88 | Each 4-tuple represents a strategy for parsing the document. | 173 | Each 4-tuple represents a strategy for converting the |
174 | document to Unicode and parsing it. Each strategy will be tried | ||
175 | in turn. | ||
89 | """ | 176 | """ |
177 | is_html = not self.is_xml | ||
178 | if is_html: | ||
179 | self.processing_instruction_class = ProcessingInstruction | ||
180 | # We're in HTML mode, so if we're given XML, that's worth | ||
181 | # noting. | ||
182 | DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( | ||
183 | markup, stacklevel=3 | ||
184 | ) | ||
185 | else: | ||
186 | self.processing_instruction_class = XMLProcessingInstruction | ||
187 | |||
90 | if isinstance(markup, str): | 188 | if isinstance(markup, str): |
91 | # We were given Unicode. Maybe lxml can parse Unicode on | 189 | # We were given Unicode. Maybe lxml can parse Unicode on |
92 | # this system? | 190 | # this system? |
191 | |||
192 | # TODO: This is a workaround for | ||
193 | # https://bugs.launchpad.net/lxml/+bug/1948551. | ||
194 | # We can remove it once the upstream issue is fixed. | ||
195 | if len(markup) > 0 and markup[0] == u'\N{BYTE ORDER MARK}': | ||
196 | markup = markup[1:] | ||
93 | yield markup, None, document_declared_encoding, False | 197 | yield markup, None, document_declared_encoding, False |
94 | 198 | ||
95 | if isinstance(markup, str): | 199 | if isinstance(markup, str): |
@@ -98,14 +202,19 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
98 | yield (markup.encode("utf8"), "utf8", | 202 | yield (markup.encode("utf8"), "utf8", |
99 | document_declared_encoding, False) | 203 | document_declared_encoding, False) |
100 | 204 | ||
101 | # Instead of using UnicodeDammit to convert the bytestring to | 205 | # This was provided by the end-user; treat it as a known |
102 | # Unicode using different encodings, use EncodingDetector to | 206 | # definite encoding per the algorithm laid out in the HTML5 |
103 | # iterate over the encodings, and tell lxml to try to parse | 207 | # spec. (See the EncodingDetector class for details.) |
104 | # the document as each one in turn. | 208 | known_definite_encodings = [user_specified_encoding] |
105 | is_html = not self.is_xml | 209 | |
106 | try_encodings = [user_specified_encoding, document_declared_encoding] | 210 | # This was found in the document; treat it as a slightly lower-priority |
211 | # user encoding. | ||
212 | user_encodings = [document_declared_encoding] | ||
107 | detector = EncodingDetector( | 213 | detector = EncodingDetector( |
108 | markup, try_encodings, is_html, exclude_encodings) | 214 | markup, known_definite_encodings=known_definite_encodings, |
215 | user_encodings=user_encodings, is_html=is_html, | ||
216 | exclude_encodings=exclude_encodings | ||
217 | ) | ||
109 | for encoding in detector.encodings: | 218 | for encoding in detector.encodings: |
110 | yield (detector.markup, encoding, document_declared_encoding, False) | 219 | yield (detector.markup, encoding, document_declared_encoding, False) |
111 | 220 | ||
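Reviewer's aside: the 4-tuples yielded here are consumed by a retry loop in the caller. A minimal sketch of that loop, assuming a tree-builder instance and hypothetical `data` (this mirrors BeautifulSoup's internal behavior rather than quoting it; the import path is assumed):

```python
from bs4.builder import ParserRejectedMarkup  # location assumed; re-exported by bs4

def parse_with_fallbacks(builder, data, user_encoding=None):
    # Try each (markup, encoding, declared_encoding, replaced) strategy
    # until one of them parses without the parser rejecting the markup.
    last_error = None
    for converted, encoding, declared, replaced in builder.prepare_markup(
            data, user_specified_encoding=user_encoding):
        try:
            builder.feed(converted)
            return
        except ParserRejectedMarkup as e:
            last_error = e  # remember the failure and try the next strategy
    raise last_error or ParserRejectedMarkup("no parsing strategy succeeded")
```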
@@ -128,25 +237,45 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
128 | self.parser.feed(data) | 237 | self.parser.feed(data) |
129 | self.parser.close() | 238 | self.parser.close() |
130 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: | 239 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: |
131 | raise ParserRejectedMarkup(str(e)) | 240 | raise ParserRejectedMarkup(e) |
132 | 241 | ||
133 | def close(self): | 242 | def close(self): |
134 | self.nsmaps = [self.DEFAULT_NSMAPS] | 243 | self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED] |
135 | 244 | ||
136 | def start(self, name, attrs, nsmap={}): | 245 | def start(self, name, attrs, nsmap={}): |
137 | # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. | 246 | # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. |
138 | attrs = dict(attrs) | 247 | attrs = dict(attrs) |
139 | nsprefix = None | 248 | nsprefix = None |
140 | # Invert each namespace map as it comes in. | 249 | # Invert each namespace map as it comes in. |
141 | if len(self.nsmaps) > 1: | 250 | if len(nsmap) == 0 and len(self.nsmaps) > 1: |
142 | # There are no new namespaces for this tag, but | 251 | # There are no new namespaces for this tag, but |
143 | # non-default namespaces are in play, so we need a | 252 | # non-default namespaces are in play, so we need a |
144 | # separate tag stack to know when they end. | 253 | # separate tag stack to know when they end. |
145 | self.nsmaps.append(None) | 254 | self.nsmaps.append(None) |
146 | elif len(nsmap) > 0: | 255 | elif len(nsmap) > 0: |
147 | # A new namespace mapping has come into play. | 256 | # A new namespace mapping has come into play. |
148 | inverted_nsmap = dict((value, key) for key, value in list(nsmap.items())) | 257 | |
149 | self.nsmaps.append(inverted_nsmap) | 258 | # First, Let the BeautifulSoup object know about it. |
259 | self._register_namespaces(nsmap) | ||
260 | |||
261 | # Then, add it to our running list of inverted namespace | ||
262 | # mappings. | ||
263 | self.nsmaps.append(_invert(nsmap)) | ||
264 | |||
265 | # The currently active namespace prefixes have | ||
266 | # changed. Calculate the new mapping so it can be stored | ||
267 | # with all Tag objects created while these prefixes are in | ||
268 | # scope. | ||
269 | current_mapping = dict(self.active_namespace_prefixes[-1]) | ||
270 | current_mapping.update(nsmap) | ||
271 | |||
272 | # We should not track un-prefixed namespaces as we can only hold one | ||
273 | # and it will be recognized as the default namespace by soupsieve, | ||
274 | # which may be confusing in some situations. | ||
275 | if '' in current_mapping: | ||
276 | del current_mapping[''] | ||
277 | self.active_namespace_prefixes.append(current_mapping) | ||
278 | |||
150 | # Also treat the namespace mapping as a set of attributes on the | 279 | # Also treat the namespace mapping as a set of attributes on the |
151 | # tag, so we can recreate it later. | 280 | # tag, so we can recreate it later. |
152 | attrs = attrs.copy() | 281 | attrs = attrs.copy() |
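A simplified, self-contained sketch of the bookkeeping this hunk introduces (the real `_invert` helper lives elsewhere in the module, and the actual code only pushes placeholders once non-default namespaces are in play):

```python
XML_NS = {"xml": "http://www.w3.org/XML/1998/namespace"}

def _invert(d):
    # Turn a {prefix: url} namespace map into {url: prefix}.
    return dict((v, k) for k, v in d.items())

nsmaps = [_invert(XML_NS)]        # stack of inverted maps (None = no-op entry)
active_prefixes = [dict(XML_NS)]  # cumulative {prefix: url} currently in scope

def start_tag(nsmap):
    if not nsmap:
        nsmaps.append(None)       # placeholder so end_tag() stays balanced
        return
    nsmaps.append(_invert(nsmap))
    current = dict(active_prefixes[-1])
    current.update(nsmap)
    current.pop("", None)         # never track the un-prefixed default namespace
    active_prefixes.append(current)

def end_tag():
    if nsmaps.pop() is not None:
        active_prefixes.pop()     # that tag's prefixes just went out of scope
```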
@@ -171,8 +300,11 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
171 | 300 | ||
172 | namespace, name = self._getNsTag(name) | 301 | namespace, name = self._getNsTag(name) |
173 | nsprefix = self._prefix_for_namespace(namespace) | 302 | nsprefix = self._prefix_for_namespace(namespace) |
174 | self.soup.handle_starttag(name, namespace, nsprefix, attrs) | 303 | self.soup.handle_starttag( |
175 | 304 | name, namespace, nsprefix, attrs, | |
305 | namespaces=self.active_namespace_prefixes[-1] | ||
306 | ) | ||
307 | |||
176 | def _prefix_for_namespace(self, namespace): | 308 | def _prefix_for_namespace(self, namespace): |
177 | """Find the currently active prefix for the given namespace.""" | 309 | """Find the currently active prefix for the given namespace.""" |
178 | if namespace is None: | 310 | if namespace is None: |
@@ -196,13 +328,20 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
196 | if len(self.nsmaps) > 1: | 328 | if len(self.nsmaps) > 1: |
197 | # This tag, or one of its parents, introduced a namespace | 329 | # This tag, or one of its parents, introduced a namespace |
198 | # mapping, so pop it off the stack. | 330 | # mapping, so pop it off the stack. |
199 | self.nsmaps.pop() | 331 | out_of_scope_nsmap = self.nsmaps.pop() |
200 | 332 | ||
333 | if out_of_scope_nsmap is not None: | ||
334 | # This tag introduced a namespace mapping which is no | ||
335 | # longer in scope. Recalculate the currently active | ||
336 | # namespace prefixes. | ||
337 | self.active_namespace_prefixes.pop() | ||
338 | |||
201 | def pi(self, target, data): | 339 | def pi(self, target, data): |
202 | self.soup.endData() | 340 | self.soup.endData() |
203 | self.soup.handle_data(target + ' ' + data) | 341 | data = target + ' ' + data |
204 | self.soup.endData(ProcessingInstruction) | 342 | self.soup.handle_data(data) |
205 | 343 | self.soup.endData(self.processing_instruction_class) | |
344 | |||
206 | def data(self, content): | 345 | def data(self, content): |
207 | self.soup.handle_data(content) | 346 | self.soup.handle_data(content) |
208 | 347 | ||
@@ -229,6 +368,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): | |||
229 | 368 | ||
230 | features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] | 369 | features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] |
231 | is_xml = False | 370 | is_xml = False |
371 | processing_instruction_class = ProcessingInstruction | ||
232 | 372 | ||
233 | def default_parser(self, encoding): | 373 | def default_parser(self, encoding): |
234 | return etree.HTMLParser | 374 | return etree.HTMLParser |
@@ -240,7 +380,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): | |||
240 | self.parser.feed(markup) | 380 | self.parser.feed(markup) |
241 | self.parser.close() | 381 | self.parser.close() |
242 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: | 382 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: |
243 | raise ParserRejectedMarkup(str(e)) | 383 | raise ParserRejectedMarkup(e) |
244 | 384 | ||
245 | 385 | ||
246 | def test_fragment_to_document(self, fragment): | 386 | def test_fragment_to_document(self, fragment): |
diff --git a/bitbake/lib/bs4/css.py b/bitbake/lib/bs4/css.py new file mode 100644 index 0000000000..cd1fd2df88 --- /dev/null +++ b/bitbake/lib/bs4/css.py | |||
@@ -0,0 +1,274 @@ | |||
1 | """Integration code for CSS selectors using Soup Sieve (pypi: soupsieve).""" | ||
2 | |||
3 | # We don't use soupsieve | ||
4 | soupsieve = None | ||
5 | |||
6 | |||
7 | class CSS(object): | ||
8 | """A proxy object against the soupsieve library, to simplify its | ||
9 | CSS selector API. | ||
10 | |||
11 | Acquire this object through the .css attribute on the | ||
12 | BeautifulSoup object, or on the Tag you want to use as the | ||
13 | starting point for a CSS selector. | ||
14 | |||
15 | The main advantage of doing this is that the tag to be selected | ||
16 | against doesn't need to be explicitly specified in the function | ||
17 | calls, since it's already scoped to a tag. | ||
18 | """ | ||
19 | |||
20 | def __init__(self, tag, api=soupsieve): | ||
21 | """Constructor. | ||
22 | |||
23 | You don't need to instantiate this class yourself; instead, | ||
24 | access the .css attribute on the BeautifulSoup object, or on | ||
25 | the Tag you want to use as the starting point for your CSS | ||
26 | selector. | ||
27 | |||
28 | :param tag: All CSS selectors will use this as their starting | ||
29 | point. | ||
30 | |||
31 | :param api: A plug-in replacement for the soupsieve module, | ||
32 | designed mainly for use in tests. | ||
33 | """ | ||
34 | if api is None: | ||
35 | raise NotImplementedError( | ||
36 | "Cannot execute CSS selectors because the soupsieve package is not installed." | ||
37 | ) | ||
38 | self.api = api | ||
39 | self.tag = tag | ||
40 | |||
41 | def escape(self, ident): | ||
42 | """Escape a CSS identifier. | ||
43 | |||
44 | This is a simple wrapper around soupsieve.escape(). See the | ||
45 | documentation for that function for more information. | ||
46 | """ | ||
47 | if soupsieve is None: | ||
48 | raise NotImplementedError( | ||
49 | "Cannot escape CSS identifiers because the soupsieve package is not installed." | ||
50 | ) | ||
51 | return self.api.escape(ident) | ||
52 | |||
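For illustration only, assuming a full Beautiful Soup install where soupsieve is importable (this bundled copy pins `soupsieve = None`, so the same call here raises NotImplementedError):

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup('<p id="main.content">Hi</p>', "html.parser")
escaped = soup.css.escape("main.content")             # -> 'main\\.content'
print(soup.css.select_one("#" + escaped).get_text())  # expected: Hi
```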
53 | def _ns(self, ns, select): | ||
54 | """Normalize a dictionary of namespaces.""" | ||
55 | if not isinstance(select, self.api.SoupSieve) and ns is None: | ||
56 | # If the selector is a precompiled pattern, it already has | ||
57 | # a namespace context compiled in, which cannot be | ||
58 | # replaced. | ||
59 | ns = self.tag._namespaces | ||
60 | return ns | ||
61 | |||
62 | def _rs(self, results): | ||
63 | """Normalize a list of results to a Resultset. | ||
64 | |||
65 | A ResultSet is more consistent with the rest of Beautiful | ||
66 | Soup's API, and ResultSet.__getattr__ has a helpful error | ||
67 | message if you try to treat a list of results as a single | ||
68 | result (a common mistake). | ||
69 | """ | ||
70 | # Import here to avoid circular import | ||
71 | from bs4.element import ResultSet | ||
72 | return ResultSet(None, results) | ||
73 | |||
74 | def compile(self, select, namespaces=None, flags=0, **kwargs): | ||
75 | """Pre-compile a selector and return the compiled object. | ||
76 | |||
77 | :param select: A CSS selector. | ||
78 | |||
79 | :param namespaces: A dictionary mapping namespace prefixes | ||
80 | used in the CSS selector to namespace URIs. By default, | ||
81 | Beautiful Soup will use the prefixes it encountered while | ||
82 | parsing the document. | ||
83 | |||
84 | :param flags: Flags to be passed into Soup Sieve's | ||
85 | soupsieve.compile() method. | ||
86 | |||
87 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
88 | soupsieve.compile() method. | ||
89 | |||
90 | :return: A precompiled selector object. | ||
91 | :rtype: soupsieve.SoupSieve | ||
92 | """ | ||
93 | return self.api.compile( | ||
94 | select, self._ns(namespaces, select), flags, **kwargs | ||
95 | ) | ||
96 | |||
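A hedged usage sketch (again assuming soupsieve is actually installed): pre-compiling pays off when the same selector runs against many tags, since the namespace context is baked in once.

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup("<ul><li class='a'>1</li><li>2</li></ul>", "html.parser")
selector = soup.css.compile("li.a")                         # a soupsieve.SoupSieve object
print([li.get_text() for li in selector.select(soup.ul)])   # expected: ['1']
```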
97 | def select_one(self, select, namespaces=None, flags=0, **kwargs): | ||
98 | """Perform a CSS selection operation on the current Tag and return the | ||
99 | first result. | ||
100 | |||
101 | This uses the Soup Sieve library. For more information, see | ||
102 | that library's documentation for the soupsieve.select_one() | ||
103 | method. | ||
104 | |||
105 | :param select: A CSS selector. | ||
106 | |||
107 | :param namespaces: A dictionary mapping namespace prefixes | ||
108 | used in the CSS selector to namespace URIs. By default, | ||
109 | Beautiful Soup will use the prefixes it encountered while | ||
110 | parsing the document. | ||
111 | |||
112 | :param flags: Flags to be passed into Soup Sieve's | ||
113 | soupsieve.select_one() method. | ||
114 | |||
115 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
116 | soupsieve.select_one() method. | ||
117 | |||
118 | :return: A Tag, or None if the selector has no match. | ||
119 | :rtype: bs4.element.Tag | ||
120 | |||
121 | """ | ||
122 | return self.api.select_one( | ||
123 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
124 | ) | ||
125 | |||
126 | def select(self, select, namespaces=None, limit=0, flags=0, **kwargs): | ||
127 | """Perform a CSS selection operation on the current Tag. | ||
128 | |||
129 | This uses the Soup Sieve library. For more information, see | ||
130 | that library's documentation for the soupsieve.select() | ||
131 | method. | ||
132 | |||
133 | :param select: A string containing a CSS selector. | ||
134 | |||
135 | :param namespaces: A dictionary mapping namespace prefixes | ||
136 | used in the CSS selector to namespace URIs. By default, | ||
137 | Beautiful Soup will pass in the prefixes it encountered while | ||
138 | parsing the document. | ||
139 | |||
140 | :param limit: After finding this number of results, stop looking. | ||
141 | |||
142 | :param flags: Flags to be passed into Soup Sieve's | ||
143 | soupsieve.select() method. | ||
144 | |||
145 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
146 | soupsieve.select() method. | ||
147 | |||
148 | :return: A ResultSet of Tag objects. | ||
149 | :rtype: bs4.element.ResultSet | ||
150 | |||
151 | """ | ||
152 | if limit is None: | ||
153 | limit = 0 | ||
154 | |||
155 | return self._rs( | ||
156 | self.api.select( | ||
157 | select, self.tag, self._ns(namespaces, select), limit, flags, | ||
158 | **kwargs | ||
159 | ) | ||
160 | ) | ||
161 | |||
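Typical calls, with the same soupsieve caveat; `limit` mirrors find_all()'s limit:

```python
from bs4 import BeautifulSoup

html = "<div><p class='title'>A</p><p>B</p><p>C</p></div>"
soup = BeautifulSoup(html, "html.parser")

print(soup.css.select_one("p.title").get_text())              # expected: A
print([p.get_text() for p in soup.css.select("p", limit=2)])  # expected: ['A', 'B']
```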
162 | def iselect(self, select, namespaces=None, limit=0, flags=0, **kwargs): | ||
163 | """Perform a CSS selection operation on the current Tag. | ||
164 | |||
165 | This uses the Soup Sieve library. For more information, see | ||
166 | that library's documentation for the soupsieve.iselect() | ||
167 | method. It is the same as select(), but it returns a generator | ||
168 | instead of a list. | ||
169 | |||
170 | :param select: A string containing a CSS selector. | ||
171 | |||
172 | :param namespaces: A dictionary mapping namespace prefixes | ||
173 | used in the CSS selector to namespace URIs. By default, | ||
174 | Beautiful Soup will pass in the prefixes it encountered while | ||
175 | parsing the document. | ||
176 | |||
177 | :param limit: After finding this number of results, stop looking. | ||
178 | |||
179 | :param flags: Flags to be passed into Soup Sieve's | ||
180 | soupsieve.iselect() method. | ||
181 | |||
182 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
183 | soupsieve.iselect() method. | ||
184 | |||
185 | :return: A generator | ||
186 | :rtype: types.GeneratorType | ||
187 | """ | ||
188 | return self.api.iselect( | ||
189 | select, self.tag, self._ns(namespaces, select), limit, flags, **kwargs | ||
190 | ) | ||
191 | |||
192 | def closest(self, select, namespaces=None, flags=0, **kwargs): | ||
193 | """Find the Tag closest to this one that matches the given selector. | ||
194 | |||
195 | This uses the Soup Sieve library. For more information, see | ||
196 | that library's documentation for the soupsieve.closest() | ||
197 | method. | ||
198 | |||
199 | :param select: A string containing a CSS selector. | ||
200 | |||
201 | :param namespaces: A dictionary mapping namespace prefixes | ||
202 | used in the CSS selector to namespace URIs. By default, | ||
203 | Beautiful Soup will pass in the prefixes it encountered while | ||
204 | parsing the document. | ||
205 | |||
206 | :param flags: Flags to be passed into Soup Sieve's | ||
207 | soupsieve.closest() method. | ||
208 | |||
209 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
210 | soupsieve.closest() method. | ||
211 | |||
212 | :return: A Tag, or None if there is no match. | ||
213 | :rtype: bs4.Tag | ||
214 | |||
215 | """ | ||
216 | return self.api.closest( | ||
217 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
218 | ) | ||
219 | |||
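Note that closest() walks *up* the tree (like the DOM's Element.closest), which is why it is scoped to a Tag rather than the whole soup. A sketch, same soupsieve caveat as above:

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup("<table id='data'><tr><td>cell</td></tr></table>", "html.parser")
cell = soup.find("td")
print(cell.css.closest("table")["id"])  # expected: data
```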
220 | def match(self, select, namespaces=None, flags=0, **kwargs): | ||
221 | """Check whether this Tag matches the given CSS selector. | ||
222 | |||
223 | This uses the Soup Sieve library. For more information, see | ||
224 | that library's documentation for the soupsieve.match() | ||
225 | method. | ||
226 | |||
227 | :param select: A CSS selector. | ||
228 | |||
229 | :param namespaces: A dictionary mapping namespace prefixes | ||
230 | used in the CSS selector to namespace URIs. By default, | ||
231 | Beautiful Soup will pass in the prefixes it encountered while | ||
232 | parsing the document. | ||
233 | |||
234 | :param flags: Flags to be passed into Soup Sieve's | ||
235 | soupsieve.match() method. | ||
236 | |||
237 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
238 | soupsieve.match() method. | ||
239 | |||
240 | :return: True if this Tag matches the selector; False otherwise. | ||
241 | :rtype: bool | ||
242 | """ | ||
243 | return self.api.match( | ||
244 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
245 | ) | ||
246 | |||
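match() is the boolean counterpart: it tests the scoped Tag itself instead of searching. Sketch, same caveat:

```python
from bs4 import BeautifulSoup

p = BeautifulSoup("<p class='lead'>x</p>", "html.parser").p
print(p.css.match("p.lead"))  # expected: True
print(p.css.match("div"))     # expected: False
```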
247 | def filter(self, select, namespaces=None, flags=0, **kwargs): | ||
248 | """Filter this Tag's direct children based on the given CSS selector. | ||
249 | |||
250 | This uses the Soup Sieve library. It works the same way as | ||
251 | passing this Tag into that library's soupsieve.filter() | ||
252 | method. For more information, see the | ||
253 | documentation for soupsieve.filter(). | ||
254 | |||
255 | :param namespaces: A dictionary mapping namespace prefixes | ||
256 | used in the CSS selector to namespace URIs. By default, | ||
257 | Beautiful Soup will pass in the prefixes it encountered while | ||
258 | parsing the document. | ||
259 | |||
260 | :param flags: Flags to be passed into Soup Sieve's | ||
261 | soupsieve.filter() method. | ||
262 | |||
263 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
264 | soupsieve.filter() method. | ||
265 | |||
266 | :return: A ResultSet of Tag objects. | ||
267 | :rtype: bs4.element.ResultSet | ||
268 | |||
269 | """ | ||
270 | return self._rs( | ||
271 | self.api.filter( | ||
272 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
273 | ) | ||
274 | ) | ||
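filter() differs from select() in that only *direct* children of the scoped Tag are considered. Sketch, same soupsieve caveat:

```python
from bs4 import BeautifulSoup

html = "<ul><li class='odd'>1</li><li>2</li><li class='odd'>3</li></ul>"
soup = BeautifulSoup(html, "html.parser")
print([li.get_text() for li in soup.ul.css.filter("li.odd")])  # expected: ['1', '3']
```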
diff --git a/bitbake/lib/bs4/dammit.py b/bitbake/lib/bs4/dammit.py index 7ad9e0dd1e..692433c57a 100644 --- a/bitbake/lib/bs4/dammit.py +++ b/bitbake/lib/bs4/dammit.py | |||
@@ -6,61 +6,185 @@ necessary. It is heavily based on code from Mark Pilgrim's Universal | |||
6 | Feed Parser. It works best on XML and HTML, but it does not rewrite the | 6 | Feed Parser. It works best on XML and HTML, but it does not rewrite the |
7 | XML or HTML to reflect a new encoding; that's the tree builder's job. | 7 | XML or HTML to reflect a new encoding; that's the tree builder's job. |
8 | """ | 8 | """ |
9 | # Use of this source code is governed by the MIT license. | ||
9 | __license__ = "MIT" | 10 | __license__ = "MIT" |
10 | 11 | ||
11 | import codecs | ||
12 | from html.entities import codepoint2name | 12 | from html.entities import codepoint2name |
13 | from collections import defaultdict | ||
14 | import codecs | ||
13 | import re | 15 | import re |
14 | import logging | 16 | import logging |
15 | 17 | import string | |
16 | # Import a library to autodetect character encodings. | 18 | |
17 | chardet_type = None | 19 | # Import a library to autodetect character encodings. We'll support |
20 | # any of a number of libraries that all support the same API: | ||
21 | # | ||
22 | # * cchardet | ||
23 | # * chardet | ||
24 | # * charset-normalizer | ||
25 | chardet_module = None | ||
18 | try: | 26 | try: |
19 | # First try the fast C implementation. | ||
20 | # PyPI package: cchardet | 27 | # PyPI package: cchardet |
21 | import cchardet | 28 | import cchardet as chardet_module |
22 | def chardet_dammit(s): | ||
23 | return cchardet.detect(s)['encoding'] | ||
24 | except ImportError: | 29 | except ImportError: |
25 | try: | 30 | try: |
26 | # Fall back to the pure Python implementation | ||
27 | # Debian package: python-chardet | 31 | # Debian package: python-chardet |
28 | # PyPI package: chardet | 32 | # PyPI package: chardet |
29 | import chardet | 33 | import chardet as chardet_module |
30 | def chardet_dammit(s): | ||
31 | return chardet.detect(s)['encoding'] | ||
32 | #import chardet.constants | ||
33 | #chardet.constants._debug = 1 | ||
34 | except ImportError: | 34 | except ImportError: |
35 | # No chardet available. | 35 | try: |
36 | def chardet_dammit(s): | 36 | # PyPI package: charset-normalizer |
37 | import charset_normalizer as chardet_module | ||
38 | except ImportError: | ||
39 | # No chardet available. | ||
40 | chardet_module = None | ||
41 | |||
42 | if chardet_module: | ||
43 | def chardet_dammit(s): | ||
44 | if isinstance(s, str): | ||
37 | return None | 45 | return None |
46 | return chardet_module.detect(s)['encoding'] | ||
47 | else: | ||
48 | def chardet_dammit(s): | ||
49 | return None | ||
38 | 50 | ||
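Whichever library is found, the module-level `chardet_dammit()` presents one API. A quick probe (output naturally depends on which, if any, detector is installed):

```python
from bs4.dammit import chardet_dammit

data = "Sacr\u00e9 bleu!".encode("windows-1252")
print(chardet_dammit(data))   # e.g. 'windows-1252', or None with no detector installed
print(chardet_dammit("str"))  # always None: str input is short-circuited
```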
39 | xml_encoding_re = re.compile( | 51 | # Build bytestring and Unicode versions of regular expressions for finding |
40 | r'^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I) | 52 | # a declared encoding inside an XML or HTML document. |
41 | html_meta_re = re.compile( | 53 | xml_encoding = '^\\s*<\\?.*encoding=[\'"](.*?)[\'"].*\\?>' |
42 | r'<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I) | 54 | html_meta = '<\\s*meta[^>]+charset\\s*=\\s*["\']?([^>]*?)[ /;\'">]' |
55 | encoding_res = dict() | ||
56 | encoding_res[bytes] = { | ||
57 | 'html' : re.compile(html_meta.encode("ascii"), re.I), | ||
58 | 'xml' : re.compile(xml_encoding.encode("ascii"), re.I), | ||
59 | } | ||
60 | encoding_res[str] = { | ||
61 | 'html' : re.compile(html_meta, re.I), | ||
62 | 'xml' : re.compile(xml_encoding, re.I) | ||
63 | } | ||
64 | |||
65 | from html.entities import html5 | ||
43 | 66 | ||
44 | class EntitySubstitution(object): | 67 | class EntitySubstitution(object): |
45 | 68 | """The ability to substitute XML or HTML entities for certain characters.""" | |
46 | """Substitute XML or HTML entities for the corresponding characters.""" | ||
47 | 69 | ||
48 | def _populate_class_variables(): | 70 | def _populate_class_variables(): |
49 | lookup = {} | 71 | """Initialize variables used by this class to manage the plethora of |
50 | reverse_lookup = {} | 72 | HTML5 named entities. |
51 | characters_for_re = [] | 73 | |
74 | This function returns a 3-tuple containing two dictionaries | ||
75 | and a regular expression: | ||
76 | |||
77 | unicode_to_name - A mapping of Unicode strings like "⦨" to | ||
78 | entity names like "angmsdaa". When a single Unicode string has | ||
79 | multiple entity names, we try to choose the most commonly-used | ||
80 | name. | ||
81 | |||
82 | name_to_unicode: A mapping of entity names like "angmsdaa" to | ||
83 | Unicode strings like "⦨". | ||
84 | |||
85 | named_entity_re: A regular expression matching (almost) any | ||
86 | Unicode string that corresponds to an HTML5 named entity. | ||
87 | """ | ||
88 | unicode_to_name = {} | ||
89 | name_to_unicode = {} | ||
90 | |||
91 | short_entities = set() | ||
92 | long_entities_by_first_character = defaultdict(set) | ||
93 | |||
94 | for name_with_semicolon, character in sorted(html5.items()): | ||
95 | # "It is intentional, for legacy compatibility, that many | ||
96 | # code points have multiple character reference names. For | ||
97 | # example, some appear both with and without the trailing | ||
98 | # semicolon, or with different capitalizations." | ||
99 | # - https://html.spec.whatwg.org/multipage/named-characters.html#named-character-references | ||
100 | # | ||
101 | # The parsers are in charge of handling (or not) character | ||
102 | # references with no trailing semicolon, so we remove the | ||
103 | # semicolon whenever it appears. | ||
104 | if name_with_semicolon.endswith(';'): | ||
105 | name = name_with_semicolon[:-1] | ||
106 | else: | ||
107 | name = name_with_semicolon | ||
108 | |||
109 | # When parsing HTML, we want to recognize any known named | ||
110 | # entity and convert it to a sequence of Unicode | ||
111 | # characters. | ||
112 | if name not in name_to_unicode: | ||
113 | name_to_unicode[name] = character | ||
114 | |||
115 | # When _generating_ HTML, we want to recognize special | ||
116 | # character sequences that _could_ be converted to named | ||
117 | # entities. | ||
118 | unicode_to_name[character] = name | ||
119 | |||
120 | # We also need to build a regular expression that lets us | ||
121 | # _find_ those characters in output strings so we can | ||
122 | # replace them. | ||
123 | # | ||
124 | # This is tricky, for two reasons. | ||
125 | |||
126 | if (len(character) == 1 and ord(character) < 128 | ||
127 | and character not in '<>&'): | ||
128 | # First, it would be annoying to turn single ASCII | ||
129 | # characters like | into named entities like | ||
130 | # &verbar;. The exceptions are <>&, which we _must_ | ||
131 | # turn into named entities to produce valid HTML. | ||
132 | continue | ||
133 | |||
134 | if len(character) > 1 and all(ord(x) < 128 for x in character): | ||
135 | # We also do not want to turn _combinations_ of ASCII | ||
136 | # characters like 'fj' into named entities like '&fjlig;', | ||
137 | # though that's more debatable. | ||
138 | continue | ||
139 | |||
140 | # Second, some named entities have a Unicode value that's | ||
141 | # a subset of the Unicode value for some _other_ named | ||
142 | # entity. As an example, \u2267' is ≧, | ||
143 | # but '\u2267\u0338' is ≧̸. Our regular | ||
144 | # expression needs to match the first two characters of | ||
145 | # "\u2267\u0338foo", but only the first character of | ||
146 | # "\u2267foo". | ||
147 | # | ||
148 | # In this step, we build two sets of characters that | ||
149 | # _eventually_ need to go into the regular expression. But | ||
150 | # we won't know exactly what the regular expression needs | ||
151 | # to look like until we've gone through the entire list of | ||
152 | # named entities. | ||
153 | if len(character) == 1: | ||
154 | short_entities.add(character) | ||
155 | else: | ||
156 | long_entities_by_first_character[character[0]].add(character) | ||
157 | |||
158 | # Now that we've been through the entire list of entities, we | ||
159 | # can create a regular expression that matches any of them. | ||
160 | particles = set() | ||
161 | for short in short_entities: | ||
162 | long_versions = long_entities_by_first_character[short] | ||
163 | if not long_versions: | ||
164 | particles.add(short) | ||
165 | else: | ||
166 | ignore = "".join([x[1] for x in long_versions]) | ||
167 | # This finds, e.g. \u2267 but only if it is _not_ | ||
168 | # followed by \u0338. | ||
169 | particles.add("%s(?![%s])" % (short, ignore)) | ||
170 | |||
171 | for long_entities in list(long_entities_by_first_character.values()): | ||
172 | for long_entity in long_entities: | ||
173 | particles.add(long_entity) | ||
174 | |||
175 | re_definition = "(%s)" % "|".join(particles) | ||
176 | |||
177 | # If an entity shows up in both html5 and codepoint2name, it's | ||
178 | # likely that HTML5 gives it several different names, such as | ||
179 | # 'rsquo' and 'rsquor'. When converting Unicode characters to | ||
180 | # named entities, the codepoint2name name should take | ||
181 | # precedence where possible, since that's the more easily | ||
182 | # recognizable one. | ||
52 | for codepoint, name in list(codepoint2name.items()): | 183 | for codepoint, name in list(codepoint2name.items()): |
53 | character = chr(codepoint) | 184 | character = chr(codepoint) |
54 | if codepoint != 34: | 185 | unicode_to_name[character] = name |
55 | # There's no point in turning the quotation mark into | 186 | |
56 | # ", unless it happens within an attribute value, which | 187 | return unicode_to_name, name_to_unicode, re.compile(re_definition) |
57 | # is handled elsewhere. | ||
58 | characters_for_re.append(character) | ||
59 | lookup[character] = name | ||
60 | # But we do want to turn " into the quotation mark. | ||
61 | reverse_lookup[name] = character | ||
62 | re_definition = "[%s]" % "".join(characters_for_re) | ||
63 | return lookup, reverse_lookup, re.compile(re_definition) | ||
64 | (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER, | 188 | (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER, |
65 | CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables() | 189 | CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables() |
66 | 190 | ||
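A two-entity miniature of the "particles" construction above, showing why the negative lookahead matters; the real regex is built the same way over the full HTML5 entity table:

```python
import re

# \u2267 alone is one named entity; \u2267\u0338 is a different, longer one.
# The lookahead keeps the short match from stealing the long one's prefix.
pattern = re.compile("(\u2267(?!\u0338)|\u2267\u0338)")

print(pattern.findall("\u2267foo"))        # expected: ['\u2267']
print(pattern.findall("\u2267\u0338foo"))  # expected: ['\u2267\u0338']
```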
@@ -72,21 +196,23 @@ class EntitySubstitution(object): | |||
72 | ">": "gt", | 196 | ">": "gt", |
73 | } | 197 | } |
74 | 198 | ||
75 | BARE_AMPERSAND_OR_BRACKET = re.compile(r"([<>]|" | 199 | BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" |
76 | r"&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" | 200 | "&(?!#\\d+;|#x[0-9a-fA-F]+;|\\w+;)" |
77 | r")") | 201 | ")") |
78 | 202 | ||
79 | AMPERSAND_OR_BRACKET = re.compile(r"([<>&])") | 203 | AMPERSAND_OR_BRACKET = re.compile("([<>&])") |
80 | 204 | ||
81 | @classmethod | 205 | @classmethod |
82 | def _substitute_html_entity(cls, matchobj): | 206 | def _substitute_html_entity(cls, matchobj): |
207 | """Used with a regular expression to substitute the | ||
208 | appropriate HTML entity for a special character string.""" | ||
83 | entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0)) | 209 | entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0)) |
84 | return "&%s;" % entity | 210 | return "&%s;" % entity |
85 | 211 | ||
86 | @classmethod | 212 | @classmethod |
87 | def _substitute_xml_entity(cls, matchobj): | 213 | def _substitute_xml_entity(cls, matchobj): |
88 | """Used with a regular expression to substitute the | 214 | """Used with a regular expression to substitute the |
89 | appropriate XML entity for an XML special character.""" | 215 | appropriate XML entity for a special character string.""" |
90 | entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)] | 216 | entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)] |
91 | return "&%s;" % entity | 217 | return "&%s;" % entity |
92 | 218 | ||
@@ -181,6 +307,8 @@ class EntitySubstitution(object): | |||
181 | containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that | 307 | containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that |
182 | character with "é" will make it more readable to some | 308 | character with "é" will make it more readable to some |
183 | people. | 309 | people. |
310 | |||
311 | :param s: A Unicode string. | ||
184 | """ | 312 | """ |
185 | return cls.CHARACTER_TO_HTML_ENTITY_RE.sub( | 313 | return cls.CHARACTER_TO_HTML_ENTITY_RE.sub( |
186 | cls._substitute_html_entity, s) | 314 | cls._substitute_html_entity, s) |
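Sketch of the resulting behavior: plain ASCII passes through, while characters that have named entities are substituted.

```python
from bs4.dammit import EntitySubstitution

print(EntitySubstitution.substitute_html("caf\u00e9 au lait"))
# expected: caf&eacute; au lait
```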
@@ -192,23 +320,65 @@ class EncodingDetector: | |||
192 | Order of precedence: | 320 | Order of precedence: |
193 | 321 | ||
194 | 1. Encodings you specifically tell EncodingDetector to try first | 322 | 1. Encodings you specifically tell EncodingDetector to try first |
195 | (the override_encodings argument to the constructor). | 323 | (the known_definite_encodings argument to the constructor). |
324 | |||
325 | 2. An encoding determined by sniffing the document's byte-order mark. | ||
326 | |||
327 | 3. Encodings you specifically tell EncodingDetector to try if | ||
328 | byte-order mark sniffing fails (the user_encodings argument to the | ||
329 | constructor). | ||
196 | 330 | ||
197 | 2. An encoding declared within the bytestring itself, either in an | 331 | 4. An encoding declared within the bytestring itself, either in an |
198 | XML declaration (if the bytestring is to be interpreted as an XML | 332 | XML declaration (if the bytestring is to be interpreted as an XML |
199 | document), or in a <meta> tag (if the bytestring is to be | 333 | document), or in a <meta> tag (if the bytestring is to be |
200 | interpreted as an HTML document.) | 334 | interpreted as an HTML document.) |
201 | 335 | ||
202 | 3. An encoding detected through textual analysis by chardet, | 336 | 5. An encoding detected through textual analysis by chardet, |
203 | cchardet, or a similar external library. | 337 | cchardet, or a similar external library. |
204 | 338 | ||
205 | 4. UTF-8. | 339 | 6. UTF-8. |
206 | 340 | ||
207 | 5. Windows-1252. | 341 | 7. Windows-1252. |
342 | |||
208 | """ | 343 | """ |
209 | def __init__(self, markup, override_encodings=None, is_html=False, | 344 | def __init__(self, markup, known_definite_encodings=None, |
210 | exclude_encodings=None): | 345 | is_html=False, exclude_encodings=None, |
211 | self.override_encodings = override_encodings or [] | 346 | user_encodings=None, override_encodings=None): |
347 | """Constructor. | ||
348 | |||
349 | :param markup: Some markup in an unknown encoding. | ||
350 | |||
351 | :param known_definite_encodings: When determining the encoding | ||
352 | of `markup`, these encodings will be tried first, in | ||
353 | order. In HTML terms, this corresponds to the "known | ||
354 | definite encoding" step defined here: | ||
355 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
356 | |||
357 | :param user_encodings: These encodings will be tried after the | ||
358 | `known_definite_encodings` have been tried and failed, and | ||
359 | after an attempt to sniff the encoding by looking at a | ||
360 | byte order mark has failed. In HTML terms, this | ||
361 | corresponds to the step "user has explicitly instructed | ||
362 | the user agent to override the document's character | ||
363 | encoding", defined here: | ||
364 | https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding | ||
365 | |||
366 | :param override_encodings: A deprecated alias for | ||
367 | known_definite_encodings. Any encodings here will be tried | ||
368 | immediately after the encodings in | ||
369 | known_definite_encodings. | ||
370 | |||
371 | :param is_html: If True, this markup is considered to be | ||
372 | HTML. Otherwise it's assumed to be XML. | ||
373 | |||
374 | :param exclude_encodings: These encodings will not be tried, | ||
375 | even if they otherwise would be. | ||
376 | |||
377 | """ | ||
378 | self.known_definite_encodings = list(known_definite_encodings or []) | ||
379 | if override_encodings: | ||
380 | self.known_definite_encodings += override_encodings | ||
381 | self.user_encodings = user_encodings or [] | ||
212 | exclude_encodings = exclude_encodings or [] | 382 | exclude_encodings = exclude_encodings or [] |
213 | self.exclude_encodings = set([x.lower() for x in exclude_encodings]) | 383 | self.exclude_encodings = set([x.lower() for x in exclude_encodings]) |
214 | self.chardet_encoding = None | 384 | self.chardet_encoding = None |
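A sketch of the new precedence in action; the exact tail of the list depends on the markup and on whether a chardet-style detector is installed:

```python
from bs4.dammit import EncodingDetector

markup = b'<html><head><meta charset="iso-8859-1"></head></html>'
detector = EncodingDetector(
    markup,
    known_definite_encodings=["utf-8"],  # tried first
    user_encodings=["windows-1252"],     # tried after BOM sniffing
    is_html=True,
)
print(list(detector.encodings))
# e.g. ['utf-8', 'windows-1252', 'iso-8859-1', ...]
```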
@@ -219,6 +389,12 @@ class EncodingDetector: | |||
219 | self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup) | 389 | self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup) |
220 | 390 | ||
221 | def _usable(self, encoding, tried): | 391 | def _usable(self, encoding, tried): |
392 | """Should we even bother to try this encoding? | ||
393 | |||
394 | :param encoding: Name of an encoding. | ||
395 | :param tried: Encodings that have already been tried. This will be modified | ||
396 | as a side effect. | ||
397 | """ | ||
222 | if encoding is not None: | 398 | if encoding is not None: |
223 | encoding = encoding.lower() | 399 | encoding = encoding.lower() |
224 | if encoding in self.exclude_encodings: | 400 | if encoding in self.exclude_encodings: |
@@ -230,9 +406,14 @@ class EncodingDetector: | |||
230 | 406 | ||
231 | @property | 407 | @property |
232 | def encodings(self): | 408 | def encodings(self): |
233 | """Yield a number of encodings that might work for this markup.""" | 409 | """Yield a number of encodings that might work for this markup. |
410 | |||
411 | :yield: A sequence of strings. | ||
412 | """ | ||
234 | tried = set() | 413 | tried = set() |
235 | for e in self.override_encodings: | 414 | |
415 | # First, try the known definite encodings | ||
416 | for e in self.known_definite_encodings: | ||
236 | if self._usable(e, tried): | 417 | if self._usable(e, tried): |
237 | yield e | 418 | yield e |
238 | 419 | ||
@@ -241,6 +422,12 @@ class EncodingDetector: | |||
241 | if self._usable(self.sniffed_encoding, tried): | 422 | if self._usable(self.sniffed_encoding, tried): |
242 | yield self.sniffed_encoding | 423 | yield self.sniffed_encoding |
243 | 424 | ||
425 | # Sniffing the byte-order mark did nothing; try the user | ||
426 | # encodings. | ||
427 | for e in self.user_encodings: | ||
428 | if self._usable(e, tried): | ||
429 | yield e | ||
430 | |||
244 | # Look within the document for an XML or HTML encoding | 431 | # Look within the document for an XML or HTML encoding |
245 | # declaration. | 432 | # declaration. |
246 | if self.declared_encoding is None: | 433 | if self.declared_encoding is None: |
@@ -263,7 +450,11 @@ class EncodingDetector: | |||
263 | 450 | ||
264 | @classmethod | 451 | @classmethod |
265 | def strip_byte_order_mark(cls, data): | 452 | def strip_byte_order_mark(cls, data): |
266 | """If a byte-order mark is present, strip it and return the encoding it implies.""" | 453 | """If a byte-order mark is present, strip it and return the encoding it implies. |
454 | |||
455 | :param data: Some markup. | ||
456 | :return: A 2-tuple (modified data, implied encoding) | ||
457 | """ | ||
267 | encoding = None | 458 | encoding = None |
268 | if isinstance(data, str): | 459 | if isinstance(data, str): |
269 | # Unicode data cannot have a byte-order mark. | 460 | # Unicode data cannot have a byte-order mark. |
@@ -295,21 +486,36 @@ class EncodingDetector: | |||
295 | 486 | ||
296 | An HTML encoding is declared in a <meta> tag, hopefully near the | 487 | An HTML encoding is declared in a <meta> tag, hopefully near the |
297 | beginning of the document. | 488 | beginning of the document. |
489 | |||
490 | :param markup: Some markup. | ||
491 | :param is_html: If True, this markup is considered to be HTML. Otherwise | ||
492 | it's assumed to be XML. | ||
493 | :param search_entire_document: Since an encoding is supposed to be declared near the beginning | ||
494 | of the document, most of the time it's only necessary to search a few kilobytes of data. | ||
495 | Set this to True to force this method to search the entire document. | ||
298 | """ | 496 | """ |
299 | if search_entire_document: | 497 | if search_entire_document: |
300 | xml_endpos = html_endpos = len(markup) | 498 | xml_endpos = html_endpos = len(markup) |
301 | else: | 499 | else: |
302 | xml_endpos = 1024 | 500 | xml_endpos = 1024 |
303 | html_endpos = max(2048, int(len(markup) * 0.05)) | 501 | html_endpos = max(2048, int(len(markup) * 0.05)) |
304 | 502 | ||
503 | if isinstance(markup, bytes): | ||
504 | res = encoding_res[bytes] | ||
505 | else: | ||
506 | res = encoding_res[str] | ||
507 | |||
508 | xml_re = res['xml'] | ||
509 | html_re = res['html'] | ||
305 | declared_encoding = None | 510 | declared_encoding = None |
306 | declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos) | 511 | declared_encoding_match = xml_re.search(markup, endpos=xml_endpos) |
307 | if not declared_encoding_match and is_html: | 512 | if not declared_encoding_match and is_html: |
308 | declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos) | 513 | declared_encoding_match = html_re.search(markup, endpos=html_endpos) |
309 | if declared_encoding_match is not None: | 514 | if declared_encoding_match is not None: |
310 | declared_encoding = declared_encoding_match.groups()[0].decode( | 515 | declared_encoding = declared_encoding_match.groups()[0] |
311 | 'ascii', 'replace') | ||
312 | if declared_encoding: | 516 | if declared_encoding: |
517 | if isinstance(declared_encoding, bytes): | ||
518 | declared_encoding = declared_encoding.decode('ascii', 'replace') | ||
313 | return declared_encoding.lower() | 519 | return declared_encoding.lower() |
314 | return None | 520 | return None |
315 | 521 | ||
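The dual regex table means the classmethod now accepts str markup as well as bytes; a sketch:

```python
from bs4.dammit import EncodingDetector

# Bytestrings, as before:
print(EncodingDetector.find_declared_encoding(
    b'<?xml version="1.0" encoding="ISO-8859-1"?><doc/>'))
# expected: iso-8859-1

# And now str markup too, via the str half of encoding_res:
print(EncodingDetector.find_declared_encoding(
    '<meta charset="UTF-8">', is_html=True))
# expected: utf-8
```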
@@ -332,15 +538,53 @@ class UnicodeDammit: | |||
332 | "iso-8859-2", | 538 | "iso-8859-2", |
333 | ] | 539 | ] |
334 | 540 | ||
335 | def __init__(self, markup, override_encodings=[], | 541 | def __init__(self, markup, known_definite_encodings=[], |
336 | smart_quotes_to=None, is_html=False, exclude_encodings=[]): | 542 | smart_quotes_to=None, is_html=False, exclude_encodings=[], |
543 | user_encodings=None, override_encodings=None | ||
544 | ): | ||
545 | """Constructor. | ||
546 | |||
547 | :param markup: A bytestring representing markup in an unknown encoding. | ||
548 | |||
549 | :param known_definite_encodings: When determining the encoding | ||
550 | of `markup`, these encodings will be tried first, in | ||
551 | order. In HTML terms, this corresponds to the "known | ||
552 | definite encoding" step defined here: | ||
553 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
554 | |||
555 | :param user_encodings: These encodings will be tried after the | ||
556 | `known_definite_encodings` have been tried and failed, and | ||
557 | after an attempt to sniff the encoding by looking at a | ||
558 | byte order mark has failed. In HTML terms, this | ||
559 | corresponds to the step "user has explicitly instructed | ||
560 | the user agent to override the document's character | ||
561 | encoding", defined here: | ||
562 | https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding | ||
563 | |||
564 | :param override_encodings: A deprecated alias for | ||
565 | known_definite_encodings. Any encodings here will be tried | ||
566 | immediately after the encodings in | ||
567 | known_definite_encodings. | ||
568 | |||
569 | :param smart_quotes_to: By default, Microsoft smart quotes will, like all other characters, be converted | ||
570 | to Unicode characters. Setting this to 'ascii' will convert them to ASCII quotes instead. | ||
571 | Setting it to 'xml' will convert them to XML entity references, and setting it to 'html' | ||
572 | will convert them to HTML entity references. | ||
573 | :param is_html: If True, this markup is considered to be HTML. Otherwise | ||
574 | it's assumed to be XML. | ||
575 | :param exclude_encodings: These encodings will not be considered, even | ||
576 | if the sniffing code thinks they might make sense. | ||
577 | |||
578 | """ | ||
337 | self.smart_quotes_to = smart_quotes_to | 579 | self.smart_quotes_to = smart_quotes_to |
338 | self.tried_encodings = [] | 580 | self.tried_encodings = [] |
339 | self.contains_replacement_characters = False | 581 | self.contains_replacement_characters = False |
340 | self.is_html = is_html | 582 | self.is_html = is_html |
341 | 583 | self.log = logging.getLogger(__name__) | |
342 | self.detector = EncodingDetector( | 584 | self.detector = EncodingDetector( |
343 | markup, override_encodings, is_html, exclude_encodings) | 585 | markup, known_definite_encodings, is_html, exclude_encodings, |
586 | user_encodings, override_encodings | ||
587 | ) | ||
344 | 588 | ||
345 | # Short-circuit if the data is in Unicode to begin with. | 589 | # Short-circuit if the data is in Unicode to begin with. |
346 | if isinstance(markup, str) or markup == '': | 590 | if isinstance(markup, str) or markup == '': |
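Typical use of the renamed arguments; `unicode_markup` and `original_encoding` behave as before:

```python
from bs4 import UnicodeDammit

dammit = UnicodeDammit(b"Sacr\xe9 bleu!", known_definite_encodings=["latin-1"])
print(dammit.unicode_markup)     # expected: Sacré bleu!
print(dammit.original_encoding)  # expected: latin-1
```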
@@ -368,9 +612,10 @@ class UnicodeDammit: | |||
368 | if encoding != "ascii": | 612 | if encoding != "ascii": |
369 | u = self._convert_from(encoding, "replace") | 613 | u = self._convert_from(encoding, "replace") |
370 | if u is not None: | 614 | if u is not None: |
371 | logging.warning( | 615 | self.log.warning( |
372 | "Some characters could not be decoded, and were " | 616 | "Some characters could not be decoded, and were " |
373 | "replaced with REPLACEMENT CHARACTER.") | 617 | "replaced with REPLACEMENT CHARACTER." |
618 | ) | ||
374 | self.contains_replacement_characters = True | 619 | self.contains_replacement_characters = True |
375 | break | 620 | break |
376 | 621 | ||
@@ -399,6 +644,10 @@ class UnicodeDammit: | |||
399 | return sub | 644 | return sub |
400 | 645 | ||
401 | def _convert_from(self, proposed, errors="strict"): | 646 | def _convert_from(self, proposed, errors="strict"): |
647 | """Attempt to convert the markup to the proposed encoding. | ||
648 | |||
649 | :param proposed: The name of a character encoding. | ||
650 | """ | ||
402 | proposed = self.find_codec(proposed) | 651 | proposed = self.find_codec(proposed) |
403 | if not proposed or (proposed, errors) in self.tried_encodings: | 652 | if not proposed or (proposed, errors) in self.tried_encodings: |
404 | return None | 653 | return None |
@@ -413,30 +662,40 @@ class UnicodeDammit: | |||
413 | markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) | 662 | markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) |
414 | 663 | ||
415 | try: | 664 | try: |
416 | #print "Trying to convert document to %s (errors=%s)" % ( | 665 | #print("Trying to convert document to %s (errors=%s)" % ( |
417 | # proposed, errors) | 666 | # proposed, errors)) |
418 | u = self._to_unicode(markup, proposed, errors) | 667 | u = self._to_unicode(markup, proposed, errors) |
419 | self.markup = u | 668 | self.markup = u |
420 | self.original_encoding = proposed | 669 | self.original_encoding = proposed |
421 | except Exception as e: | 670 | except Exception as e: |
422 | #print "That didn't work!" | 671 | #print("That didn't work!") |
423 | #print e | 672 | #print(e) |
424 | return None | 673 | return None |
425 | #print "Correct encoding: %s" % proposed | 674 | #print("Correct encoding: %s" % proposed) |
426 | return self.markup | 675 | return self.markup |
427 | 676 | ||
428 | def _to_unicode(self, data, encoding, errors="strict"): | 677 | def _to_unicode(self, data, encoding, errors="strict"): |
429 | '''Given a string and its encoding, decodes the string into Unicode. | 678 | """Given a string and its encoding, decodes the string into Unicode. |
430 | %encoding is a string recognized by encodings.aliases''' | 679 | |
680 | :param encoding: The name of an encoding. | ||
681 | """ | ||
431 | return str(data, encoding, errors) | 682 | return str(data, encoding, errors) |
432 | 683 | ||
433 | @property | 684 | @property |
434 | def declared_html_encoding(self): | 685 | def declared_html_encoding(self): |
686 | """If the markup is an HTML document, returns the encoding declared _within_ | ||
687 | the document. | ||
688 | """ | ||
435 | if not self.is_html: | 689 | if not self.is_html: |
436 | return None | 690 | return None |
437 | return self.detector.declared_encoding | 691 | return self.detector.declared_encoding |
438 | 692 | ||
439 | def find_codec(self, charset): | 693 | def find_codec(self, charset): |
694 | """Convert the name of a character set to a codec name. | ||
695 | |||
696 | :param charset: The name of a character set. | ||
697 | :return: The name of a codec. | ||
698 | """ | ||
440 | value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) | 699 | value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) |
441 | or (charset and self._codec(charset.replace("-", ""))) | 700 | or (charset and self._codec(charset.replace("-", ""))) |
442 | or (charset and self._codec(charset.replace("-", "_"))) | 701 | or (charset and self._codec(charset.replace("-", "_"))) |
@@ -726,7 +985,7 @@ class UnicodeDammit: | |||
726 | 0xde : b'\xc3\x9e', # Þ | 985 | 0xde : b'\xc3\x9e', # Þ |
727 | 0xdf : b'\xc3\x9f', # ß | 986 | 0xdf : b'\xc3\x9f', # ß |
728 | 0xe0 : b'\xc3\xa0', # Ã | 987 | 0xe0 : b'\xc3\xa0', # Ã |
729 | 0xe1 : b'\xa1', # á | 988 | 0xe1 : b'\xc3\xa1', # á |
730 | 0xe2 : b'\xc3\xa2', # â | 989 | 0xe2 : b'\xc3\xa2', # â |
731 | 0xe3 : b'\xc3\xa3', # ã | 990 | 0xe3 : b'\xc3\xa3', # ã |
732 | 0xe4 : b'\xc3\xa4', # ä | 991 | 0xe4 : b'\xc3\xa4', # ä |
@@ -775,12 +1034,16 @@ class UnicodeDammit: | |||
775 | Currently the only situation supported is Windows-1252 (or its | 1034 | Currently the only situation supported is Windows-1252 (or its |
776 | subset ISO-8859-1), embedded in UTF-8. | 1035 | subset ISO-8859-1), embedded in UTF-8. |
777 | 1036 | ||
778 | The input must be a bytestring. If you've already converted | 1037 | :param in_bytes: A bytestring that you suspect contains |
779 | the document to Unicode, you're too late. | 1038 | characters from multiple encodings. Note that this _must_ |
780 | 1039 | be a bytestring. If you've already converted the document | |
781 | The output is a bytestring in which `embedded_encoding` | 1040 | to Unicode, you're too late. |
782 | characters have been converted to their `main_encoding` | 1041 | :param main_encoding: The primary encoding of `in_bytes`. |
783 | equivalents. | 1042 | :param embedded_encoding: The encoding that was used to embed characters |
1043 | in the main document. | ||
1044 | :return: A bytestring in which `embedded_encoding` | ||
1045 | characters have been converted to their `main_encoding` | ||
1046 | equivalents. | ||
784 | """ | 1047 | """ |
785 | if embedded_encoding.replace('_', '-').lower() not in ( | 1048 | if embedded_encoding.replace('_', '-').lower() not in ( |
786 | 'windows-1252', 'windows_1252'): | 1049 | 'windows-1252', 'windows_1252'): |
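The canonical detwingle() demonstration (adapted from the Beautiful Soup documentation): UTF-8 and Windows-1252 bytes glued together decode cleanly after repair.

```python
from bs4 import UnicodeDammit

snowmen = "\N{SNOWMAN}" * 3
quote = "\N{LEFT DOUBLE QUOTATION MARK}Hi!\N{RIGHT DOUBLE QUOTATION MARK}"
doc = snowmen.encode("utf8") + quote.encode("windows_1252")

# doc.decode("utf8") would raise UnicodeDecodeError; detwingle() rewrites
# the embedded Windows-1252 bytes as UTF-8 first.
print(UnicodeDammit.detwingle(doc).decode("utf8"))  # expected: ☃☃☃“Hi!”
```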
diff --git a/bitbake/lib/bs4/diagnose.py b/bitbake/lib/bs4/diagnose.py index 083395fb46..4692795340 100644 --- a/bitbake/lib/bs4/diagnose.py +++ b/bitbake/lib/bs4/diagnose.py | |||
@@ -1,9 +1,10 @@ | |||
1 | """Diagnostic functions, mainly for use when doing tech support.""" | 1 | """Diagnostic functions, mainly for use when doing tech support.""" |
2 | 2 | ||
3 | # Use of this source code is governed by the MIT license. | ||
3 | __license__ = "MIT" | 4 | __license__ = "MIT" |
4 | 5 | ||
5 | import cProfile | 6 | import cProfile |
6 | from io import StringIO | 7 | from io import BytesIO |
7 | from html.parser import HTMLParser | 8 | from html.parser import HTMLParser |
8 | import bs4 | 9 | import bs4 |
9 | from bs4 import BeautifulSoup, __version__ | 10 | from bs4 import BeautifulSoup, __version__ |
@@ -16,12 +17,15 @@ import tempfile | |||
16 | import time | 17 | import time |
17 | import traceback | 18 | import traceback |
18 | import sys | 19 | import sys |
19 | import cProfile | ||
20 | 20 | ||
21 | def diagnose(data): | 21 | def diagnose(data): |
22 | """Diagnostic suite for isolating common problems.""" | 22 | """Diagnostic suite for isolating common problems. |
23 | print("Diagnostic running on Beautiful Soup %s" % __version__) | 23 | |
24 | print("Python version %s" % sys.version) | 24 | :param data: A string containing markup that needs to be explained. |
25 | :return: None; diagnostics are printed to standard output. | ||
26 | """ | ||
27 | print(("Diagnostic running on Beautiful Soup %s" % __version__)) | ||
28 | print(("Python version %s" % sys.version)) | ||
25 | 29 | ||
26 | basic_parsers = ["html.parser", "html5lib", "lxml"] | 30 | basic_parsers = ["html.parser", "html5lib", "lxml"] |
27 | for name in basic_parsers: | 31 | for name in basic_parsers: |
@@ -35,61 +39,70 @@ def diagnose(data): | |||
35 | name)) | 39 | name)) |
36 | 40 | ||
37 | if 'lxml' in basic_parsers: | 41 | if 'lxml' in basic_parsers: |
38 | basic_parsers.append(["lxml", "xml"]) | 42 | basic_parsers.append("lxml-xml") |
39 | try: | 43 | try: |
40 | from lxml import etree | 44 | from lxml import etree |
41 | print("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))) | 45 | print(("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)))) |
42 | except ImportError as e: | 46 | except ImportError as e: |
43 | print ( | 47 | print( |
44 | "lxml is not installed or couldn't be imported.") | 48 | "lxml is not installed or couldn't be imported.") |
45 | 49 | ||
46 | 50 | ||
47 | if 'html5lib' in basic_parsers: | 51 | if 'html5lib' in basic_parsers: |
48 | try: | 52 | try: |
49 | import html5lib | 53 | import html5lib |
50 | print("Found html5lib version %s" % html5lib.__version__) | 54 | print(("Found html5lib version %s" % html5lib.__version__)) |
51 | except ImportError as e: | 55 | except ImportError as e: |
52 | print ( | 56 | print( |
53 | "html5lib is not installed or couldn't be imported.") | 57 | "html5lib is not installed or couldn't be imported.") |
54 | 58 | ||
55 | if hasattr(data, 'read'): | 59 | if hasattr(data, 'read'): |
56 | data = data.read() | 60 | data = data.read() |
57 | elif os.path.exists(data): | ||
58 | print('"%s" looks like a filename. Reading data from the file.' % data) | ||
59 | data = open(data).read() | ||
60 | elif data.startswith("http:") or data.startswith("https:"): | ||
61 | print('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data) | ||
62 | print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.") | ||
63 | return | ||
64 | print() | ||
65 | 61 | ||
66 | for parser in basic_parsers: | 62 | for parser in basic_parsers: |
67 | print("Trying to parse your markup with %s" % parser) | 63 | print(("Trying to parse your markup with %s" % parser)) |
68 | success = False | 64 | success = False |
69 | try: | 65 | try: |
70 | soup = BeautifulSoup(data, parser) | 66 | soup = BeautifulSoup(data, features=parser) |
71 | success = True | 67 | success = True |
72 | except Exception as e: | 68 | except Exception as e: |
73 | print("%s could not parse the markup." % parser) | 69 | print(("%s could not parse the markup." % parser)) |
74 | traceback.print_exc() | 70 | traceback.print_exc() |
75 | if success: | 71 | if success: |
76 | print("Here's what %s did with the markup:" % parser) | 72 | print(("Here's what %s did with the markup:" % parser)) |
77 | print(soup.prettify()) | 73 | print((soup.prettify())) |
78 | 74 | ||
79 | print("-" * 80) | 75 | print(("-" * 80)) |
80 | 76 | ||
81 | def lxml_trace(data, html=True, **kwargs): | 77 | def lxml_trace(data, html=True, **kwargs): |
82 | """Print out the lxml events that occur during parsing. | 78 | """Print out the lxml events that occur during parsing. |
83 | 79 | ||
84 | This lets you see how lxml parses a document when no Beautiful | 80 | This lets you see how lxml parses a document when no Beautiful |
85 | Soup code is running. | 81 | Soup code is running. You can use this to determine whether |
82 | an lxml-specific problem is in Beautiful Soup's lxml tree builders | ||
83 | or in lxml itself. | ||
84 | |||
85 | :param data: Some markup. | ||
86 | :param html: If True, markup will be parsed with lxml's HTML parser. | ||
87 | If False, lxml's XML parser will be used. | ||
86 | """ | 88 | """ |
87 | from lxml import etree | 89 | from lxml import etree |
88 | for event, element in etree.iterparse(StringIO(data), html=html, **kwargs): | 90 | recover = kwargs.pop('recover', True) |
91 | if isinstance(data, str): | ||
92 | data = data.encode("utf8") | ||
93 | reader = BytesIO(data) | ||
94 | for event, element in etree.iterparse( | ||
95 | reader, html=html, recover=recover, **kwargs | ||
96 | ): | ||
89 | print(("%s, %4s, %s" % (event, element.tag, element.text))) | 97 | print(("%s, %4s, %s" % (event, element.tag, element.text))) |
90 | 98 | ||
91 | class AnnouncingParser(HTMLParser): | 99 | class AnnouncingParser(HTMLParser): |
92 | """Announces HTMLParser parse events, without doing anything else.""" | 100 | """Subclass of HTMLParser that announces parse events, without doing |
101 | anything else. | ||
102 | |||
103 | You can use this to get a picture of how html.parser sees a given | ||
104 | document. The easiest way to do this is to call `htmlparser_trace`. | ||
105 | """ | ||
93 | 106 | ||
94 | def _p(self, s): | 107 | def _p(self, s): |
95 | print(s) | 108 | print(s) |
@@ -126,6 +139,8 @@ def htmlparser_trace(data): | |||
126 | 139 | ||
127 | This lets you see how HTMLParser parses a document when no | 140 | This lets you see how HTMLParser parses a document when no |
128 | Beautiful Soup code is running. | 141 | Beautiful Soup code is running. |
142 | |||
143 | :param data: Some markup. | ||
129 | """ | 144 | """ |
130 | parser = AnnouncingParser() | 145 | parser = AnnouncingParser() |
131 | parser.feed(data) | 146 | parser.feed(data) |
@@ -168,9 +183,9 @@ def rdoc(num_elements=1000): | |||
168 | 183 | ||
169 | def benchmark_parsers(num_elements=100000): | 184 | def benchmark_parsers(num_elements=100000): |
170 | """Very basic head-to-head performance benchmark.""" | 185 | """Very basic head-to-head performance benchmark.""" |
171 | print("Comparative parser benchmark on Beautiful Soup %s" % __version__) | 186 | print(("Comparative parser benchmark on Beautiful Soup %s" % __version__)) |
172 | data = rdoc(num_elements) | 187 | data = rdoc(num_elements) |
173 | print("Generated a large invalid HTML document (%d bytes)." % len(data)) | 188 | print(("Generated a large invalid HTML document (%d bytes)." % len(data))) |
174 | 189 | ||
175 | for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]: | 190 | for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]: |
176 | success = False | 191 | success = False |
@@ -180,26 +195,26 @@ def benchmark_parsers(num_elements=100000): | |||
180 | b = time.time() | 195 | b = time.time() |
181 | success = True | 196 | success = True |
182 | except Exception as e: | 197 | except Exception as e: |
183 | print("%s could not parse the markup." % parser) | 198 | print(("%s could not parse the markup." % parser)) |
184 | traceback.print_exc() | 199 | traceback.print_exc() |
185 | if success: | 200 | if success: |
186 | print("BS4+%s parsed the markup in %.2fs." % (parser, b-a)) | 201 | print(("BS4+%s parsed the markup in %.2fs." % (parser, b-a))) |
187 | 202 | ||
188 | from lxml import etree | 203 | from lxml import etree |
189 | a = time.time() | 204 | a = time.time() |
190 | etree.HTML(data) | 205 | etree.HTML(data) |
191 | b = time.time() | 206 | b = time.time() |
192 | print("Raw lxml parsed the markup in %.2fs." % (b-a)) | 207 | print(("Raw lxml parsed the markup in %.2fs." % (b-a))) |
193 | 208 | ||
194 | import html5lib | 209 | import html5lib |
195 | parser = html5lib.HTMLParser() | 210 | parser = html5lib.HTMLParser() |
196 | a = time.time() | 211 | a = time.time() |
197 | parser.parse(data) | 212 | parser.parse(data) |
198 | b = time.time() | 213 | b = time.time() |
199 | print("Raw html5lib parsed the markup in %.2fs." % (b-a)) | 214 | print(("Raw html5lib parsed the markup in %.2fs." % (b-a))) |
200 | 215 | ||
201 | def profile(num_elements=100000, parser="lxml"): | 216 | def profile(num_elements=100000, parser="lxml"): |
202 | 217 | """Use Python's profiler on a randomly generated document.""" | |
203 | filehandle = tempfile.NamedTemporaryFile() | 218 | filehandle = tempfile.NamedTemporaryFile() |
204 | filename = filehandle.name | 219 | filename = filehandle.name |
205 | 220 | ||
@@ -212,5 +227,6 @@ def profile(num_elements=100000, parser="lxml"): | |||
212 | stats.sort_stats("cumulative") | 227 | stats.sort_stats("cumulative") |
213 | stats.print_stats('_html5lib|bs4', 50) | 228 | stats.print_stats('_html5lib|bs4', 50) |
214 | 229 | ||
230 | # If this file is run as a script, standard input is diagnosed. | ||
215 | if __name__ == '__main__': | 231 | if __name__ == '__main__': |
216 | diagnose(sys.stdin.read()) | 232 | diagnose(sys.stdin.read()) |
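The diagnose() hunk above also dropped the old filename/URL sniffing, so callers now hand over markup text or an open file object themselves. A short sketch under the same import assumption ("page.html" is a hypothetical file):

    from bs4.diagnose import diagnose

    # diagnose() accepts a markup string or anything with a .read()
    # method; it no longer opens paths or rejects URLs on your behalf.
    with open("page.html") as fh:
        diagnose(fh)
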
diff --git a/bitbake/lib/bs4/element.py b/bitbake/lib/bs4/element.py index 68be42d138..0aefe734b2 100644 --- a/bitbake/lib/bs4/element.py +++ b/bitbake/lib/bs4/element.py | |||
@@ -1,14 +1,27 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
1 | __license__ = "MIT" | 2 | __license__ = "MIT" |
2 | 3 | ||
3 | import collections.abc | 4 | try: |
5 | from collections.abc import Callable # Python 3.6 | ||
6 | except ImportError as e: | ||
7 | from collections import Callable | ||
4 | import re | 8 | import re |
5 | import sys | 9 | import sys |
6 | import warnings | 10 | import warnings |
7 | from bs4.dammit import EntitySubstitution | 11 | |
12 | from bs4.css import CSS | ||
13 | from bs4.formatter import ( | ||
14 | Formatter, | ||
15 | HTMLFormatter, | ||
16 | XMLFormatter, | ||
17 | ) | ||
8 | 18 | ||
9 | DEFAULT_OUTPUT_ENCODING = "utf-8" | 19 | DEFAULT_OUTPUT_ENCODING = "utf-8" |
10 | PY3K = (sys.version_info[0] > 2) | ||
11 | 20 | ||
21 | nonwhitespace_re = re.compile(r"\S+") | ||
22 | |||
23 | # NOTE: This isn't used as of 4.7.0. I'm leaving it for a little bit on | ||
24 | # the off chance someone imported it for their own use. | ||
12 | whitespace_re = re.compile(r"\s+") | 25 | whitespace_re = re.compile(r"\s+") |
13 | 26 | ||
14 | def _alias(attr): | 27 | def _alias(attr): |
@@ -23,12 +36,49 @@ def _alias(attr): | |||
23 | return alias | 36 | return alias |
24 | 37 | ||
25 | 38 | ||
39 | # These encodings are recognized by Python (so PageElement.encode | ||
40 | # could theoretically support them) but XML and HTML don't recognize | ||
41 | # them (so they should not show up in an XML or HTML document as that | ||
42 | # document's encoding). | ||
43 | # | ||
44 | # If an XML document is encoded in one of these encodings, no encoding | ||
45 | # will be mentioned in the XML declaration. If an HTML document is | ||
46 | # encoded in one of these encodings, and the HTML document has a | ||
47 | # <meta> tag that mentions an encoding, the encoding will be given as | ||
48 | # the empty string. | ||
49 | # | ||
50 | # Source: | ||
51 | # https://docs.python.org/3/library/codecs.html#python-specific-encodings | ||
52 | PYTHON_SPECIFIC_ENCODINGS = set([ | ||
53 | "idna", | ||
54 | "mbcs", | ||
55 | "oem", | ||
56 | "palmos", | ||
57 | "punycode", | ||
58 | "raw_unicode_escape", | ||
59 | "undefined", | ||
60 | "unicode_escape", | ||
61 | "raw-unicode-escape", | ||
62 | "unicode-escape", | ||
63 | "string-escape", | ||
64 | "string_escape", | ||
65 | ]) | ||
66 | |||
67 | |||
26 | class NamespacedAttribute(str): | 68 | class NamespacedAttribute(str): |
69 | """A namespaced string (e.g. 'xml:lang') that remembers the namespace | ||
70 | ('xml') and the name ('lang') that were used to create it. | ||
71 | """ | ||
27 | 72 | ||
28 | def __new__(cls, prefix, name, namespace=None): | 73 | def __new__(cls, prefix, name=None, namespace=None): |
29 | if name is None: | 74 | if not name: |
75 | # This is the default namespace. Its name "has no value" | ||
76 | # per https://www.w3.org/TR/xml-names/#defaulting | ||
77 | name = None | ||
78 | |||
79 | if not name: | ||
30 | obj = str.__new__(cls, prefix) | 80 | obj = str.__new__(cls, prefix) |
31 | elif prefix is None: | 81 | elif not prefix: |
32 | # Not really namespaced. | 82 | # Not really namespaced. |
33 | obj = str.__new__(cls, name) | 83 | obj = str.__new__(cls, name) |
34 | else: | 84 | else: |
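A small sketch of how a NamespacedAttribute behaves after this change (prefix-only construction is now possible because name defaults to None); the accessor attributes are set in the unshown remainder of __new__, as in upstream bs4:

    from bs4.element import NamespacedAttribute

    # Still a plain str ("xml:lang"), but it remembers its parts.
    attr = NamespacedAttribute(
        "xml", "lang", "http://www.w3.org/XML/1998/namespace")
    print(attr)            # xml:lang
    print(attr.prefix)     # xml
    print(attr.name)       # lang
    print(attr.namespace)  # http://www.w3.org/XML/1998/namespace
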
@@ -54,6 +104,11 @@ class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution): | |||
54 | return obj | 104 | return obj |
55 | 105 | ||
56 | def encode(self, encoding): | 106 | def encode(self, encoding): |
107 | """When an HTML document is being encoded to a given encoding, the | ||
108 | value of a meta tag's 'charset' is the name of the encoding. | ||
109 | """ | ||
110 | if encoding in PYTHON_SPECIFIC_ENCODINGS: | ||
111 | return '' | ||
57 | return encoding | 112 | return encoding |
58 | 113 | ||
59 | 114 | ||
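What the PYTHON_SPECIFIC_ENCODINGS guard above means in practice, sketched against upstream bs4 behavior (expected output shown in comments, not verified against this exact tree):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<meta charset="utf-8"/>', "html.parser")
    # A real encoding is substituted into the charset attribute...
    print(soup.meta.encode("utf-8"))           # b'<meta charset="utf-8"/>'
    # ...but advertising a Python-only codec would produce an invalid
    # document, so the charset is emitted as the empty string instead.
    print(soup.meta.encode("unicode_escape"))  # b'<meta charset=""/>'
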
@@ -79,118 +134,44 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution): | |||
79 | return obj | 134 | return obj |
80 | 135 | ||
81 | def encode(self, encoding): | 136 | def encode(self, encoding): |
137 | if encoding in PYTHON_SPECIFIC_ENCODINGS: | ||
138 | return '' | ||
82 | def rewrite(match): | 139 | def rewrite(match): |
83 | return match.group(1) + encoding | 140 | return match.group(1) + encoding |
84 | return self.CHARSET_RE.sub(rewrite, self.original_value) | 141 | return self.CHARSET_RE.sub(rewrite, self.original_value) |
85 | 142 | ||
86 | class HTMLAwareEntitySubstitution(EntitySubstitution): | ||
87 | |||
88 | """Entity substitution rules that are aware of some HTML quirks. | ||
89 | 143 | ||
90 | Specifically, the contents of <script> and <style> tags should not | 144 | class PageElement(object): |
91 | undergo entity substitution. | 145 | """Contains the navigational information for some part of the page: |
146 | that is, its current location in the parse tree. | ||
92 | 147 | ||
93 | Incoming NavigableString objects are checked to see if they're the | 148 | NavigableString, Tag, etc. are all subclasses of PageElement. |
94 | direct children of a <script> or <style> tag. | ||
95 | """ | 149 | """ |
96 | 150 | ||
97 | cdata_containing_tags = set(["script", "style"]) | 151 | # In general, we can't tell just by looking at an element whether |
152 | # it's contained in an XML document or an HTML document. But for | ||
153 | # Tags (q.v.) we can store this information at parse time. | ||
154 | known_xml = None | ||
98 | 155 | ||
99 | preformatted_tags = set(["pre"]) | 156 | def setup(self, parent=None, previous_element=None, next_element=None, |
100 | 157 | previous_sibling=None, next_sibling=None): | |
101 | @classmethod | 158 | """Sets up the initial relations between this element and |
102 | def _substitute_if_appropriate(cls, ns, f): | 159 | other elements. |
103 | if (isinstance(ns, NavigableString) | ||
104 | and ns.parent is not None | ||
105 | and ns.parent.name in cls.cdata_containing_tags): | ||
106 | # Do nothing. | ||
107 | return ns | ||
108 | # Substitute. | ||
109 | return f(ns) | ||
110 | 160 | ||
111 | @classmethod | 161 | :param parent: The parent of this element. |
112 | def substitute_html(cls, ns): | ||
113 | return cls._substitute_if_appropriate( | ||
114 | ns, EntitySubstitution.substitute_html) | ||
115 | 162 | ||
116 | @classmethod | 163 | :param previous_element: The element parsed immediately before |
117 | def substitute_xml(cls, ns): | 164 | this one. |
118 | return cls._substitute_if_appropriate( | ||
119 | ns, EntitySubstitution.substitute_xml) | ||
120 | 165 | ||
121 | class PageElement(object): | 166 | :param next_element: The element parsed immediately after |
122 | """Contains the navigational information for some part of the page | 167 | this one. |
123 | (either a tag or a piece of text)""" | ||
124 | |||
125 | # There are five possible values for the "formatter" argument passed in | ||
126 | # to methods like encode() and prettify(): | ||
127 | # | ||
128 | # "html" - All Unicode characters with corresponding HTML entities | ||
129 | # are converted to those entities on output. | ||
130 | # "minimal" - Bare ampersands and angle brackets are converted to | ||
131 | # XML entities: & < > | ||
132 | # None - The null formatter. Unicode characters are never | ||
133 | # converted to entities. This is not recommended, but it's | ||
134 | # faster than "minimal". | ||
135 | # A function - This function will be called on every string that | ||
136 | # needs to undergo entity substitution. | ||
137 | # | ||
138 | |||
139 | # In an HTML document, the default "html" and "minimal" functions | ||
140 | # will leave the contents of <script> and <style> tags alone. For | ||
141 | # an XML document, all tags will be given the same treatment. | ||
142 | |||
143 | HTML_FORMATTERS = { | ||
144 | "html" : HTMLAwareEntitySubstitution.substitute_html, | ||
145 | "minimal" : HTMLAwareEntitySubstitution.substitute_xml, | ||
146 | None : None | ||
147 | } | ||
148 | |||
149 | XML_FORMATTERS = { | ||
150 | "html" : EntitySubstitution.substitute_html, | ||
151 | "minimal" : EntitySubstitution.substitute_xml, | ||
152 | None : None | ||
153 | } | ||
154 | |||
155 | def format_string(self, s, formatter='minimal'): | ||
156 | """Format the given string using the given formatter.""" | ||
157 | if not isinstance(formatter, collections.abc.Callable): | ||
158 | formatter = self._formatter_for_name(formatter) | ||
159 | if formatter is None: | ||
160 | output = s | ||
161 | else: | ||
162 | output = formatter(s) | ||
163 | return output | ||
164 | 168 | ||
165 | @property | 169 | :param previous_sibling: The most recently encountered element |
166 | def _is_xml(self): | 170 | on the same level of the parse tree as this one. |
167 | """Is this element part of an XML tree or an HTML tree? | ||
168 | 171 | ||
169 | This is used when mapping a formatter name ("minimal") to an | 172 | :param next_sibling: The next element to be encountered |
170 | appropriate function (one that performs entity-substitution on | 173 | on the same level of the parse tree as this one. |
171 | the contents of <script> and <style> tags, or not). It's | ||
172 | inefficient, but it should be called very rarely. | ||
173 | """ | 174 | """ |
174 | if self.parent is None: | ||
175 | # This is the top-level object. It should have .is_xml set | ||
176 | # from tree creation. If not, take a guess--BS is usually | ||
177 | # used on HTML markup. | ||
178 | return getattr(self, 'is_xml', False) | ||
179 | return self.parent._is_xml | ||
180 | |||
181 | def _formatter_for_name(self, name): | ||
182 | "Look up a formatter function based on its name and the tree." | ||
183 | if self._is_xml: | ||
184 | return self.XML_FORMATTERS.get( | ||
185 | name, EntitySubstitution.substitute_xml) | ||
186 | else: | ||
187 | return self.HTML_FORMATTERS.get( | ||
188 | name, HTMLAwareEntitySubstitution.substitute_xml) | ||
189 | |||
190 | def setup(self, parent=None, previous_element=None, next_element=None, | ||
191 | previous_sibling=None, next_sibling=None): | ||
192 | """Sets up the initial relations between this element and | ||
193 | other elements.""" | ||
194 | self.parent = parent | 175 | self.parent = parent |
195 | 176 | ||
196 | self.previous_element = previous_element | 177 | self.previous_element = previous_element |
@@ -198,48 +179,156 @@ class PageElement(object): | |||
198 | self.previous_element.next_element = self | 179 | self.previous_element.next_element = self |
199 | 180 | ||
200 | self.next_element = next_element | 181 | self.next_element = next_element |
201 | if self.next_element: | 182 | if self.next_element is not None: |
202 | self.next_element.previous_element = self | 183 | self.next_element.previous_element = self |
203 | 184 | ||
204 | self.next_sibling = next_sibling | 185 | self.next_sibling = next_sibling |
205 | if self.next_sibling: | 186 | if self.next_sibling is not None: |
206 | self.next_sibling.previous_sibling = self | 187 | self.next_sibling.previous_sibling = self |
207 | 188 | ||
208 | if (not previous_sibling | 189 | if (previous_sibling is None |
209 | and self.parent is not None and self.parent.contents): | 190 | and self.parent is not None and self.parent.contents): |
210 | previous_sibling = self.parent.contents[-1] | 191 | previous_sibling = self.parent.contents[-1] |
211 | 192 | ||
212 | self.previous_sibling = previous_sibling | 193 | self.previous_sibling = previous_sibling |
213 | if previous_sibling: | 194 | if previous_sibling is not None: |
214 | self.previous_sibling.next_sibling = self | 195 | self.previous_sibling.next_sibling = self |
215 | 196 | ||
197 | def format_string(self, s, formatter): | ||
198 | """Format the given string using the given formatter. | ||
199 | |||
200 | :param s: A string. | ||
201 | :param formatter: A Formatter object, or a string naming one of the standard formatters. | ||
202 | """ | ||
203 | if formatter is None: | ||
204 | return s | ||
205 | if not isinstance(formatter, Formatter): | ||
206 | formatter = self.formatter_for_name(formatter) | ||
207 | output = formatter.substitute(s) | ||
208 | return output | ||
209 | |||
210 | def formatter_for_name(self, formatter): | ||
211 | """Look up or create a Formatter for the given identifier, | ||
212 | if necessary. | ||
213 | |||
214 | :param formatter: Can be a Formatter object (used as-is), a | ||
215 | function (used as the entity substitution hook for an | ||
216 | XMLFormatter or HTMLFormatter), or a string (used to look | ||
217 | up an XMLFormatter or HTMLFormatter in the appropriate | ||
218 | registry). | ||
219 | """ | ||
220 | if isinstance(formatter, Formatter): | ||
221 | return formatter | ||
222 | if self._is_xml: | ||
223 | c = XMLFormatter | ||
224 | else: | ||
225 | c = HTMLFormatter | ||
226 | if isinstance(formatter, Callable): | ||
227 | return c(entity_substitution=formatter) | ||
228 | return c.REGISTRY[formatter] | ||
229 | |||
230 | @property | ||
231 | def _is_xml(self): | ||
232 | """Is this element part of an XML tree or an HTML tree? | ||
233 | |||
234 | This is used in formatter_for_name, when deciding whether an | ||
235 | XMLFormatter or HTMLFormatter is more appropriate. It can be | ||
236 | inefficient, but it should be called very rarely. | ||
237 | """ | ||
238 | if self.known_xml is not None: | ||
239 | # Most of the time we will have determined this when the | ||
240 | # document is parsed. | ||
241 | return self.known_xml | ||
242 | |||
243 | # Otherwise, it's likely that this element was created by | ||
244 | # direct invocation of the constructor from within the user's | ||
245 | # Python code. | ||
246 | if self.parent is None: | ||
247 | # This is the top-level object. It should have .known_xml set | ||
248 | # from tree creation. If not, take a guess--BS is usually | ||
249 | # used on HTML markup. | ||
250 | return getattr(self, 'is_xml', False) | ||
251 | return self.parent._is_xml | ||
252 | |||
216 | nextSibling = _alias("next_sibling") # BS3 | 253 | nextSibling = _alias("next_sibling") # BS3 |
217 | previousSibling = _alias("previous_sibling") # BS3 | 254 | previousSibling = _alias("previous_sibling") # BS3 |
218 | 255 | ||
219 | def replace_with(self, replace_with): | 256 | default = object() |
220 | if not self.parent: | 257 | def _all_strings(self, strip=False, types=default): |
258 | """Yield all strings of certain classes, possibly stripping them. | ||
259 | |||
260 | This is implemented differently in Tag and NavigableString. | ||
261 | """ | ||
262 | raise NotImplementedError() | ||
263 | |||
264 | @property | ||
265 | def stripped_strings(self): | ||
266 | """Yield all strings in this PageElement, stripping them first. | ||
267 | |||
268 | :yield: A sequence of stripped strings. | ||
269 | """ | ||
270 | for string in self._all_strings(True): | ||
271 | yield string | ||
272 | |||
273 | def get_text(self, separator="", strip=False, | ||
274 | types=default): | ||
275 | """Get all child strings of this PageElement, concatenated using the | ||
276 | given separator. | ||
277 | |||
278 | :param separator: Strings will be concatenated using this separator. | ||
279 | |||
280 | :param strip: If True, strings will be stripped before being | ||
281 | concatenated. | ||
282 | |||
283 | :param types: A tuple of NavigableString subclasses. Any | ||
284 | strings of a subclass not found in this list will be | ||
285 | ignored. Although there are exceptions, the default | ||
286 | behavior in most cases is to consider only NavigableString | ||
287 | and CData objects. That means no comments, processing | ||
288 | instructions, etc. | ||
289 | |||
290 | :return: A string. | ||
291 | """ | ||
292 | return separator.join([s for s in self._all_strings( | ||
293 | strip, types=types)]) | ||
294 | getText = get_text | ||
295 | text = property(get_text) | ||
296 | |||
297 | def replace_with(self, *args): | ||
298 | """Replace this PageElement with one or more PageElements, keeping the | ||
299 | rest of the tree the same. | ||
300 | |||
301 | :param args: One or more PageElements. | ||
302 | :return: `self`, no longer part of the tree. | ||
303 | """ | ||
304 | if self.parent is None: | ||
221 | raise ValueError( | 305 | raise ValueError( |
222 | "Cannot replace one element with another when the" | 306 | "Cannot replace one element with another when the " |
223 | "element to be replaced is not part of a tree.") | 307 | "element to be replaced is not part of a tree.") |
224 | if replace_with is self: | 308 | if len(args) == 1 and args[0] is self: |
225 | return | 309 | return |
226 | if replace_with is self.parent: | 310 | if any(x is self.parent for x in args): |
227 | raise ValueError("Cannot replace a Tag with its parent.") | 311 | raise ValueError("Cannot replace a Tag with its parent.") |
228 | old_parent = self.parent | 312 | old_parent = self.parent |
229 | my_index = self.parent.index(self) | 313 | my_index = self.parent.index(self) |
230 | self.extract() | 314 | self.extract(_self_index=my_index) |
231 | old_parent.insert(my_index, replace_with) | 315 | for idx, replace_with in enumerate(args, start=my_index): |
316 | old_parent.insert(idx, replace_with) | ||
232 | return self | 317 | return self |
233 | replaceWith = replace_with # BS3 | 318 | replaceWith = replace_with # BS3 |
234 | 319 | ||
235 | def unwrap(self): | 320 | def unwrap(self): |
321 | """Replace this PageElement with its contents. | ||
322 | |||
323 | :return: `self`, no longer part of the tree. | ||
324 | """ | ||
236 | my_parent = self.parent | 325 | my_parent = self.parent |
237 | if not self.parent: | 326 | if self.parent is None: |
238 | raise ValueError( | 327 | raise ValueError( |
239 | "Cannot replace an element with its contents when that" | 328 | "Cannot replace an element with its contents when that" |
240 | "element is not part of a tree.") | 329 | "element is not part of a tree.") |
241 | my_index = self.parent.index(self) | 330 | my_index = self.parent.index(self) |
242 | self.extract() | 331 | self.extract(_self_index=my_index) |
243 | for child in reversed(self.contents[:]): | 332 | for child in reversed(self.contents[:]): |
244 | my_parent.insert(my_index, child) | 333 | my_parent.insert(my_index, child) |
245 | return self | 334 | return self |
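The user-visible change in the hunk above is that replace_with() is now variadic. A minimal sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p><b>old</b></p>", "html.parser")
    # One call can now splice several elements into the old position.
    soup.b.replace_with(soup.new_tag("i"), soup.new_tag("u"))
    print(soup)  # <p><i></i><u></u></p>
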
@@ -247,14 +336,29 @@ class PageElement(object): | |||
247 | replaceWithChildren = unwrap # BS3 | 336 | replaceWithChildren = unwrap # BS3 |
248 | 337 | ||
249 | def wrap(self, wrap_inside): | 338 | def wrap(self, wrap_inside): |
339 | """Wrap this PageElement inside another one. | ||
340 | |||
341 | :param wrap_inside: A PageElement. | ||
342 | :return: `wrap_inside`, occupying the position in the tree that used | ||
343 | to be occupied by `self`, and with `self` inside it. | ||
344 | """ | ||
250 | me = self.replace_with(wrap_inside) | 345 | me = self.replace_with(wrap_inside) |
251 | wrap_inside.append(me) | 346 | wrap_inside.append(me) |
252 | return wrap_inside | 347 | return wrap_inside |
253 | 348 | ||
254 | def extract(self): | 349 | def extract(self, _self_index=None): |
255 | """Destructively rips this element out of the tree.""" | 350 | """Destructively rips this element out of the tree. |
351 | |||
352 | :param _self_index: The location of this element in its parent's | ||
353 | .contents, if known. Passing this in allows for a performance | ||
354 | optimization. | ||
355 | |||
356 | :return: `self`, no longer part of the tree. | ||
357 | """ | ||
256 | if self.parent is not None: | 358 | if self.parent is not None: |
257 | del self.parent.contents[self.parent.index(self)] | 359 | if _self_index is None: |
360 | _self_index = self.parent.index(self) | ||
361 | del self.parent.contents[_self_index] | ||
258 | 362 | ||
259 | #Find the two elements that would be next to each other if | 363 | #Find the two elements that would be next to each other if |
260 | #this element (and any children) hadn't been parsed. Connect | 364 | #this element (and any children) hadn't been parsed. Connect |
@@ -281,8 +385,13 @@ class PageElement(object): | |||
281 | return self | 385 | return self |
282 | 386 | ||
283 | def _last_descendant(self, is_initialized=True, accept_self=True): | 387 | def _last_descendant(self, is_initialized=True, accept_self=True): |
284 | "Finds the last element beneath this object to be parsed." | 388 | """Finds the last element beneath this object to be parsed. |
285 | if is_initialized and self.next_sibling: | 389 | |
390 | :param is_initialized: Has `setup` been called on this PageElement | ||
391 | yet? | ||
392 | :param accept_self: Is `self` an acceptable answer to the question? | ||
393 | """ | ||
394 | if is_initialized and self.next_sibling is not None: | ||
286 | last_child = self.next_sibling.previous_element | 395 | last_child = self.next_sibling.previous_element |
287 | else: | 396 | else: |
288 | last_child = self | 397 | last_child = self |
@@ -295,6 +404,14 @@ class PageElement(object): | |||
295 | _lastRecursiveChild = _last_descendant | 404 | _lastRecursiveChild = _last_descendant |
296 | 405 | ||
297 | def insert(self, position, new_child): | 406 | def insert(self, position, new_child): |
407 | """Insert a new PageElement in the list of this PageElement's children. | ||
408 | |||
409 | This works the same way as `list.insert`. | ||
410 | |||
411 | :param position: The numeric position that should be occupied | ||
412 | in `self.children` by the new PageElement. | ||
413 | :param new_child: A PageElement. | ||
414 | """ | ||
298 | if new_child is None: | 415 | if new_child is None: |
299 | raise ValueError("Cannot insert None into a tag.") | 416 | raise ValueError("Cannot insert None into a tag.") |
300 | if new_child is self: | 417 | if new_child is self: |
@@ -303,6 +420,14 @@ class PageElement(object): | |||
303 | and not isinstance(new_child, NavigableString)): | 420 | and not isinstance(new_child, NavigableString)): |
304 | new_child = NavigableString(new_child) | 421 | new_child = NavigableString(new_child) |
305 | 422 | ||
423 | from bs4 import BeautifulSoup | ||
424 | if isinstance(new_child, BeautifulSoup): | ||
425 | # We don't want to end up with a situation where one BeautifulSoup | ||
426 | # object contains another. Insert the children one at a time. | ||
427 | for subchild in list(new_child.contents): | ||
428 | self.insert(position, subchild) | ||
429 | position += 1 | ||
430 | return | ||
306 | position = min(position, len(self.contents)) | 431 | position = min(position, len(self.contents)) |
307 | if hasattr(new_child, 'parent') and new_child.parent is not None: | 432 | if hasattr(new_child, 'parent') and new_child.parent is not None: |
308 | # We're 'inserting' an element that's already one | 433 | # We're 'inserting' an element that's already one |
@@ -361,160 +486,326 @@ class PageElement(object): | |||
361 | self.contents.insert(position, new_child) | 486 | self.contents.insert(position, new_child) |
362 | 487 | ||
363 | def append(self, tag): | 488 | def append(self, tag): |
364 | """Appends the given tag to the contents of this tag.""" | 489 | """Appends the given PageElement to the contents of this one. |
490 | |||
491 | :param tag: A PageElement. | ||
492 | """ | ||
365 | self.insert(len(self.contents), tag) | 493 | self.insert(len(self.contents), tag) |
366 | 494 | ||
367 | def insert_before(self, predecessor): | 495 | def extend(self, tags): |
368 | """Makes the given element the immediate predecessor of this one. | 496 | """Appends the given PageElements to this one's contents. |
369 | 497 | ||
370 | The two elements will have the same parent, and the given element | 498 | :param tags: A list of PageElements. If a single Tag is |
499 | provided instead, this PageElement's contents will be extended | ||
500 | with that Tag's contents. | ||
501 | """ | ||
502 | if isinstance(tags, Tag): | ||
503 | tags = tags.contents | ||
504 | if isinstance(tags, list): | ||
505 | # Moving items around the tree may change their position in | ||
506 | # the original list. Make a list that won't change. | ||
507 | tags = list(tags) | ||
508 | for tag in tags: | ||
509 | self.append(tag) | ||
510 | |||
511 | def insert_before(self, *args): | ||
512 | """Makes the given element(s) the immediate predecessor of this one. | ||
513 | |||
514 | All the elements will have the same parent, and the given elements | ||
371 | will be immediately before this one. | 515 | will be immediately before this one. |
516 | |||
517 | :param args: One or more PageElements. | ||
372 | """ | 518 | """ |
373 | if self is predecessor: | ||
374 | raise ValueError("Can't insert an element before itself.") | ||
375 | parent = self.parent | 519 | parent = self.parent |
376 | if parent is None: | 520 | if parent is None: |
377 | raise ValueError( | 521 | raise ValueError( |
378 | "Element has no parent, so 'before' has no meaning.") | 522 | "Element has no parent, so 'before' has no meaning.") |
379 | # Extract first so that the index won't be screwed up if they | 523 | if any(x is self for x in args): |
380 | # are siblings. | 524 | raise ValueError("Can't insert an element before itself.") |
381 | if isinstance(predecessor, PageElement): | 525 | for predecessor in args: |
382 | predecessor.extract() | 526 | # Extract first so that the index won't be screwed up if they |
383 | index = parent.index(self) | 527 | # are siblings. |
384 | parent.insert(index, predecessor) | 528 | if isinstance(predecessor, PageElement): |
385 | 529 | predecessor.extract() | |
386 | def insert_after(self, successor): | 530 | index = parent.index(self) |
387 | """Makes the given element the immediate successor of this one. | 531 | parent.insert(index, predecessor) |
388 | 532 | ||
389 | The two elements will have the same parent, and the given element | 533 | def insert_after(self, *args): |
534 | """Makes the given element(s) the immediate successor of this one. | ||
535 | |||
536 | The elements will have the same parent, and the given elements | ||
390 | will be immediately after this one. | 537 | will be immediately after this one. |
538 | |||
539 | :param args: One or more PageElements. | ||
391 | """ | 540 | """ |
392 | if self is successor: | 541 | # Do all error checking before modifying the tree. |
393 | raise ValueError("Can't insert an element after itself.") | ||
394 | parent = self.parent | 542 | parent = self.parent |
395 | if parent is None: | 543 | if parent is None: |
396 | raise ValueError( | 544 | raise ValueError( |
397 | "Element has no parent, so 'after' has no meaning.") | 545 | "Element has no parent, so 'after' has no meaning.") |
398 | # Extract first so that the index won't be screwed up if they | 546 | if any(x is self for x in args): |
399 | # are siblings. | 547 | raise ValueError("Can't insert an element after itself.") |
400 | if isinstance(successor, PageElement): | 548 | |
401 | successor.extract() | 549 | offset = 0 |
402 | index = parent.index(self) | 550 | for successor in args: |
403 | parent.insert(index+1, successor) | 551 | # Extract first so that the index won't be screwed up if they |
404 | 552 | # are siblings. | |
405 | def find_next(self, name=None, attrs={}, text=None, **kwargs): | 553 | if isinstance(successor, PageElement): |
406 | """Returns the first item that matches the given criteria and | 554 | successor.extract() |
407 | appears after this Tag in the document.""" | 555 | index = parent.index(self) |
408 | return self._find_one(self.find_all_next, name, attrs, text, **kwargs) | 556 | parent.insert(index+1+offset, successor) |
557 | offset += 1 | ||
558 | |||
559 | def find_next(self, name=None, attrs={}, string=None, **kwargs): | ||
560 | """Find the first PageElement that matches the given criteria and | ||
561 | appears later in the document than this PageElement. | ||
562 | |||
563 | All find_* methods take a common set of arguments. See the online | ||
564 | documentation for detailed explanations. | ||
565 | |||
566 | :param name: A filter on tag name. | ||
567 | :param attrs: A dictionary of filters on attribute values. | ||
568 | :param string: A filter for a NavigableString with specific text. | ||
569 | :kwargs: A dictionary of filters on attribute values. | ||
570 | :return: A PageElement. | ||
571 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
572 | """ | ||
573 | return self._find_one(self.find_all_next, name, attrs, string, **kwargs) | ||
409 | findNext = find_next # BS3 | 574 | findNext = find_next # BS3 |
410 | 575 | ||
411 | def find_all_next(self, name=None, attrs={}, text=None, limit=None, | 576 | def find_all_next(self, name=None, attrs={}, string=None, limit=None, |
412 | **kwargs): | 577 | **kwargs): |
413 | """Returns all items that match the given criteria and appear | 578 | """Find all PageElements that match the given criteria and appear |
414 | after this Tag in the document.""" | 579 | later in the document than this PageElement. |
415 | return self._find_all(name, attrs, text, limit, self.next_elements, | 580 | |
416 | **kwargs) | 581 | All find_* methods take a common set of arguments. See the online |
582 | documentation for detailed explanations. | ||
583 | |||
584 | :param name: A filter on tag name. | ||
585 | :param attrs: A dictionary of filters on attribute values. | ||
586 | :param string: A filter for a NavigableString with specific text. | ||
587 | :param limit: Stop looking after finding this many results. | ||
588 | :kwargs: A dictionary of filters on attribute values. | ||
589 | :return: A ResultSet containing PageElements. | ||
590 | """ | ||
591 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
592 | return self._find_all(name, attrs, string, limit, self.next_elements, | ||
593 | _stacklevel=_stacklevel+1, **kwargs) | ||
417 | findAllNext = find_all_next # BS3 | 594 | findAllNext = find_all_next # BS3 |
418 | 595 | ||
419 | def find_next_sibling(self, name=None, attrs={}, text=None, **kwargs): | 596 | def find_next_sibling(self, name=None, attrs={}, string=None, **kwargs): |
420 | """Returns the closest sibling to this Tag that matches the | 597 | """Find the closest sibling to this PageElement that matches the |
421 | given criteria and appears after this Tag in the document.""" | 598 | given criteria and appears later in the document. |
422 | return self._find_one(self.find_next_siblings, name, attrs, text, | 599 | |
600 | All find_* methods take a common set of arguments. See the | ||
601 | online documentation for detailed explanations. | ||
602 | |||
603 | :param name: A filter on tag name. | ||
604 | :param attrs: A dictionary of filters on attribute values. | ||
605 | :param string: A filter for a NavigableString with specific text. | ||
606 | :kwargs: A dictionary of filters on attribute values. | ||
607 | :return: A PageElement. | ||
608 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
609 | """ | ||
610 | return self._find_one(self.find_next_siblings, name, attrs, string, | ||
423 | **kwargs) | 611 | **kwargs) |
424 | findNextSibling = find_next_sibling # BS3 | 612 | findNextSibling = find_next_sibling # BS3 |
425 | 613 | ||
426 | def find_next_siblings(self, name=None, attrs={}, text=None, limit=None, | 614 | def find_next_siblings(self, name=None, attrs={}, string=None, limit=None, |
427 | **kwargs): | 615 | **kwargs): |
428 | """Returns the siblings of this Tag that match the given | 616 | """Find all siblings of this PageElement that match the given criteria |
429 | criteria and appear after this Tag in the document.""" | 617 | and appear later in the document. |
430 | return self._find_all(name, attrs, text, limit, | 618 | |
431 | self.next_siblings, **kwargs) | 619 | All find_* methods take a common set of arguments. See the online |
620 | documentation for detailed explanations. | ||
621 | |||
622 | :param name: A filter on tag name. | ||
623 | :param attrs: A dictionary of filters on attribute values. | ||
624 | :param string: A filter for a NavigableString with specific text. | ||
625 | :param limit: Stop looking after finding this many results. | ||
626 | :kwargs: A dictionary of filters on attribute values. | ||
627 | :return: A ResultSet of PageElements. | ||
628 | :rtype: bs4.element.ResultSet | ||
629 | """ | ||
630 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
631 | return self._find_all( | ||
632 | name, attrs, string, limit, | ||
633 | self.next_siblings, _stacklevel=_stacklevel+1, **kwargs | ||
634 | ) | ||
432 | findNextSiblings = find_next_siblings # BS3 | 635 | findNextSiblings = find_next_siblings # BS3 |
433 | fetchNextSiblings = find_next_siblings # BS2 | 636 | fetchNextSiblings = find_next_siblings # BS2 |
434 | 637 | ||
435 | def find_previous(self, name=None, attrs={}, text=None, **kwargs): | 638 | def find_previous(self, name=None, attrs={}, string=None, **kwargs): |
436 | """Returns the first item that matches the given criteria and | 639 | """Look backwards in the document from this PageElement and find the |
437 | appears before this Tag in the document.""" | 640 | first PageElement that matches the given criteria. |
641 | |||
642 | All find_* methods take a common set of arguments. See the online | ||
643 | documentation for detailed explanations. | ||
644 | |||
645 | :param name: A filter on tag name. | ||
646 | :param attrs: A dictionary of filters on attribute values. | ||
647 | :param string: A filter for a NavigableString with specific text. | ||
648 | :kwargs: A dictionary of filters on attribute values. | ||
649 | :return: A PageElement. | ||
650 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
651 | """ | ||
438 | return self._find_one( | 652 | return self._find_one( |
439 | self.find_all_previous, name, attrs, text, **kwargs) | 653 | self.find_all_previous, name, attrs, string, **kwargs) |
440 | findPrevious = find_previous # BS3 | 654 | findPrevious = find_previous # BS3 |
441 | 655 | ||
442 | def find_all_previous(self, name=None, attrs={}, text=None, limit=None, | 656 | def find_all_previous(self, name=None, attrs={}, string=None, limit=None, |
443 | **kwargs): | 657 | **kwargs): |
444 | """Returns all items that match the given criteria and appear | 658 | """Look backwards in the document from this PageElement and find all |
445 | before this Tag in the document.""" | 659 | PageElements that match the given criteria. |
446 | return self._find_all(name, attrs, text, limit, self.previous_elements, | 660 | |
447 | **kwargs) | 661 | All find_* methods take a common set of arguments. See the online |
662 | documentation for detailed explanations. | ||
663 | |||
664 | :param name: A filter on tag name. | ||
665 | :param attrs: A dictionary of filters on attribute values. | ||
666 | :param string: A filter for a NavigableString with specific text. | ||
667 | :param limit: Stop looking after finding this many results. | ||
668 | :kwargs: A dictionary of filters on attribute values. | ||
669 | :return: A ResultSet of PageElements. | ||
670 | :rtype: bs4.element.ResultSet | ||
671 | """ | ||
672 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
673 | return self._find_all( | ||
674 | name, attrs, string, limit, self.previous_elements, | ||
675 | _stacklevel=_stacklevel+1, **kwargs | ||
676 | ) | ||
448 | findAllPrevious = find_all_previous # BS3 | 677 | findAllPrevious = find_all_previous # BS3 |
449 | fetchPrevious = find_all_previous # BS2 | 678 | fetchPrevious = find_all_previous # BS2 |
450 | 679 | ||
451 | def find_previous_sibling(self, name=None, attrs={}, text=None, **kwargs): | 680 | def find_previous_sibling(self, name=None, attrs={}, string=None, **kwargs): |
452 | """Returns the closest sibling to this Tag that matches the | 681 | """Returns the closest sibling to this PageElement that matches the |
453 | given criteria and appears before this Tag in the document.""" | 682 | given criteria and appears earlier in the document. |
454 | return self._find_one(self.find_previous_siblings, name, attrs, text, | 683 | |
684 | All find_* methods take a common set of arguments. See the online | ||
685 | documentation for detailed explanations. | ||
686 | |||
687 | :param name: A filter on tag name. | ||
688 | :param attrs: A dictionary of filters on attribute values. | ||
689 | :param string: A filter for a NavigableString with specific text. | ||
690 | :kwargs: A dictionary of filters on attribute values. | ||
691 | :return: A PageElement. | ||
692 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
693 | """ | ||
694 | return self._find_one(self.find_previous_siblings, name, attrs, string, | ||
455 | **kwargs) | 695 | **kwargs) |
456 | findPreviousSibling = find_previous_sibling # BS3 | 696 | findPreviousSibling = find_previous_sibling # BS3 |
457 | 697 | ||
458 | def find_previous_siblings(self, name=None, attrs={}, text=None, | 698 | def find_previous_siblings(self, name=None, attrs={}, string=None, |
459 | limit=None, **kwargs): | 699 | limit=None, **kwargs): |
460 | """Returns the siblings of this Tag that match the given | 700 | """Returns all siblings to this PageElement that match the |
461 | criteria and appear before this Tag in the document.""" | 701 | given criteria and appear earlier in the document. |
462 | return self._find_all(name, attrs, text, limit, | 702 | |
463 | self.previous_siblings, **kwargs) | 703 | All find_* methods take a common set of arguments. See the online |
704 | documentation for detailed explanations. | ||
705 | |||
706 | :param name: A filter on tag name. | ||
707 | :param attrs: A dictionary of filters on attribute values. | ||
708 | :param string: A filter for a NavigableString with specific text. | ||
709 | :param limit: Stop looking after finding this many results. | ||
710 | :kwargs: A dictionary of filters on attribute values. | ||
711 | :return: A ResultSet of PageElements. | ||
712 | :rtype: bs4.element.ResultSet | ||
713 | """ | ||
714 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
715 | return self._find_all( | ||
716 | name, attrs, string, limit, | ||
717 | self.previous_siblings, _stacklevel=_stacklevel+1, **kwargs | ||
718 | ) | ||
464 | findPreviousSiblings = find_previous_siblings # BS3 | 719 | findPreviousSiblings = find_previous_siblings # BS3 |
465 | fetchPreviousSiblings = find_previous_siblings # BS2 | 720 | fetchPreviousSiblings = find_previous_siblings # BS2 |
466 | 721 | ||
467 | def find_parent(self, name=None, attrs={}, **kwargs): | 722 | def find_parent(self, name=None, attrs={}, **kwargs): |
468 | """Returns the closest parent of this Tag that matches the given | 723 | """Find the closest parent of this PageElement that matches the given |
469 | criteria.""" | 724 | criteria. |
725 | |||
726 | All find_* methods take a common set of arguments. See the online | ||
727 | documentation for detailed explanations. | ||
728 | |||
729 | :param name: A filter on tag name. | ||
730 | :param attrs: A dictionary of filters on attribute values. | ||
731 | :kwargs: A dictionary of filters on attribute values. | ||
732 | |||
733 | :return: A PageElement. | ||
734 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
735 | """ | ||
470 | # NOTE: We can't use _find_one because findParents takes a different | 736 | # NOTE: We can't use _find_one because findParents takes a different |
471 | # set of arguments. | 737 | # set of arguments. |
472 | r = None | 738 | r = None |
473 | l = self.find_parents(name, attrs, 1, **kwargs) | 739 | l = self.find_parents(name, attrs, 1, _stacklevel=3, **kwargs) |
474 | if l: | 740 | if l: |
475 | r = l[0] | 741 | r = l[0] |
476 | return r | 742 | return r |
477 | findParent = find_parent # BS3 | 743 | findParent = find_parent # BS3 |
478 | 744 | ||
479 | def find_parents(self, name=None, attrs={}, limit=None, **kwargs): | 745 | def find_parents(self, name=None, attrs={}, limit=None, **kwargs): |
480 | """Returns the parents of this Tag that match the given | 746 | """Find all parents of this PageElement that match the given criteria. |
481 | criteria.""" | 747 | |
748 | All find_* methods take a common set of arguments. See the online | ||
749 | documentation for detailed explanations. | ||
482 | 750 | ||
751 | :param name: A filter on tag name. | ||
752 | :param attrs: A dictionary of filters on attribute values. | ||
753 | :param limit: Stop looking after finding this many results. | ||
754 | :kwargs: A dictionary of filters on attribute values. | ||
755 | |||
756 | :return: A ResultSet of PageElements. | ||
757 | :rtype: bs4.element.ResultSet | ||
758 | """ | ||
759 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
483 | return self._find_all(name, attrs, None, limit, self.parents, | 760 | return self._find_all(name, attrs, None, limit, self.parents, |
484 | **kwargs) | 761 | _stacklevel=_stacklevel+1, **kwargs) |
485 | findParents = find_parents # BS3 | 762 | findParents = find_parents # BS3 |
486 | fetchParents = find_parents # BS2 | 763 | fetchParents = find_parents # BS2 |
487 | 764 | ||
488 | @property | 765 | @property |
489 | def next(self): | 766 | def next(self): |
767 | """The PageElement, if any, that was parsed just after this one. | ||
768 | |||
769 | :return: A PageElement. | ||
770 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
771 | """ | ||
490 | return self.next_element | 772 | return self.next_element |
491 | 773 | ||
492 | @property | 774 | @property |
493 | def previous(self): | 775 | def previous(self): |
776 | """The PageElement, if any, that was parsed just before this one. | ||
777 | |||
778 | :return: A PageElement. | ||
779 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
780 | """ | ||
494 | return self.previous_element | 781 | return self.previous_element |
495 | 782 | ||
496 | #These methods do the real heavy lifting. | 783 | #These methods do the real heavy lifting. |
497 | 784 | ||
498 | def _find_one(self, method, name, attrs, text, **kwargs): | 785 | def _find_one(self, method, name, attrs, string, **kwargs): |
499 | r = None | 786 | r = None |
500 | l = method(name, attrs, text, 1, **kwargs) | 787 | l = method(name, attrs, string, 1, _stacklevel=4, **kwargs) |
501 | if l: | 788 | if l: |
502 | r = l[0] | 789 | r = l[0] |
503 | return r | 790 | return r |
504 | 791 | ||
505 | def _find_all(self, name, attrs, text, limit, generator, **kwargs): | 792 | def _find_all(self, name, attrs, string, limit, generator, **kwargs): |
506 | "Iterates over a generator looking for things that match." | 793 | "Iterates over a generator looking for things that match." |
794 | _stacklevel = kwargs.pop('_stacklevel', 3) | ||
507 | 795 | ||
508 | if text is None and 'string' in kwargs: | 796 | if string is None and 'text' in kwargs: |
509 | text = kwargs['string'] | 797 | string = kwargs.pop('text') |
510 | del kwargs['string'] | 798 | warnings.warn( |
799 | "The 'text' argument to find()-type methods is deprecated. Use 'string' instead.", | ||
800 | DeprecationWarning, stacklevel=_stacklevel | ||
801 | ) | ||
511 | 802 | ||
512 | if isinstance(name, SoupStrainer): | 803 | if isinstance(name, SoupStrainer): |
513 | strainer = name | 804 | strainer = name |
514 | else: | 805 | else: |
515 | strainer = SoupStrainer(name, attrs, text, **kwargs) | 806 | strainer = SoupStrainer(name, attrs, string, **kwargs) |
516 | 807 | ||
517 | if text is None and not limit and not attrs and not kwargs: | 808 | if string is None and not limit and not attrs and not kwargs: |
518 | if name is True or name is None: | 809 | if name is True or name is None: |
519 | # Optimization to find all tags. | 810 | # Optimization to find all tags. |
520 | result = (element for element in generator | 811 | result = (element for element in generator |
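Two behaviors from the hunk above, sketched: the find_* family now prefers the 'string' argument, and the _stacklevel bookkeeping exists so the deprecation warning for 'text' points at the caller rather than at bs4 internals:

    import warnings
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>a</p><p>b</p>", "html.parser")
    print(soup.find_all(string="a"))        # ['a']

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        soup.find_all(text="a")             # old spelling still works...
    print(caught[0].category.__name__)      # ...but warns: DeprecationWarning
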
@@ -522,9 +813,23 @@ class PageElement(object): | |||
522 | return ResultSet(strainer, result) | 813 | return ResultSet(strainer, result) |
523 | elif isinstance(name, str): | 814 | elif isinstance(name, str): |
524 | # Optimization to find all tags with a given name. | 815 | # Optimization to find all tags with a given name. |
816 | if name.count(':') == 1: | ||
817 | # This is a name with a prefix. If this is a namespace-aware document, | ||
818 | # we need to match the local name against tag.name. If not, | ||
819 | # we need to match the fully-qualified name against tag.name. | ||
820 | prefix, local_name = name.split(':', 1) | ||
821 | else: | ||
822 | prefix = None | ||
823 | local_name = name | ||
525 | result = (element for element in generator | 824 | result = (element for element in generator |
526 | if isinstance(element, Tag) | 825 | if isinstance(element, Tag) |
527 | and element.name == name) | 826 | and ( |
827 | element.name == name | ||
828 | ) or ( | ||
829 | element.name == local_name | ||
830 | and (prefix is None or element.prefix == prefix) | ||
831 | ) | ||
832 | ) | ||
528 | return ResultSet(strainer, result) | 833 | return ResultSet(strainer, result) |
529 | results = ResultSet(strainer) | 834 | results = ResultSet(strainer) |
530 | while True: | 835 | while True: |
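The prefix-matching branch above lets a qualified name like "svg:path" match a namespaced tag. A sketch assuming lxml is available to back the "xml" parser:

    from bs4 import BeautifulSoup

    doc = '<root xmlns:svg="http://www.w3.org/2000/svg"><svg:path/></root>'
    soup = BeautifulSoup(doc, "xml")
    # The XML builder stores prefix and local name separately, so the
    # split on ':' above matches prefix "svg" plus local name "path".
    print(len(soup.find_all("svg:path")))  # 1
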
@@ -544,6 +849,10 @@ class PageElement(object): | |||
544 | #NavigableStrings and Tags. | 849 | #NavigableStrings and Tags. |
545 | @property | 850 | @property |
546 | def next_elements(self): | 851 | def next_elements(self): |
852 | """All PageElements that were parsed after this one. | ||
853 | |||
854 | :yield: A sequence of PageElements. | ||
855 | """ | ||
547 | i = self.next_element | 856 | i = self.next_element |
548 | while i is not None: | 857 | while i is not None: |
549 | yield i | 858 | yield i |
@@ -551,6 +860,11 @@ class PageElement(object): | |||
551 | 860 | ||
552 | @property | 861 | @property |
553 | def next_siblings(self): | 862 | def next_siblings(self): |
863 | """All PageElements that are siblings of this one but were parsed | ||
864 | later. | ||
865 | |||
866 | :yield: A sequence of PageElements. | ||
867 | """ | ||
554 | i = self.next_sibling | 868 | i = self.next_sibling |
555 | while i is not None: | 869 | while i is not None: |
556 | yield i | 870 | yield i |
@@ -558,6 +872,10 @@ class PageElement(object): | |||
558 | 872 | ||
559 | @property | 873 | @property |
560 | def previous_elements(self): | 874 | def previous_elements(self): |
875 | """All PageElements that were parsed before this one. | ||
876 | |||
877 | :yield: A sequence of PageElements. | ||
878 | """ | ||
561 | i = self.previous_element | 879 | i = self.previous_element |
562 | while i is not None: | 880 | while i is not None: |
563 | yield i | 881 | yield i |
@@ -565,6 +883,11 @@ class PageElement(object): | |||
565 | 883 | ||
566 | @property | 884 | @property |
567 | def previous_siblings(self): | 885 | def previous_siblings(self): |
886 | """All PageElements that are siblings of this one but were parsed | ||
887 | earlier. | ||
888 | |||
889 | :yield: A sequence of PageElements. | ||
890 | """ | ||
568 | i = self.previous_sibling | 891 | i = self.previous_sibling |
569 | while i is not None: | 892 | while i is not None: |
570 | yield i | 893 | yield i |
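The four generator properties documented above differ only in direction and scope; a quick sketch of the parse-order walk versus the sibling walk:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a><b>x</b><c>y</c></a>", "html.parser")
    # next_elements follows document parse order past this element...
    print([e.name or str(e) for e in soup.b.next_elements])  # ['x', 'c', 'y']
    # ...while next_siblings stays on the same level of the tree.
    print([e.name for e in soup.b.next_siblings])            # ['c']
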
@@ -572,87 +895,23 @@ class PageElement(object): | |||
572 | 895 | ||
573 | @property | 896 | @property |
574 | def parents(self): | 897 | def parents(self): |
898 | """All PageElements that are parents of this PageElement. | ||
899 | |||
900 | :yield: A sequence of PageElements. | ||
901 | """ | ||
575 | i = self.parent | 902 | i = self.parent |
576 | while i is not None: | 903 | while i is not None: |
577 | yield i | 904 | yield i |
578 | i = i.parent | 905 | i = i.parent |
579 | 906 | ||
580 | # Methods for supporting CSS selectors. | 907 | @property |
581 | 908 | def decomposed(self): | |
582 | tag_name_re = re.compile(r'^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$') | 909 | """Check whether a PageElement has been decomposed. |
583 | |||
584 | # /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ | ||
585 | # \---------------------------/ \---/\-------------/ \-------/ | ||
586 | # | | | | | ||
587 | # | | | The value | ||
588 | # | | ~,|,^,$,* or = | ||
589 | # | Attribute | ||
590 | # Tag | ||
591 | attribselect_re = re.compile( | ||
592 | r'^(?P<tag>[a-zA-Z0-9][-.a-zA-Z0-9:_]*)?\[(?P<attribute>[\w-]+)(?P<operator>[=~\|\^\$\*]?)' + | ||
593 | r'=?"?(?P<value>[^\]"]*)"?\]$' | ||
594 | ) | ||
595 | |||
596 | def _attr_value_as_string(self, value, default=None): | ||
597 | """Force an attribute value into a string representation. | ||
598 | 910 | ||
599 | A multi-valued attribute will be converted into a | 911 | :rtype: bool |
600 | space-separated stirng. | ||
601 | """ | 912 | """ |
602 | value = self.get(value, default) | 913 | return getattr(self, '_decomposed', False) or False |
603 | if isinstance(value, list) or isinstance(value, tuple): | 914 | |
604 | value =" ".join(value) | ||
605 | return value | ||
606 | |||
607 | def _tag_name_matches_and(self, function, tag_name): | ||
608 | if not tag_name: | ||
609 | return function | ||
610 | else: | ||
611 | def _match(tag): | ||
612 | return tag.name == tag_name and function(tag) | ||
613 | return _match | ||
614 | |||
615 | def _attribute_checker(self, operator, attribute, value=''): | ||
616 | """Create a function that performs a CSS selector operation. | ||
617 | |||
618 | Takes an operator, attribute and optional value. Returns a | ||
619 | function that will return True for elements that match that | ||
620 | combination. | ||
621 | """ | ||
622 | if operator == '=': | ||
623 | # string representation of `attribute` is equal to `value` | ||
624 | return lambda el: el._attr_value_as_string(attribute) == value | ||
625 | elif operator == '~': | ||
626 | # space-separated list representation of `attribute` | ||
627 | # contains `value` | ||
628 | def _includes_value(element): | ||
629 | attribute_value = element.get(attribute, []) | ||
630 | if not isinstance(attribute_value, list): | ||
631 | attribute_value = attribute_value.split() | ||
632 | return value in attribute_value | ||
633 | return _includes_value | ||
634 | elif operator == '^': | ||
635 | # string representation of `attribute` starts with `value` | ||
636 | return lambda el: el._attr_value_as_string( | ||
637 | attribute, '').startswith(value) | ||
638 | elif operator == '$': | ||
639 | # string represenation of `attribute` ends with `value` | ||
640 | return lambda el: el._attr_value_as_string( | ||
641 | attribute, '').endswith(value) | ||
642 | elif operator == '*': | ||
643 | # string representation of `attribute` contains `value` | ||
644 | return lambda el: value in el._attr_value_as_string(attribute, '') | ||
645 | elif operator == '|': | ||
646 | # string representation of `attribute` is either exactly | ||
647 | # `value` or starts with `value` and then a dash. | ||
648 | def _is_or_starts_with_dash(element): | ||
649 | attribute_value = element._attr_value_as_string(attribute, '') | ||
650 | return (attribute_value == value or attribute_value.startswith( | ||
651 | value + '-')) | ||
652 | return _is_or_starts_with_dash | ||
653 | else: | ||
654 | return lambda el: el.has_attr(attribute) | ||
655 | |||
656 | # Old non-property versions of the generators, for backwards | 915 | # Old non-property versions of the generators, for backwards |
657 | # compatibility with BS3. | 916 | # compatibility with BS3. |
658 | def nextGenerator(self): | 917 | def nextGenerator(self): |
@@ -672,6 +931,11 @@ class PageElement(object): | |||
672 | 931 | ||
673 | 932 | ||
674 | class NavigableString(str, PageElement): | 933 | class NavigableString(str, PageElement): |
934 | """A Python Unicode string that is part of a parse tree. | ||
935 | |||
936 | When Beautiful Soup parses the markup <b>penguin</b>, it will | ||
937 | create a NavigableString for the string "penguin". | ||
938 | """ | ||
675 | 939 | ||
676 | PREFIX = '' | 940 | PREFIX = '' |
677 | SUFFIX = '' | 941 | SUFFIX = '' |
@@ -691,12 +955,22 @@ class NavigableString(str, PageElement): | |||
691 | u.setup() | 955 | u.setup() |
692 | return u | 956 | return u |
693 | 957 | ||
694 | def __copy__(self): | 958 | def __deepcopy__(self, memo, recursive=False): |
695 | """A copy of a NavigableString has the same contents and class | 959 | """A copy of a NavigableString has the same contents and class |
696 | as the original, but it is not connected to the parse tree. | 960 | as the original, but it is not connected to the parse tree. |
961 | |||
962 | :param recursive: This parameter is ignored; it's only defined | ||
963 | so that NavigableString.__deepcopy__ implements the same | ||
964 | signature as Tag.__deepcopy__. | ||
697 | """ | 965 | """ |
698 | return type(self)(self) | 966 | return type(self)(self) |
699 | 967 | ||
968 | def __copy__(self): | ||
969 | """A copy of a NavigableString can only be a deep copy, because | ||
970 | only one PageElement can occupy a given place in a parse tree. | ||
971 | """ | ||
972 | return self.__deepcopy__({}) | ||
973 | |||
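The copy semantics above can be checked directly; a short sketch (both copy.copy() and copy.deepcopy() end up in __deepcopy__):

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<b>penguin</b>', 'html.parser')
    s = soup.b.string          # the NavigableString 'penguin'

    clone = copy.copy(s)       # routed through __deepcopy__
    print(clone == s)          # True: same contents and class
    print(clone is s)          # False: a new, detached object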
700 | def __getnewargs__(self): | 974 | def __getnewargs__(self): |
701 | return (str(self),) | 975 | return (str(self),) |
702 | 976 | ||
@@ -712,55 +986,146 @@ class NavigableString(str, PageElement): | |||
712 | self.__class__.__name__, attr)) | 986 | self.__class__.__name__, attr)) |
713 | 987 | ||
714 | def output_ready(self, formatter="minimal"): | 988 | def output_ready(self, formatter="minimal"): |
989 | """Run the string through the provided formatter. | ||
990 | |||
991 | :param formatter: A Formatter object, or a string naming one of the standard formatters. | ||
992 | """ | ||
715 | output = self.format_string(self, formatter) | 993 | output = self.format_string(self, formatter) |
716 | return self.PREFIX + output + self.SUFFIX | 994 | return self.PREFIX + output + self.SUFFIX |
717 | 995 | ||
718 | @property | 996 | @property |
719 | def name(self): | 997 | def name(self): |
998 | """Since a NavigableString is not a Tag, it has no .name. | ||
999 | |||
1000 | This property is implemented so that code like this doesn't crash | ||
1001 | when run on a mixture of Tag and NavigableString objects: | ||
1002 | [x.name for x in tag.children] | ||
1003 | """ | ||
720 | return None | 1004 | return None |
721 | 1005 | ||
722 | @name.setter | 1006 | @name.setter |
723 | def name(self, name): | 1007 | def name(self, name): |
1008 | """Prevent NavigableString.name from ever being set.""" | ||
724 | raise AttributeError("A NavigableString cannot be given a name.") | 1009 | raise AttributeError("A NavigableString cannot be given a name.") |
725 | 1010 | ||
1011 | def _all_strings(self, strip=False, types=PageElement.default): | ||
1012 | """Yield all strings of certain classes, possibly stripping them. | ||
1013 | |||
1014 | This makes it easy for NavigableString to implement methods | ||
1015 | like get_text() as conveniences, creating a consistent | ||
1016 | text-extraction API across all PageElements. | ||
1017 | |||
1018 | :param strip: If True, all strings will be stripped before being | ||
1019 | yielded. | ||
1020 | |||
1021 | :param types: A tuple of NavigableString subclasses. If this | ||
1022 | NavigableString isn't one of those subclasses, the | ||
1023 | sequence will be empty. By default, the subclasses | ||
1024 | considered are NavigableString and CData objects. That | ||
1025 | means no comments, processing instructions, etc. | ||
1026 | |||
1027 | :yield: A sequence that either contains this string, or is empty. | ||
1028 | |||
1029 | """ | ||
1030 | if types is self.default: | ||
1031 | # This is kept in Tag because it's full of subclasses of | ||
1032 | # this class, which aren't defined until later in the file. | ||
1033 | types = Tag.DEFAULT_INTERESTING_STRING_TYPES | ||
1034 | |||
1035 | # Do nothing if the caller is looking for specific types of | ||
1036 | # string, and we're of a different type. | ||
1037 | # | ||
1038 | # We check specific types instead of using isinstance(self, | ||
1039 | # types) because all of these classes subclass | ||
1040 | # NavigableString. Anyone who's using this feature probably | ||
1041 | # wants generic NavigableStrings but not other stuff. | ||
1042 | my_type = type(self) | ||
1043 | if types is not None: | ||
1044 | if isinstance(types, type): | ||
1045 | # Looking for a single type. | ||
1046 | if my_type is not types: | ||
1047 | return | ||
1048 | elif my_type not in types: | ||
1049 | # Looking for one of a list of types. | ||
1050 | return | ||
1051 | |||
1052 | value = self | ||
1053 | if strip: | ||
1054 | value = value.strip() | ||
1055 | if len(value) > 0: | ||
1056 | yield value | ||
1057 | strings = property(_all_strings) | ||
1058 | |||
726 | class PreformattedString(NavigableString): | 1059 | class PreformattedString(NavigableString): |
727 | """A NavigableString not subject to the normal formatting rules. | 1060 | """A NavigableString not subject to the normal formatting rules. |
728 | 1061 | ||
729 | The string will be passed into the formatter (to trigger side effects), | 1062 | This is an abstract class used for special kinds of strings such |
730 | but the return value will be ignored. | 1063 | as comments (the Comment class) and CDATA blocks (the CData |
1064 | class). | ||
731 | """ | 1065 | """ |
732 | 1066 | ||
733 | def output_ready(self, formatter="minimal"): | 1067 | PREFIX = '' |
734 | """CData strings are passed into the formatter. | 1068 | SUFFIX = '' |
735 | But the return value is ignored.""" | 1069 | |
736 | self.format_string(self, formatter) | 1070 | def output_ready(self, formatter=None): |
1071 | """Make this string ready for output by adding any subclass-specific | ||
1072 | prefix or suffix. | ||
1073 | |||
1074 | :param formatter: A Formatter object, or a string naming one | ||
1075 | of the standard formatters. The string will be passed into the | ||
1076 | Formatter, but only to trigger any side effects: the return | ||
1077 | value is ignored. | ||
1078 | |||
1079 | :return: The string, with any subclass-specific prefix and | ||
1080 | suffix added on. | ||
1081 | """ | ||
1082 | if formatter is not None: | ||
1083 | ignore = self.format_string(self, formatter) | ||
737 | return self.PREFIX + self + self.SUFFIX | 1084 | return self.PREFIX + self + self.SUFFIX |
738 | 1085 | ||
739 | class CData(PreformattedString): | 1086 | class CData(PreformattedString): |
740 | 1087 | """A CDATA block.""" | |
741 | PREFIX = '<![CDATA[' | 1088 | PREFIX = '<![CDATA[' |
742 | SUFFIX = ']]>' | 1089 | SUFFIX = ']]>' |
743 | 1090 | ||
744 | class ProcessingInstruction(PreformattedString): | 1091 | class ProcessingInstruction(PreformattedString): |
1092 | """A SGML processing instruction.""" | ||
745 | 1093 | ||
746 | PREFIX = '<?' | 1094 | PREFIX = '<?' |
747 | SUFFIX = '>' | 1095 | SUFFIX = '>' |
748 | 1096 | ||
749 | class Comment(PreformattedString): | 1097 | class XMLProcessingInstruction(ProcessingInstruction): |
1098 | """An XML processing instruction.""" | ||
1099 | PREFIX = '<?' | ||
1100 | SUFFIX = '?>' | ||
750 | 1101 | ||
1102 | class Comment(PreformattedString): | ||
1103 | """An HTML or XML comment.""" | ||
751 | PREFIX = '<!--' | 1104 | PREFIX = '<!--' |
752 | SUFFIX = '-->' | 1105 | SUFFIX = '-->' |
753 | 1106 | ||
754 | 1107 | ||
755 | class Declaration(PreformattedString): | 1108 | class Declaration(PreformattedString): |
1109 | """An XML declaration.""" | ||
756 | PREFIX = '<?' | 1110 | PREFIX = '<?' |
757 | SUFFIX = '?>' | 1111 | SUFFIX = '?>' |
758 | 1112 | ||
759 | 1113 | ||
760 | class Doctype(PreformattedString): | 1114 | class Doctype(PreformattedString): |
761 | 1115 | """A document type declaration.""" | |
762 | @classmethod | 1116 | @classmethod |
763 | def for_name_and_ids(cls, name, pub_id, system_id): | 1117 | def for_name_and_ids(cls, name, pub_id, system_id): |
1118 | """Generate an appropriate document type declaration for a given | ||
1119 | public ID and system ID. | ||
1120 | |||
1121 | :param name: The name of the document's root element, e.g. 'html'. | ||
1122 | :param pub_id: The Formal Public Identifier for this document type, | ||
1123 | e.g. '-//W3C//DTD XHTML 1.1//EN' | ||
1124 | :param system_id: The system identifier for this document type, | ||
1125 | e.g. 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd' | ||
1126 | |||
1127 | :return: A Doctype. | ||
1128 | """ | ||
764 | value = name or '' | 1129 | value = name or '' |
765 | if pub_id is not None: | 1130 | if pub_id is not None: |
766 | value += ' PUBLIC "%s"' % pub_id | 1131 | value += ' PUBLIC "%s"' % pub_id |
@@ -775,14 +1140,105 @@ class Doctype(PreformattedString): | |||
775 | SUFFIX = '>\n' | 1140 | SUFFIX = '>\n' |
776 | 1141 | ||
777 | 1142 | ||
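Putting for_name_and_ids() together with the PREFIX/SUFFIX mechanism (only the SUFFIX is visible in this hunk; the '<!DOCTYPE ' prefix is taken from upstream):

    from bs4.element import Doctype

    dt = Doctype.for_name_and_ids(
        'html',
        '-//W3C//DTD XHTML 1.1//EN',
        'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')

    # The prefix and suffix are applied at output time, not stored:
    print(str(dt))            # html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "..."
    print(dt.output_ready())  # <!DOCTYPE ...>, followed by a newline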
1143 | class Stylesheet(NavigableString): | ||
1144 | """A NavigableString representing an stylesheet (probably | ||
1145 | CSS). | ||
1146 | |||
1147 | Used to distinguish embedded stylesheets from textual content. | ||
1148 | """ | ||
1149 | pass | ||
1150 | |||
1151 | |||
1152 | class Script(NavigableString): | ||
1153 | """A NavigableString representing an executable script (probably | ||
1154 | JavaScript). | ||
1155 | |||
1156 | Used to distinguish executable code from textual content. | ||
1157 | """ | ||
1158 | pass | ||
1159 | |||
1160 | |||
1161 | class TemplateString(NavigableString): | ||
1162 | """A NavigableString representing a string found inside an HTML | ||
1163 | template embedded in a larger document. | ||
1164 | |||
1165 | Used to distinguish such strings from the main body of the document. | ||
1166 | """ | ||
1167 | pass | ||
1168 | |||
1169 | |||
1170 | class RubyTextString(NavigableString): | ||
1171 | """A NavigableString representing the contents of the <rt> HTML | ||
1172 | element. | ||
1173 | |||
1174 | https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rt-element | ||
1175 | |||
1176 | Can be used to distinguish such strings from the strings they're | ||
1177 | annotating. | ||
1178 | """ | ||
1179 | pass | ||
1180 | |||
1181 | |||
1182 | class RubyParenthesisString(NavigableString): | ||
1183 | """A NavigableString representing the contents of the <rp> HTML | ||
1184 | element. | ||
1185 | |||
1186 | https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rp-element | ||
1187 | """ | ||
1188 | pass | ||
1189 | |||
1190 | |||
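These container classes interact with text extraction: upstream's HTML builders map 'style', 'script' and 'template' to the classes above, and none of them are default "interesting" string types. A sketch, assuming the bundled builders match upstream:

    from bs4 import BeautifulSoup
    from bs4.element import Script

    soup = BeautifulSoup('<script>alert(1)</script>', 'html.parser')

    print(type(soup.script.string) is Script)  # True: its own container class
    print(repr(soup.get_text()))               # '': Script is filtered out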
778 | class Tag(PageElement): | 1191 | class Tag(PageElement): |
1192 | """Represents an HTML or XML tag that is part of a parse tree, along | ||
1193 | with its attributes and contents. | ||
779 | 1194 | ||
780 | """Represents a found HTML tag with its attributes and contents.""" | 1195 | When Beautiful Soup parses the markup <b>penguin</b>, it will |
1196 | create a Tag object representing the <b> tag. | ||
1197 | """ | ||
781 | 1198 | ||
782 | def __init__(self, parser=None, builder=None, name=None, namespace=None, | 1199 | def __init__(self, parser=None, builder=None, name=None, namespace=None, |
783 | prefix=None, attrs=None, parent=None, previous=None): | 1200 | prefix=None, attrs=None, parent=None, previous=None, |
784 | "Basic constructor." | 1201 | is_xml=None, sourceline=None, sourcepos=None, |
785 | 1202 | can_be_empty_element=None, cdata_list_attributes=None, | |
1203 | preserve_whitespace_tags=None, | ||
1204 | interesting_string_types=None, | ||
1205 | namespaces=None | ||
1206 | ): | ||
1207 | """Basic constructor. | ||
1208 | |||
1209 | :param parser: A BeautifulSoup object. | ||
1210 | :param builder: A TreeBuilder. | ||
1211 | :param name: The name of the tag. | ||
1212 | :param namespace: The URI of this Tag's XML namespace, if any. | ||
1213 | :param prefix: The prefix for this Tag's XML namespace, if any. | ||
1214 | :param attrs: A dictionary of this Tag's attribute values. | ||
1215 | :param parent: The PageElement to use as this Tag's parent. | ||
1216 | :param previous: The PageElement that was parsed immediately before | ||
1217 | this tag. | ||
1218 | :param is_xml: If True, this is an XML tag. Otherwise, this is an | ||
1219 | HTML tag. | ||
1220 | :param sourceline: The line number where this tag was found in its | ||
1221 | source document. | ||
1222 | :param sourcepos: The character position within `sourceline` where this | ||
1223 | tag was found. | ||
1224 | :param can_be_empty_element: If True, this tag should be | ||
1225 | represented as <tag/>. If False, this tag should be represented | ||
1226 | as <tag></tag>. | ||
1227 | :param cdata_list_attributes: A list of attributes whose values should | ||
1228 | be treated as CDATA if they ever show up on this tag. | ||
1229 | :param preserve_whitespace_tags: A list of tag names whose contents | ||
1230 | should have their whitespace preserved. | ||
1231 | :param interesting_string_types: This is a NavigableString | ||
1232 | subclass or a tuple of them. When iterating over this | ||
1233 | Tag's strings in methods like Tag.strings or Tag.get_text, | ||
1234 | these are the types of strings that are interesting enough | ||
1235 | to be considered. The default is to consider | ||
1236 | NavigableString and CData the only interesting string | ||
1237 | subtypes. | ||
1238 | :param namespaces: A dictionary mapping currently active | ||
1239 | namespace prefixes to URIs. This can be used later to | ||
1240 | construct CSS selectors. | ||
1241 | """ | ||
786 | if parser is None: | 1242 | if parser is None: |
787 | self.parser_class = None | 1243 | self.parser_class = None |
788 | else: | 1244 | else: |
@@ -793,7 +1249,12 @@ class Tag(PageElement): | |||
793 | raise ValueError("No value provided for new tag's name.") | 1249 | raise ValueError("No value provided for new tag's name.") |
794 | self.name = name | 1250 | self.name = name |
795 | self.namespace = namespace | 1251 | self.namespace = namespace |
1252 | self._namespaces = namespaces or {} | ||
796 | self.prefix = prefix | 1253 | self.prefix = prefix |
1254 | if ((not builder or builder.store_line_numbers) | ||
1255 | and (sourceline is not None or sourcepos is not None)): | ||
1256 | self.sourceline = sourceline | ||
1257 | self.sourcepos = sourcepos | ||
797 | if attrs is None: | 1258 | if attrs is None: |
798 | attrs = {} | 1259 | attrs = {} |
799 | elif attrs: | 1260 | elif attrs: |
@@ -804,32 +1265,109 @@ class Tag(PageElement): | |||
804 | attrs = dict(attrs) | 1265 | attrs = dict(attrs) |
805 | else: | 1266 | else: |
806 | attrs = dict(attrs) | 1267 | attrs = dict(attrs) |
1268 | |||
1269 | # If possible, determine ahead of time whether this tag is an | ||
1270 | # XML tag. | ||
1271 | if builder: | ||
1272 | self.known_xml = builder.is_xml | ||
1273 | else: | ||
1274 | self.known_xml = is_xml | ||
807 | self.attrs = attrs | 1275 | self.attrs = attrs |
808 | self.contents = [] | 1276 | self.contents = [] |
809 | self.setup(parent, previous) | 1277 | self.setup(parent, previous) |
810 | self.hidden = False | 1278 | self.hidden = False |
811 | 1279 | ||
812 | # Set up any substitutions, such as the charset in a META tag. | 1280 | if builder is None: |
813 | if builder is not None: | 1281 | # In the absence of a TreeBuilder, use whatever values were |
1282 | # passed in here. They're probably None, unless this is a copy of some | ||
1283 | # other tag. | ||
1284 | self.can_be_empty_element = can_be_empty_element | ||
1285 | self.cdata_list_attributes = cdata_list_attributes | ||
1286 | self.preserve_whitespace_tags = preserve_whitespace_tags | ||
1287 | self.interesting_string_types = interesting_string_types | ||
1288 | else: | ||
1289 | # Set up any substitutions for this tag, such as the charset in a META tag. | ||
814 | builder.set_up_substitutions(self) | 1290 | builder.set_up_substitutions(self) |
1291 | |||
1292 | # Ask the TreeBuilder whether this tag might be an empty-element tag. | ||
815 | self.can_be_empty_element = builder.can_be_empty_element(name) | 1293 | self.can_be_empty_element = builder.can_be_empty_element(name) |
816 | else: | 1294 | |
817 | self.can_be_empty_element = False | 1295 | # Keep track of the list of attributes of this tag that |
1296 | # might need to be treated as a list. | ||
1297 | # | ||
1298 | # For performance reasons, we store the whole data structure | ||
1299 | # rather than asking the question of every tag. Asking would | ||
1300 | # require building a new data structure every time, and | ||
1301 | # (unlike can_be_empty_element), we almost never need | ||
1302 | # to check this. | ||
1303 | self.cdata_list_attributes = builder.cdata_list_attributes | ||
1304 | |||
1305 | # Keep track of the names that might cause this tag to be treated as a | ||
1306 | # whitespace-preserved tag. | ||
1307 | self.preserve_whitespace_tags = builder.preserve_whitespace_tags | ||
1308 | |||
1309 | if self.name in builder.string_containers: | ||
1310 | # This sort of tag uses a special string container | ||
1311 | # subclass for most of its strings; record it here. | ||
1312 | self.interesting_string_types = builder.string_containers[self.name] | ||
1313 | else: | ||
1314 | self.interesting_string_types = self.DEFAULT_INTERESTING_STRING_TYPES | ||
818 | 1315 | ||
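The builder-less branch makes it possible to build a detached Tag by hand, with the keyword arguments standing in for a TreeBuilder; a minimal sketch:

    from bs4.element import Tag

    t = Tag(name='br', can_be_empty_element=True)
    print(t.is_empty_element)  # True: no contents, and allowed to be empty
    print(str(t))              # <br/>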
819 | parserClass = _alias("parser_class") # BS3 | 1316 | parserClass = _alias("parser_class") # BS3 |
820 | 1317 | ||
821 | def __copy__(self): | 1318 | def __deepcopy__(self, memo, recursive=True): |
822 | """A copy of a Tag is a new Tag, unconnected to the parse tree. | 1319 | """A deepcopy of a Tag is a new Tag, unconnected to the parse tree. |
823 | Its contents are a copy of the old Tag's contents. | 1320 | Its contents are a copy of the old Tag's contents. |
824 | """ | 1321 | """ |
825 | clone = type(self)(None, self.builder, self.name, self.namespace, | 1322 | clone = self._clone() |
826 | self.nsprefix, self.attrs) | 1323 | |
1324 | if recursive: | ||
1325 | # Clone this tag's descendants recursively, but without | ||
1326 | # making any recursive function calls. | ||
1327 | tag_stack = [clone] | ||
1328 | for event, element in self._event_stream(self.descendants): | ||
1329 | if event is Tag.END_ELEMENT_EVENT: | ||
1330 | # Stop appending incoming Tags to the Tag that was | ||
1331 | # just closed. | ||
1332 | tag_stack.pop() | ||
1333 | else: | ||
1334 | descendant_clone = element.__deepcopy__( | ||
1335 | memo, recursive=False | ||
1336 | ) | ||
1337 | # Add to its parent's .contents | ||
1338 | tag_stack[-1].append(descendant_clone) | ||
1339 | |||
1340 | if event is Tag.START_ELEMENT_EVENT: | ||
1341 | # Add the Tag itself to the stack so that its | ||
1342 | # children will be .appended to it. | ||
1343 | tag_stack.append(descendant_clone) | ||
1344 | return clone | ||
1345 | |||
1346 | def __copy__(self): | ||
1347 | """A copy of a Tag must always be a deep copy, because a Tag's | ||
1348 | children can only have one parent at a time. | ||
1349 | """ | ||
1350 | return self.__deepcopy__({}) | ||
1351 | |||
1352 | def _clone(self): | ||
1353 | """Create a new Tag just like this one, but with no | ||
1354 | contents and unattached to any parse tree. | ||
1355 | |||
1356 | This is the first step in the deepcopy process. | ||
1357 | """ | ||
1358 | clone = type(self)( | ||
1359 | None, None, self.name, self.namespace, | ||
1360 | self.prefix, self.attrs, is_xml=self._is_xml, | ||
1361 | sourceline=self.sourceline, sourcepos=self.sourcepos, | ||
1362 | can_be_empty_element=self.can_be_empty_element, | ||
1363 | cdata_list_attributes=self.cdata_list_attributes, | ||
1364 | preserve_whitespace_tags=self.preserve_whitespace_tags, | ||
1365 | interesting_string_types=self.interesting_string_types | ||
1366 | ) | ||
827 | for attr in ('can_be_empty_element', 'hidden'): | 1367 | for attr in ('can_be_empty_element', 'hidden'): |
828 | setattr(clone, attr, getattr(self, attr)) | 1368 | setattr(clone, attr, getattr(self, attr)) |
829 | for child in self.contents: | ||
830 | clone.append(child.__copy__()) | ||
831 | return clone | 1369 | return clone |
832 | 1370 | ||
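In use, the deepcopy machinery above yields a detached, value-equal clone:

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div><p>one</p><p>two</p></div>', 'html.parser')
    div = soup.div

    clone = copy.copy(div)       # same as deepcopy: one parent per child
    print(clone == div)          # True: same name, attributes, contents
    print(clone is div)          # False
    print(clone.parent is None)  # True: detached from the original tree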
833 | @property | 1371 | @property |
834 | def is_empty_element(self): | 1372 | def is_empty_element(self): |
835 | """Is this tag an empty-element tag? (aka a self-closing tag) | 1373 | """Is this tag an empty-element tag? (aka a self-closing tag) |
@@ -850,13 +1388,17 @@ class Tag(PageElement): | |||
850 | 1388 | ||
851 | @property | 1389 | @property |
852 | def string(self): | 1390 | def string(self): |
853 | """Convenience property to get the single string within this tag. | 1391 | """Convenience property to get the single string within this |
1392 | PageElement. | ||
854 | 1393 | ||
855 | :Return: If this tag has a single string child, return value | 1394 | TODO It might make sense to have NavigableString.string return |
856 | is that string. If this tag has no children, or more than one | 1395 | itself. |
857 | child, return value is None. If this tag has one child tag, | 1396 | |
1397 | :return: If this element has a single string child, return | ||
1398 | value is that string. If this element has one child tag, | ||
858 | return value is the 'string' attribute of the child tag, | 1399 | return value is the 'string' attribute of the child tag, |
859 | recursively. | 1400 | recursively. If this element is itself a string, has no |
1401 | children, or has more than one child, return value is None. | ||
860 | """ | 1402 | """ |
861 | if len(self.contents) != 1: | 1403 | if len(self.contents) != 1: |
862 | return None | 1404 | return None |
@@ -867,57 +1409,75 @@ class Tag(PageElement): | |||
867 | 1409 | ||
868 | @string.setter | 1410 | @string.setter |
869 | def string(self, string): | 1411 | def string(self, string): |
1412 | """Replace this PageElement's contents with `string`.""" | ||
870 | self.clear() | 1413 | self.clear() |
871 | self.append(string.__class__(string)) | 1414 | self.append(string.__class__(string)) |
872 | 1415 | ||
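The getter recurses through a single child tag; the setter wipes the contents. For example:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p><b>penguin</b></p>', 'html.parser')

    # One child tag holding a single string: .string recurses into it.
    print(soup.p.string)  # penguin

    # The setter replaces everything under the tag with one string.
    soup.p.string = 'auk'
    print(soup.p)         # <p>auk</p>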
873 | def _all_strings(self, strip=False, types=(NavigableString, CData)): | 1416 | DEFAULT_INTERESTING_STRING_TYPES = (NavigableString, CData) |
1417 | def _all_strings(self, strip=False, types=PageElement.default): | ||
874 | """Yield all strings of certain classes, possibly stripping them. | 1418 | """Yield all strings of certain classes, possibly stripping them. |
875 | 1419 | ||
876 | By default, yields only NavigableString and CData objects. So | 1420 | :param strip: If True, all strings will be stripped before being |
877 | no comments, processing instructions, etc. | 1421 | yielded. |
1422 | |||
1423 | :param types: A tuple of NavigableString subclasses. Any strings of | ||
1424 | a subclass not found in this list will be ignored. By | ||
1425 | default, the subclasses considered are the ones found in | ||
1426 | self.interesting_string_types. If that's not specified, | ||
1427 | only NavigableString and CData objects will be | ||
1428 | considered. That means no comments, processing | ||
1429 | instructions, etc. | ||
1430 | |||
1431 | :yield: A sequence of strings. | ||
1432 | |||
878 | """ | 1433 | """ |
1434 | if types is self.default: | ||
1435 | types = self.interesting_string_types | ||
1436 | |||
879 | for descendant in self.descendants: | 1437 | for descendant in self.descendants: |
880 | if ( | 1438 | if (types is None and not isinstance(descendant, NavigableString)): |
881 | (types is None and not isinstance(descendant, NavigableString)) | 1439 | continue |
882 | or | 1440 | descendant_type = type(descendant) |
883 | (types is not None and type(descendant) not in types)): | 1441 | if isinstance(types, type): |
1442 | if descendant_type is not types: | ||
1443 | # We're not interested in strings of this type. | ||
1444 | continue | ||
1445 | elif types is not None and descendant_type not in types: | ||
1446 | # We're not interested in strings of this type. | ||
884 | continue | 1447 | continue |
885 | if strip: | 1448 | if strip: |
886 | descendant = descendant.strip() | 1449 | descendant = descendant.strip() |
887 | if len(descendant) == 0: | 1450 | if len(descendant) == 0: |
888 | continue | 1451 | continue |
889 | yield descendant | 1452 | yield descendant |
890 | |||
891 | strings = property(_all_strings) | 1453 | strings = property(_all_strings) |
892 | 1454 | ||
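The strings generator, and the stripped_strings/get_text conveniences built on it, behave as follows:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<ul> <li>a</li> <li>b</li> </ul>', 'html.parser')

    print(list(soup.ul.strings))              # [' ', 'a', ' ', 'b', ' ']
    print(list(soup.ul.stripped_strings))     # ['a', 'b']
    print(soup.ul.get_text('|', strip=True))  # a|b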
893 | @property | 1455 | def decompose(self): |
894 | def stripped_strings(self): | 1456 | """Recursively destroys this PageElement and its children. |
895 | for string in self._all_strings(True): | ||
896 | yield string | ||
897 | 1457 | ||
898 | def get_text(self, separator="", strip=False, | 1458 | This element will be removed from the tree and wiped out; so |
899 | types=(NavigableString, CData)): | 1459 | will everything beneath it. |
900 | """ | ||
901 | Get all child strings, concatenated using the given separator. | ||
902 | """ | ||
903 | return separator.join([s for s in self._all_strings( | ||
904 | strip, types=types)]) | ||
905 | getText = get_text | ||
906 | text = property(get_text) | ||
907 | 1460 | ||
908 | def decompose(self): | 1461 | The behavior of a decomposed PageElement is undefined and you |
909 | """Recursively destroys the contents of this tree.""" | 1462 | should never use one for anything, but if you need to _check_ |
1463 | whether an element has been decomposed, you can use the | ||
1464 | `decomposed` property. | ||
1465 | """ | ||
910 | self.extract() | 1466 | self.extract() |
911 | i = self | 1467 | i = self |
912 | while i is not None: | 1468 | while i is not None: |
913 | next = i.next_element | 1469 | n = i.next_element |
914 | i.__dict__.clear() | 1470 | i.__dict__.clear() |
915 | i.contents = [] | 1471 | i.contents = [] |
916 | i = next | 1472 | i._decomposed = True |
1473 | i = n | ||
917 | 1474 | ||
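The decomposed property is the supported way to check for a dead element:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div><span>gone</span></div>', 'html.parser')
    span = soup.div.span
    span.decompose()

    print(soup.div)         # <div></div>
    print(span.decomposed)  # True: the only safe thing to ask of it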
918 | def clear(self, decompose=False): | 1475 | def clear(self, decompose=False): |
919 | """ | 1476 | """Wipe out all children of this PageElement by calling extract() |
920 | Extract all children. If decompose is True, decompose instead. | 1477 | on them. |
1478 | |||
1479 | :param decompose: If this is True, decompose() (a more | ||
1480 | destructive method) will be called instead of extract(). | ||
921 | """ | 1481 | """ |
922 | if decompose: | 1482 | if decompose: |
923 | for element in self.contents[:]: | 1483 | for element in self.contents[:]: |
@@ -929,10 +1489,51 @@ class Tag(PageElement): | |||
929 | for element in self.contents[:]: | 1489 | for element in self.contents[:]: |
930 | element.extract() | 1490 | element.extract() |
931 | 1491 | ||
932 | def index(self, element): | 1492 | def smooth(self): |
1493 | """Smooth out this element's children by consolidating consecutive | ||
1494 | strings. | ||
1495 | |||
1496 | This makes pretty-printed output look more natural following a | ||
1497 | lot of operations that modified the tree. | ||
933 | """ | 1498 | """ |
934 | Find the index of a child by identity, not value. Avoids issues with | 1499 | # Mark the first position of every pair of children that need |
935 | tag.contents.index(element) getting the index of equal elements. | 1500 | # to be consolidated. Do this rather than making a copy of |
1501 | # self.contents, since in most cases very few strings will be | ||
1502 | # affected. | ||
1503 | marked = [] | ||
1504 | for i, a in enumerate(self.contents): | ||
1505 | if isinstance(a, Tag): | ||
1506 | # Recursively smooth children. | ||
1507 | a.smooth() | ||
1508 | if i == len(self.contents)-1: | ||
1509 | # This is the last item in .contents, and it's not a | ||
1510 | # tag. There's no chance it needs any work. | ||
1511 | continue | ||
1512 | b = self.contents[i+1] | ||
1513 | if (isinstance(a, NavigableString) | ||
1514 | and isinstance(b, NavigableString) | ||
1515 | and not isinstance(a, PreformattedString) | ||
1516 | and not isinstance(b, PreformattedString) | ||
1517 | ): | ||
1518 | marked.append(i) | ||
1519 | |||
1520 | # Go over the marked positions in reverse order, so that | ||
1521 | # removing items from .contents won't affect the remaining | ||
1522 | # positions. | ||
1523 | for i in reversed(marked): | ||
1524 | a = self.contents[i] | ||
1525 | b = self.contents[i+1] | ||
1526 | b.extract() | ||
1527 | n = NavigableString(a+b) | ||
1528 | a.replace_with(n) | ||
1529 | |||
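Tree edits often leave adjacent string nodes behind; smooth() merges them:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p>hello</p>', 'html.parser')
    soup.p.append(' world')

    print(soup.p.contents)  # ['hello', ' world']: two adjacent strings
    soup.smooth()
    print(soup.p.contents)  # ['hello world']: consolidated into one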
1530 | def index(self, element): | ||
1531 | """Find the index of a child by identity, not value. | ||
1532 | |||
1533 | Avoids issues with tag.contents.index(element) getting the | ||
1534 | index of equal elements. | ||
1535 | |||
1536 | :param element: Look for this PageElement in `self.contents`. | ||
936 | """ | 1537 | """ |
937 | for i, child in enumerate(self.contents): | 1538 | for i, child in enumerate(self.contents): |
938 | if child is element: | 1539 | if child is element: |
@@ -945,23 +1546,38 @@ class Tag(PageElement): | |||
945 | attribute.""" | 1546 | attribute.""" |
946 | return self.attrs.get(key, default) | 1547 | return self.attrs.get(key, default) |
947 | 1548 | ||
1549 | def get_attribute_list(self, key, default=None): | ||
1550 | """The same as get(), but always returns a list. | ||
1551 | |||
1552 | :param key: The attribute to look for. | ||
1553 | :param default: Use this value if the attribute is not present | ||
1554 | on this PageElement. | ||
1555 | :return: A list of values, probably containing only a single | ||
1556 | value. | ||
1557 | """ | ||
1558 | value = self.get(key, default) | ||
1559 | if not isinstance(value, list): | ||
1560 | value = [value] | ||
1561 | return value | ||
1562 | |||
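For example, get_attribute_list() normalizes single- and multi-valued attributes:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p id="x" class="a b">t</p>', 'html.parser')

    print(soup.p.get_attribute_list('id'))     # ['x']
    print(soup.p.get_attribute_list('class'))  # ['a', 'b']
    print(soup.p.get_attribute_list('lang'))   # [None]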
948 | def has_attr(self, key): | 1563 | def has_attr(self, key): |
1564 | """Does this PageElement have an attribute with the given name?""" | ||
949 | return key in self.attrs | 1565 | return key in self.attrs |
950 | 1566 | ||
951 | def __hash__(self): | 1567 | def __hash__(self): |
952 | return str(self).__hash__() | 1568 | return str(self).__hash__() |
953 | 1569 | ||
954 | def __getitem__(self, key): | 1570 | def __getitem__(self, key): |
955 | """tag[key] returns the value of the 'key' attribute for the tag, | 1571 | """tag[key] returns the value of the 'key' attribute for the Tag, |
956 | and throws an exception if it's not there.""" | 1572 | and throws an exception if it's not there.""" |
957 | return self.attrs[key] | 1573 | return self.attrs[key] |
958 | 1574 | ||
959 | def __iter__(self): | 1575 | def __iter__(self): |
960 | "Iterating over a tag iterates over its contents." | 1576 | "Iterating over a Tag iterates over its contents." |
961 | return iter(self.contents) | 1577 | return iter(self.contents) |
962 | 1578 | ||
963 | def __len__(self): | 1579 | def __len__(self): |
964 | "The length of a tag is the length of its list of contents." | 1580 | "The length of a Tag is the length of its list of contents." |
965 | return len(self.contents) | 1581 | return len(self.contents) |
966 | 1582 | ||
967 | def __contains__(self, x): | 1583 | def __contains__(self, x): |
@@ -981,29 +1597,33 @@ class Tag(PageElement): | |||
981 | self.attrs.pop(key, None) | 1597 | self.attrs.pop(key, None) |
982 | 1598 | ||
983 | def __call__(self, *args, **kwargs): | 1599 | def __call__(self, *args, **kwargs): |
984 | """Calling a tag like a function is the same as calling its | 1600 | """Calling a Tag like a function is the same as calling its |
985 | find_all() method. Eg. tag('a') returns a list of all the A tags | 1601 | find_all() method. Eg. tag('a') returns a list of all the A tags |
986 | found within this tag.""" | 1602 | found within this tag.""" |
987 | return self.find_all(*args, **kwargs) | 1603 | return self.find_all(*args, **kwargs) |
988 | 1604 | ||
989 | def __getattr__(self, tag): | 1605 | def __getattr__(self, tag): |
990 | #print "Getattr %s.%s" % (self.__class__, tag) | 1606 | """Calling tag.subtag is the same as calling tag.find(name="subtag")""" |
1607 | #print("Getattr %s.%s" % (self.__class__, tag)) | ||
991 | if len(tag) > 3 and tag.endswith('Tag'): | 1608 | if len(tag) > 3 and tag.endswith('Tag'): |
992 | # BS3: soup.aTag -> "soup.find("a") | 1609 | # BS3: soup.aTag -> "soup.find("a") |
993 | tag_name = tag[:-3] | 1610 | tag_name = tag[:-3] |
994 | warnings.warn( | 1611 | warnings.warn( |
995 | '.%sTag is deprecated, use .find("%s") instead.' % ( | 1612 | '.%(name)sTag is deprecated, use .find("%(name)s") instead. If you really were looking for a tag called %(name)sTag, use .find("%(name)sTag")' % dict( |
996 | tag_name, tag_name)) | 1613 | name=tag_name |
1614 | ), | ||
1615 | DeprecationWarning, stacklevel=2 | ||
1616 | ) | ||
997 | return self.find(tag_name) | 1617 | return self.find(tag_name) |
998 | # We special case contents to avoid recursion. | 1618 | # We special case contents to avoid recursion. |
999 | elif not tag.startswith("__") and not tag=="contents": | 1619 | elif not tag.startswith("__") and not tag == "contents": |
1000 | return self.find(tag) | 1620 | return self.find(tag) |
1001 | raise AttributeError( | 1621 | raise AttributeError( |
1002 | "'%s' object has no attribute '%s'" % (self.__class__, tag)) | 1622 | "'%s' object has no attribute '%s'" % (self.__class__, tag)) |
1003 | 1623 | ||
1004 | def __eq__(self, other): | 1624 | def __eq__(self, other): |
1005 | """Returns true iff this tag has the same name, the same attributes, | 1625 | """Returns true iff this Tag has the same name, the same attributes, |
1006 | and the same contents (recursively) as the given tag.""" | 1626 | and the same contents (recursively) as `other`.""" |
1007 | if self is other: | 1627 | if self is other: |
1008 | return True | 1628 | return True |
1009 | if (not hasattr(other, 'name') or | 1629 | if (not hasattr(other, 'name') or |
@@ -1019,69 +1639,235 @@ class Tag(PageElement): | |||
1019 | return True | 1639 | return True |
1020 | 1640 | ||
1021 | def __ne__(self, other): | 1641 | def __ne__(self, other): |
1022 | """Returns true iff this tag is not identical to the other tag, | 1642 | """Returns true iff this Tag is not identical to `other`, |
1023 | as defined in __eq__.""" | 1643 | as defined in __eq__.""" |
1024 | return not self == other | 1644 | return not self == other |
1025 | 1645 | ||
1026 | def __repr__(self, encoding="unicode-escape"): | 1646 | def __repr__(self, encoding="unicode-escape"): |
1027 | """Renders this tag as a string.""" | 1647 | """Renders this PageElement as a string. |
1028 | if PY3K: | ||
1029 | # "The return value must be a string object", i.e. Unicode | ||
1030 | return self.decode() | ||
1031 | else: | ||
1032 | # "The return value must be a string object", i.e. a bytestring. | ||
1033 | # By convention, the return value of __repr__ should also be | ||
1034 | # an ASCII string. | ||
1035 | return self.encode(encoding) | ||
1036 | 1648 | ||
1037 | def __unicode__(self): | 1649 | :param encoding: The encoding to use (Python 2 only). |
1650 | TODO: This is now ignored and a warning should be issued | ||
1651 | if a value is provided. | ||
1652 | :return: A (Unicode) string. | ||
1653 | """ | ||
1654 | # "The return value must be a string object", i.e. Unicode | ||
1038 | return self.decode() | 1655 | return self.decode() |
1039 | 1656 | ||
1040 | def __str__(self): | 1657 | def __unicode__(self): |
1041 | if PY3K: | 1658 | """Renders this PageElement as a Unicode string.""" |
1042 | return self.decode() | 1659 | return self.decode() |
1043 | else: | ||
1044 | return self.encode() | ||
1045 | 1660 | ||
1046 | if PY3K: | 1661 | __str__ = __repr__ = __unicode__ |
1047 | __str__ = __repr__ = __unicode__ | ||
1048 | 1662 | ||
1049 | def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, | 1663 | def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, |
1050 | indent_level=None, formatter="minimal", | 1664 | indent_level=None, formatter="minimal", |
1051 | errors="xmlcharrefreplace"): | 1665 | errors="xmlcharrefreplace"): |
1666 | """Render a bytestring representation of this PageElement and its | ||
1667 | contents. | ||
1668 | |||
1669 | :param encoding: The destination encoding. | ||
1670 | :param indent_level: Each line of the rendering will be | ||
1671 | indented this many levels. (The formatter decides what a | ||
1672 | 'level' means in terms of spaces or other characters | ||
1673 | output.) Used internally in recursive calls while | ||
1674 | pretty-printing. | ||
1675 | :param formatter: A Formatter object, or a string naming one of | ||
1676 | the standard formatters. | ||
1677 | :param errors: An error handling strategy such as | ||
1678 | 'xmlcharrefreplace'. This value is passed along into | ||
1679 | encode() and its value should be one of the constants | ||
1680 | defined by Python. | ||
1681 | :return: A bytestring. | ||
1682 | |||
1683 | """ | ||
1052 | # Turn the data structure into Unicode, then encode the | 1684 | # Turn the data structure into Unicode, then encode the |
1053 | # Unicode. | 1685 | # Unicode. |
1054 | u = self.decode(indent_level, encoding, formatter) | 1686 | u = self.decode(indent_level, encoding, formatter) |
1055 | return u.encode(encoding, errors) | 1687 | return u.encode(encoding, errors) |
1056 | 1688 | ||
1057 | def _should_pretty_print(self, indent_level): | ||
1058 | """Should this tag be pretty-printed?""" | ||
1059 | return ( | ||
1060 | indent_level is not None and | ||
1061 | (self.name not in HTMLAwareEntitySubstitution.preformatted_tags | ||
1062 | or self._is_xml)) | ||
1063 | |||
1064 | def decode(self, indent_level=None, | 1689 | def decode(self, indent_level=None, |
1065 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, | 1690 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, |
1066 | formatter="minimal"): | 1691 | formatter="minimal", |
1067 | """Returns a Unicode representation of this tag and its contents. | 1692 | iterator=None): |
1693 | pieces = [] | ||
1694 | # First off, turn a non-Formatter `formatter` into a Formatter | ||
1695 | # object. This will stop the lookup from happening over and | ||
1696 | # over again. | ||
1697 | if not isinstance(formatter, Formatter): | ||
1698 | formatter = self.formatter_for_name(formatter) | ||
1699 | |||
1700 | if indent_level is True: | ||
1701 | indent_level = 0 | ||
1702 | |||
1703 | # The currently active tag that put us into string literal | ||
1704 | # mode. Until this element is closed, children will be treated | ||
1705 | # as string literals and not pretty-printed. String literal | ||
1706 | # mode is turned on immediately after this tag begins, and | ||
1707 | # turned off immediately before it's closed. This means there | ||
1708 | # will be whitespace before and after the tag itself. | ||
1709 | string_literal_tag = None | ||
1710 | |||
1711 | for event, element in self._event_stream(iterator): | ||
1712 | if event in (Tag.START_ELEMENT_EVENT, Tag.EMPTY_ELEMENT_EVENT): | ||
1713 | piece = element._format_tag( | ||
1714 | eventual_encoding, formatter, opening=True | ||
1715 | ) | ||
1716 | elif event is Tag.END_ELEMENT_EVENT: | ||
1717 | piece = element._format_tag( | ||
1718 | eventual_encoding, formatter, opening=False | ||
1719 | ) | ||
1720 | if indent_level is not None: | ||
1721 | indent_level -= 1 | ||
1722 | else: | ||
1723 | piece = element.output_ready(formatter) | ||
1724 | |||
1725 | # Now we need to apply the 'prettiness' -- extra | ||
1726 | # whitespace before and/or after this tag. This can get | ||
1727 | # complicated because certain tags, like <pre> and | ||
1728 | # <script>, can't be prettified, since adding whitespace would | ||
1729 | # change the meaning of the content. | ||
1730 | |||
1731 | # The default behavior is to add whitespace before and | ||
1732 | # after an element when string literal mode is off, and to | ||
1733 | # leave things as they are when string literal mode is on. | ||
1734 | if string_literal_tag: | ||
1735 | indent_before = indent_after = False | ||
1736 | else: | ||
1737 | indent_before = indent_after = True | ||
1738 | |||
1739 | # The only time the behavior is more complex than that is | ||
1740 | # when we encounter an opening or closing tag that might | ||
1741 | # put us into or out of string literal mode. | ||
1742 | if (event is Tag.START_ELEMENT_EVENT | ||
1743 | and not string_literal_tag | ||
1744 | and not element._should_pretty_print()): | ||
1745 | # We are about to enter string literal mode. Add | ||
1746 | # whitespace before this tag, but not after. We | ||
1747 | # will stay in string literal mode until this tag | ||
1748 | # is closed. | ||
1749 | indent_before = True | ||
1750 | indent_after = False | ||
1751 | string_literal_tag = element | ||
1752 | elif (event is Tag.END_ELEMENT_EVENT | ||
1753 | and element is string_literal_tag): | ||
1754 | # We are about to exit string literal mode by closing | ||
1755 | # the tag that sent us into that mode. Add whitespace | ||
1756 | # after this tag, but not before. | ||
1757 | indent_before = False | ||
1758 | indent_after = True | ||
1759 | string_literal_tag = None | ||
1760 | |||
1761 | # Now we know whether to add whitespace before and/or | ||
1762 | # after this element. | ||
1763 | if indent_level is not None: | ||
1764 | if (indent_before or indent_after): | ||
1765 | if isinstance(element, NavigableString): | ||
1766 | piece = piece.strip() | ||
1767 | if piece: | ||
1768 | piece = self._indent_string( | ||
1769 | piece, indent_level, formatter, | ||
1770 | indent_before, indent_after | ||
1771 | ) | ||
1772 | if event == Tag.START_ELEMENT_EVENT: | ||
1773 | indent_level += 1 | ||
1774 | pieces.append(piece) | ||
1775 | return "".join(pieces) | ||
1776 | |||
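String literal mode is what keeps whitespace-sensitive content intact while everything around it is indented. For instance:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div><pre>  keep  me  </pre></div>', 'html.parser')

    # <pre> is whitespace-preserving: prettify() indents around it but
    # leaves its contents untouched (string literal mode).
    print(soup.div.prettify())
    # <div>
    #  <pre>  keep  me  </pre>
    # </div>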
1777 | # Names for the different events yielded by _event_stream | ||
1778 | START_ELEMENT_EVENT = object() | ||
1779 | END_ELEMENT_EVENT = object() | ||
1780 | EMPTY_ELEMENT_EVENT = object() | ||
1781 | STRING_ELEMENT_EVENT = object() | ||
1782 | |||
1783 | def _event_stream(self, iterator=None): | ||
1784 | """Yield a sequence of events that can be used to reconstruct the DOM | ||
1785 | for this element. | ||
1786 | |||
1787 | This lets us recreate the nested structure of this element | ||
1788 | (e.g. when formatting it as a string) without using recursive | ||
1789 | method calls. | ||
1790 | |||
1791 | This is similar in concept to the SAX API, but it's a simpler | ||
1792 | interface designed for internal use. The events are different | ||
1793 | from SAX and the arguments associated with the events are Tags | ||
1794 | and other Beautiful Soup objects. | ||
1795 | |||
1796 | :param iterator: An alternate iterator to use when traversing | ||
1797 | the tree. | ||
1798 | """ | ||
1799 | tag_stack = [] | ||
1068 | 1800 | ||
1069 | :param eventual_encoding: The tag is destined to be | 1801 | iterator = iterator or self.self_and_descendants |
1070 | encoded into this encoding. This method is _not_ | 1802 | |
1071 | responsible for performing that encoding. This information | 1803 | for c in iterator: |
1072 | is passed in so that it can be substituted in if the | 1804 | # If the parent of the element we're about to yield is not |
1073 | document contains a <META> tag that mentions the document's | 1805 | # the tag currently on the stack, it means that the tag on |
1074 | encoding. | 1806 | # the stack closed before this element appeared. |
1807 | while tag_stack and c.parent != tag_stack[-1]: | ||
1808 | now_closed_tag = tag_stack.pop() | ||
1809 | yield Tag.END_ELEMENT_EVENT, now_closed_tag | ||
1810 | |||
1811 | if isinstance(c, Tag): | ||
1812 | if c.is_empty_element: | ||
1813 | yield Tag.EMPTY_ELEMENT_EVENT, c | ||
1814 | else: | ||
1815 | yield Tag.START_ELEMENT_EVENT, c | ||
1816 | tag_stack.append(c) | ||
1817 | continue | ||
1818 | else: | ||
1819 | yield Tag.STRING_ELEMENT_EVENT, c | ||
1820 | |||
1821 | while tag_stack: | ||
1822 | now_closed_tag = tag_stack.pop() | ||
1823 | yield Tag.END_ELEMENT_EVENT, now_closed_tag | ||
1824 | |||
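Although _event_stream() is an underscore-prefixed internal, its protocol is easy to see; a sketch for illustration only:

    from bs4 import BeautifulSoup
    from bs4.element import Tag

    soup = BeautifulSoup('<div><p>hi</p></div>', 'html.parser')
    names = {
        Tag.START_ELEMENT_EVENT: 'start',
        Tag.END_ELEMENT_EVENT: 'end',
        Tag.EMPTY_ELEMENT_EVENT: 'empty',
        Tag.STRING_ELEMENT_EVENT: 'string',
    }
    for event, element in soup.div._event_stream():
        label = element.name if isinstance(element, Tag) else repr(element)
        print(names[event], label)
    # start div / start p / string 'hi' / end p / end div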
1825 | def _indent_string(self, s, indent_level, formatter, | ||
1826 | indent_before, indent_after): | ||
1827 | """Add indentation whitespace before and/or after a string. | ||
1828 | |||
1829 | :param s: The string to amend with whitespace. | ||
1830 | :param indent_level: The indentation level; affects how much | ||
1831 | whitespace goes before the string. | ||
1832 | :param indent_before: Whether or not to add whitespace | ||
1833 | before the string. | ||
1834 | :param indent_after: Whether or not to add whitespace | ||
1835 | (a newline) after the string. | ||
1075 | """ | 1836 | """ |
1837 | space_before = '' | ||
1838 | if indent_before and indent_level: | ||
1839 | space_before = (formatter.indent * indent_level) | ||
1076 | 1840 | ||
1077 | # First off, turn a string formatter into a function. This | 1841 | space_after = '' |
1078 | # will stop the lookup from happening over and over again. | 1842 | if indent_after: |
1079 | if not isinstance(formatter, collections.abc.Callable): | 1843 | space_after = "\n" |
1080 | formatter = self._formatter_for_name(formatter) | ||
1081 | 1844 | ||
1082 | attrs = [] | 1845 | return space_before + s + space_after |
1083 | if self.attrs: | 1846 | |
1084 | for key, val in sorted(self.attrs.items()): | 1847 | def _format_tag(self, eventual_encoding, formatter, opening): |
1848 | if self.hidden: | ||
1849 | # A hidden tag is invisible, although its contents | ||
1850 | # are visible. | ||
1851 | return '' | ||
1852 | |||
1853 | # A tag starts with the < character (see below). | ||
1854 | |||
1855 | # Then the / character, if this is a closing tag. | ||
1856 | closing_slash = '' | ||
1857 | if not opening: | ||
1858 | closing_slash = '/' | ||
1859 | |||
1860 | # Then an optional namespace prefix. | ||
1861 | prefix = '' | ||
1862 | if self.prefix: | ||
1863 | prefix = self.prefix + ":" | ||
1864 | |||
1865 | # Then a list of attribute values, if this is an opening tag. | ||
1866 | attribute_string = '' | ||
1867 | if opening: | ||
1868 | attributes = formatter.attributes(self) | ||
1869 | attrs = [] | ||
1870 | for key, val in attributes: | ||
1085 | if val is None: | 1871 | if val is None: |
1086 | decoded = key | 1872 | decoded = key |
1087 | else: | 1873 | else: |
@@ -1090,71 +1876,52 @@ class Tag(PageElement): | |||
1090 | elif not isinstance(val, str): | 1876 | elif not isinstance(val, str): |
1091 | val = str(val) | 1877 | val = str(val) |
1092 | elif ( | 1878 | elif ( |
1093 | isinstance(val, AttributeValueWithCharsetSubstitution) | 1879 | isinstance(val, AttributeValueWithCharsetSubstitution) |
1094 | and eventual_encoding is not None): | 1880 | and eventual_encoding is not None |
1881 | ): | ||
1095 | val = val.encode(eventual_encoding) | 1882 | val = val.encode(eventual_encoding) |
1096 | 1883 | ||
1097 | text = self.format_string(val, formatter) | 1884 | text = formatter.attribute_value(val) |
1098 | decoded = ( | 1885 | decoded = ( |
1099 | str(key) + '=' | 1886 | str(key) + '=' |
1100 | + EntitySubstitution.quoted_attribute_value(text)) | 1887 | + formatter.quoted_attribute_value(text)) |
1101 | attrs.append(decoded) | 1888 | attrs.append(decoded) |
1102 | close = '' | 1889 | if attrs: |
1103 | closeTag = '' | 1890 | attribute_string = ' ' + ' '.join(attrs) |
1104 | |||
1105 | prefix = '' | ||
1106 | if self.prefix: | ||
1107 | prefix = self.prefix + ":" | ||
1108 | 1891 | ||
1892 | # Then an optional closing slash (for a void element in an | ||
1893 | # XML document). | ||
1894 | void_element_closing_slash = '' | ||
1109 | if self.is_empty_element: | 1895 | if self.is_empty_element: |
1110 | close = '/' | 1896 | void_element_closing_slash = formatter.void_element_close_prefix or '' |
1111 | else: | ||
1112 | closeTag = '</%s%s>' % (prefix, self.name) | ||
1113 | |||
1114 | pretty_print = self._should_pretty_print(indent_level) | ||
1115 | space = '' | ||
1116 | indent_space = '' | ||
1117 | if indent_level is not None: | ||
1118 | indent_space = (' ' * (indent_level - 1)) | ||
1119 | if pretty_print: | ||
1120 | space = indent_space | ||
1121 | indent_contents = indent_level + 1 | ||
1122 | else: | ||
1123 | indent_contents = None | ||
1124 | contents = self.decode_contents( | ||
1125 | indent_contents, eventual_encoding, formatter) | ||
1126 | 1897 | ||
1127 | if self.hidden: | 1898 | # Put it all together. |
1128 | # This is the 'document root' object. | 1899 | return '<' + closing_slash + prefix + self.name + attribute_string + void_element_closing_slash + '>' |
1129 | s = contents | 1900 | |
1130 | else: | 1901 | def _should_pretty_print(self, indent_level=1): |
1131 | s = [] | 1902 | """Should this tag be pretty-printed? |
1132 | attribute_string = '' | 1903 | |
1133 | if attrs: | 1904 | Most of them should, but some (such as <pre> in HTML |
1134 | attribute_string = ' ' + ' '.join(attrs) | 1905 | documents) should not. |
1135 | if indent_level is not None: | 1906 | """ |
1136 | # Even if this particular tag is not pretty-printed, | 1907 | return ( |
1137 | # we should indent up to the start of the tag. | 1908 | indent_level is not None |
1138 | s.append(indent_space) | 1909 | and ( |
1139 | s.append('<%s%s%s%s>' % ( | 1910 | not self.preserve_whitespace_tags |
1140 | prefix, self.name, attribute_string, close)) | 1911 | or self.name not in self.preserve_whitespace_tags |
1141 | if pretty_print: | 1912 | ) |
1142 | s.append("\n") | 1913 | ) |
1143 | s.append(contents) | ||
1144 | if pretty_print and contents and contents[-1] != "\n": | ||
1145 | s.append("\n") | ||
1146 | if pretty_print and closeTag: | ||
1147 | s.append(space) | ||
1148 | s.append(closeTag) | ||
1149 | if indent_level is not None and closeTag and self.next_sibling: | ||
1150 | # Even if this particular tag is not pretty-printed, | ||
1151 | # we're now done with the tag, and we should add a | ||
1152 | # newline if appropriate. | ||
1153 | s.append("\n") | ||
1154 | s = ''.join(s) | ||
1155 | return s | ||
1156 | 1914 | ||
1157 | def prettify(self, encoding=None, formatter="minimal"): | 1915 | def prettify(self, encoding=None, formatter="minimal"): |
1916 | """Pretty-print this PageElement as a string. | ||
1917 | |||
1918 | :param encoding: The eventual encoding of the string. If this is None, | ||
1919 | a Unicode string will be returned. | ||
1920 | :param formatter: A Formatter object, or a string naming one of | ||
1921 | the standard formatters. | ||
1922 | :return: A Unicode string (if encoding==None) or a bytestring | ||
1923 | (otherwise). | ||
1924 | """ | ||
1158 | if encoding is None: | 1925 | if encoding is None: |
1159 | return self.decode(True, formatter=formatter) | 1926 | return self.decode(True, formatter=formatter) |
1160 | else: | 1927 | else: |
@@ -1166,62 +1933,50 @@ class Tag(PageElement): | |||
1166 | """Renders the contents of this tag as a Unicode string. | 1933 | """Renders the contents of this tag as a Unicode string. |
1167 | 1934 | ||
1168 | :param indent_level: Each line of the rendering will be | 1935 | :param indent_level: Each line of the rendering will be |
1169 | indented this many spaces. | 1936 | indented this many levels. (The formatter decides what a |
1937 | 'level' means in terms of spaces or other characters | ||
1938 | output.) Used internally in recursive calls while | ||
1939 | pretty-printing. | ||
1170 | 1940 | ||
1171 | :param eventual_encoding: The tag is destined to be | 1941 | :param eventual_encoding: The tag is destined to be |
1172 | encoded into this encoding. This method is _not_ | 1942 | encoded into this encoding. decode_contents() is _not_ |
1173 | responsible for performing that encoding. This information | 1943 | responsible for performing that encoding. This information |
1174 | is passed in so that it can be substituted in if the | 1944 | is passed in so that it can be substituted in if the |
1175 | document contains a <META> tag that mentions the document's | 1945 | document contains a <META> tag that mentions the document's |
1176 | encoding. | 1946 | encoding. |
1177 | 1947 | ||
1178 | :param formatter: The output formatter responsible for converting | 1948 | :param formatter: A Formatter object, or a string naming one of |
1179 | entities to Unicode characters. | 1949 | the standard Formatters. |
1180 | """ | 1950 | |
1181 | # First off, turn a string formatter into a function. This | 1951 | """ |
1182 | # will stop the lookup from happening over and over again. | 1952 | return self.decode(indent_level, eventual_encoding, formatter, |
1183 | if not isinstance(formatter, collections.abc.Callable): | 1953 | iterator=self.descendants) |
1184 | formatter = self._formatter_for_name(formatter) | ||
1185 | |||
1186 | pretty_print = (indent_level is not None) | ||
1187 | s = [] | ||
1188 | for c in self: | ||
1189 | text = None | ||
1190 | if isinstance(c, NavigableString): | ||
1191 | text = c.output_ready(formatter) | ||
1192 | elif isinstance(c, Tag): | ||
1193 | s.append(c.decode(indent_level, eventual_encoding, | ||
1194 | formatter)) | ||
1195 | if text and indent_level and not self.name == 'pre': | ||
1196 | text = text.strip() | ||
1197 | if text: | ||
1198 | if pretty_print and not self.name == 'pre': | ||
1199 | s.append(" " * (indent_level - 1)) | ||
1200 | s.append(text) | ||
1201 | if pretty_print and not self.name == 'pre': | ||
1202 | s.append("\n") | ||
1203 | return ''.join(s) | ||
1204 | 1954 | ||
1205 | def encode_contents( | 1955 | def encode_contents( |
1206 | self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, | 1956 | self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, |
1207 | formatter="minimal"): | 1957 | formatter="minimal"): |
1208 | """Renders the contents of this tag as a bytestring. | 1958 | """Renders the contents of this PageElement as a bytestring. |
1209 | 1959 | ||
1210 | :param indent_level: Each line of the rendering will be | 1960 | :param indent_level: Each line of the rendering will be |
1211 | indented this many spaces. | 1961 | indented this many levels. (The formatter decides what a |
1962 | 'level' means in terms of spaces or other characters | ||
1963 | output.) Used internally in recursive calls while | ||
1964 | pretty-printing. | ||
1212 | 1965 | ||
1213 | :param eventual_encoding: The bytestring will be in this encoding. | 1966 | :param eventual_encoding: The bytestring will be in this encoding. |
1214 | 1967 | ||
1215 | :param formatter: The output formatter responsible for converting | 1968 | :param formatter: A Formatter object, or a string naming one of |
1216 | entities to Unicode characters. | 1969 | the standard Formatters. |
1217 | """ | ||
1218 | 1970 | ||
1971 | :return: A bytestring. | ||
1972 | """ | ||
1219 | contents = self.decode_contents(indent_level, encoding, formatter) | 1973 | contents = self.decode_contents(indent_level, encoding, formatter) |
1220 | return contents.encode(encoding) | 1974 | return contents.encode(encoding) |
1221 | 1975 | ||
1222 | # Old method for BS3 compatibility | 1976 | # Old method for BS3 compatibility |
1223 | def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, | 1977 | def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, |
1224 | prettyPrint=False, indentLevel=0): | 1978 | prettyPrint=False, indentLevel=0): |
1979 | """Deprecated method for BS3 compatibility.""" | ||
1225 | if not prettyPrint: | 1980 | if not prettyPrint: |
1226 | indentLevel = None | 1981 | indentLevel = None |
1227 | return self.encode_contents( | 1982 | return self.encode_contents( |
@@ -1229,44 +1984,88 @@ class Tag(PageElement): | |||
1229 | 1984 | ||
1230 | #Soup methods | 1985 | #Soup methods |
1231 | 1986 | ||
1232 | def find(self, name=None, attrs={}, recursive=True, text=None, | 1987 | def find(self, name=None, attrs={}, recursive=True, string=None, |
1233 | **kwargs): | 1988 | **kwargs): |
1234 | """Return only the first child of this Tag matching the given | 1989 | """Look in the children of this PageElement and find the first |
1235 | criteria.""" | 1990 | PageElement that matches the given criteria. |
1991 | |||
1992 | All find_* methods take a common set of arguments. See the online | ||
1993 | documentation for detailed explanations. | ||
1994 | |||
1995 | :param name: A filter on tag name. | ||
1996 | :param attrs: A dictionary of filters on attribute values. | ||
1997 | :param recursive: If this is True, find() will perform a | ||
1998 | recursive search of this PageElement's children. Otherwise, | ||
1999 | only the direct children will be considered. | ||
2000 | :param limit: Stop looking after finding this many results. | ||
2001 | :kwargs: A dictionary of filters on attribute values. | ||
2002 | :return: A PageElement. | ||
2003 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
2004 | """ | ||
1236 | r = None | 2005 | r = None |
1237 | l = self.find_all(name, attrs, recursive, text, 1, **kwargs) | 2006 | l = self.find_all(name, attrs, recursive, string, 1, _stacklevel=3, |
2007 | **kwargs) | ||
1238 | if l: | 2008 | if l: |
1239 | r = l[0] | 2009 | r = l[0] |
1240 | return r | 2010 | return r |
1241 | findChild = find | 2011 | findChild = find #BS2 |
1242 | 2012 | ||
1243 | def find_all(self, name=None, attrs={}, recursive=True, text=None, | 2013 | def find_all(self, name=None, attrs={}, recursive=True, string=None, |
1244 | limit=None, **kwargs): | 2014 | limit=None, **kwargs): |
1245 | """Extracts a list of Tag objects that match the given | 2015 | """Look in the children of this PageElement and find all |
1246 | criteria. You can specify the name of the Tag and any | 2016 | PageElements that match the given criteria. |
1247 | attributes you want the Tag to have. | 2017 | |
1248 | 2018 | All find_* methods take a common set of arguments. See the online | |
1249 | The value of a key-value pair in the 'attrs' map can be a | 2019 | documentation for detailed explanations. |
1250 | string, a list of strings, a regular expression object, or a | 2020 | |
1251 | callable that takes a string and returns whether or not the | 2021 | :param name: A filter on tag name. |
1252 | string matches for some custom definition of 'matches'. The | 2022 | :param attrs: A dictionary of filters on attribute values. |
1253 | same is true of the tag name.""" | 2023 | :param recursive: If this is True, find_all() will perform a |
1254 | 2024 | recursive search of this PageElement's children. Otherwise, | |
2025 | only the direct children will be considered. | ||
2026 | :param limit: Stop looking after finding this many results. | ||
2027 | :kwargs: A dictionary of filters on attribute values. | ||
2028 | :return: A ResultSet of PageElements. | ||
2029 | :rtype: bs4.element.ResultSet | ||
2030 | """ | ||
1255 | generator = self.descendants | 2031 | generator = self.descendants |
1256 | if not recursive: | 2032 | if not recursive: |
1257 | generator = self.children | 2033 | generator = self.children |
1258 | return self._find_all(name, attrs, text, limit, generator, **kwargs) | 2034 | _stacklevel = kwargs.pop('_stacklevel', 2) |
2035 | return self._find_all(name, attrs, string, limit, generator, | ||
2036 | _stacklevel=_stacklevel+1, **kwargs) | ||
1259 | findAll = find_all # BS3 | 2037 | findAll = find_all # BS3 |
1260 | findChildren = find_all # BS2 | 2038 | findChildren = find_all # BS2 |
1261 | 2039 | ||
1262 | #Generator methods | 2040 | #Generator methods |
1263 | @property | 2041 | @property |
1264 | def children(self): | 2042 | def children(self): |
2043 | """Iterate over all direct children of this PageElement. | ||
2044 | |||
2045 | :yield: A sequence of PageElements. | ||
2046 | """ | ||
1265 | # return iter() to make the purpose of the method clear | 2047 | # return iter() to make the purpose of the method clear |
1266 | return iter(self.contents) # XXX This seems to be untested. | 2048 | return iter(self.contents) # XXX This seems to be untested. |
1267 | 2049 | ||
1268 | @property | 2050 | @property |
2051 | def self_and_descendants(self): | ||
2052 | """Iterate over this PageElement and its children in a | ||
2053 | breadth-first sequence. | ||
2054 | |||
2055 | :yield: A sequence of PageElements. | ||
2056 | """ | ||
2057 | if not self.hidden: | ||
2058 | yield self | ||
2059 | for i in self.descendants: | ||
2060 | yield i | ||
2061 | |||
2062 | @property | ||
1269 | def descendants(self): | 2063 | def descendants(self): |
2064 | """Iterate over all children of this PageElement in a | ||
2065 | breadth-first sequence. | ||
2066 | |||
2067 | :yield: A sequence of PageElements. | ||
2068 | """ | ||
1270 | if not len(self.contents): | 2069 | if not len(self.contents): |
1271 | return | 2070 | return |
1272 | stopNode = self._last_descendant().next_element | 2071 | stopNode = self._last_descendant().next_element |
@@ -1276,262 +2075,102 @@ class Tag(PageElement): | |||
1276 | current = current.next_element | 2075 | current = current.next_element |
1277 | 2076 | ||
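A short sketch of the traversal properties above, on a throwaway tree (names invented):

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup("<ul><li>a<b>x</b></li><li>b</li></ul>", "html.parser")
ul = soup.ul

[c.name for c in ul.children if c.name]   # direct children: ['li', 'li']
list(ul.descendants)                      # every node, in document order
# self_and_descendants (new in this version) is the same walk, but the
# element itself is yielded first:
assert list(ul.self_and_descendants)[0] is ul
```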
1278 | # CSS selector code | 2077 | # CSS selector code |
2078 | def select_one(self, selector, namespaces=None, **kwargs): | ||
2079 | """Perform a CSS selection operation on the current element. | ||
1279 | 2080 | ||
1280 | _selector_combinators = ['>', '+', '~'] | 2081 | :param selector: A CSS selector. |
1281 | _select_debug = False | ||
1282 | def select_one(self, selector): | ||
1283 | """Perform a CSS selection operation on the current element.""" | ||
1284 | value = self.select(selector, limit=1) | ||
1285 | if value: | ||
1286 | return value[0] | ||
1287 | return None | ||
1288 | 2082 | ||
1289 | def select(self, selector, _candidate_generator=None, limit=None): | 2083 | :param namespaces: A dictionary mapping namespace prefixes |
1290 | """Perform a CSS selection operation on the current element.""" | 2084 | used in the CSS selector to namespace URIs. By default, |
1291 | 2085 | Beautiful Soup will use the prefixes it encountered while | |
1292 | # Handle grouping selectors if ',' exists, ie: p,a | 2086 | parsing the document. |
1293 | if ',' in selector: | ||
1294 | context = [] | ||
1295 | for partial_selector in selector.split(','): | ||
1296 | partial_selector = partial_selector.strip() | ||
1297 | if partial_selector == '': | ||
1298 | raise ValueError('Invalid group selection syntax: %s' % selector) | ||
1299 | candidates = self.select(partial_selector, limit=limit) | ||
1300 | for candidate in candidates: | ||
1301 | if candidate not in context: | ||
1302 | context.append(candidate) | ||
1303 | |||
1304 | if limit and len(context) >= limit: | ||
1305 | break | ||
1306 | return context | ||
1307 | 2087 | ||
1308 | tokens = selector.split() | 2088 | :param kwargs: Keyword arguments to be passed into Soup Sieve's |
1309 | current_context = [self] | 2089 | soupsieve.select() method. |
1310 | 2090 | ||
1311 | if tokens[-1] in self._selector_combinators: | 2091 | :return: A Tag. |
1312 | raise ValueError( | 2092 | :rtype: bs4.element.Tag |
1313 | 'Final combinator "%s" is missing an argument.' % tokens[-1]) | 2093 | """ |
2094 | return self.css.select_one(selector, namespaces, **kwargs) | ||
1314 | 2095 | ||
1315 | if self._select_debug: | 2096 | def select(self, selector, namespaces=None, limit=None, **kwargs): |
1316 | print('Running CSS selector "%s"' % selector) | 2097 | """Perform a CSS selection operation on the current element. |
1317 | 2098 | ||
1318 | for index, token in enumerate(tokens): | 2099 | This uses the SoupSieve library. |
1319 | new_context = [] | ||
1320 | new_context_ids = set([]) | ||
1321 | 2100 | ||
1322 | if tokens[index-1] in self._selector_combinators: | 2101 | :param selector: A string containing a CSS selector. |
1323 | # This token was consumed by the previous combinator. Skip it. | ||
1324 | if self._select_debug: | ||
1325 | print(' Token was consumed by the previous combinator.') | ||
1326 | continue | ||
1327 | 2102 | ||
1328 | if self._select_debug: | 2103 | :param namespaces: A dictionary mapping namespace prefixes |
1329 | print(' Considering token "%s"' % token) | 2104 | used in the CSS selector to namespace URIs. By default, |
1330 | recursive_candidate_generator = None | 2105 | Beautiful Soup will use the prefixes it encountered while |
1331 | tag_name = None | 2106 | parsing the document. |
1332 | 2107 | ||
1333 | # Each operation corresponds to a checker function, a rule | 2108 | :param limit: After finding this number of results, stop looking. |
1334 | # for determining whether a candidate matches the | 2109 | |
1335 | # selector. Candidates are generated by the active | 2110 | :param kwargs: Keyword arguments to be passed into SoupSieve's |
1336 | # iterator. | 2111 | soupsieve.select() method. |
1337 | checker = None | 2112 | |
1338 | 2113 | :return: A ResultSet of Tags. | |
1339 | m = self.attribselect_re.match(token) | 2114 | :rtype: bs4.element.ResultSet |
1340 | if m is not None: | 2115 | """ |
1341 | # Attribute selector | 2116 | return self.css.select(selector, namespaces, limit, **kwargs) |
1342 | tag_name, attribute, operator, value = m.groups() | 2117 | |
1343 | checker = self._attribute_checker(operator, attribute, value) | 2118 | @property |
1344 | 2119 | def css(self): | |
1345 | elif '#' in token: | 2120 | """Return an interface to the CSS selector API.""" |
1346 | # ID selector | 2121 | return CSS(self) |
1347 | tag_name, tag_id = token.split('#', 1) | ||
1348 | def id_matches(tag): | ||
1349 | return tag.get('id', None) == tag_id | ||
1350 | checker = id_matches | ||
1351 | |||
1352 | elif '.' in token: | ||
1353 | # Class selector | ||
1354 | tag_name, klass = token.split('.', 1) | ||
1355 | classes = set(klass.split('.')) | ||
1356 | def classes_match(candidate): | ||
1357 | return classes.issubset(candidate.get('class', [])) | ||
1358 | checker = classes_match | ||
1359 | |||
1360 | elif ':' in token: | ||
1361 | # Pseudo-class | ||
1362 | tag_name, pseudo = token.split(':', 1) | ||
1363 | if tag_name == '': | ||
1364 | raise ValueError( | ||
1365 | "A pseudo-class must be prefixed with a tag name.") | ||
1366 | pseudo_attributes = re.match(r'([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo) | ||
1367 | found = [] | ||
1368 | if pseudo_attributes is None: | ||
1369 | pseudo_type = pseudo | ||
1370 | pseudo_value = None | ||
1371 | else: | ||
1372 | pseudo_type, pseudo_value = pseudo_attributes.groups() | ||
1373 | if pseudo_type == 'nth-of-type': | ||
1374 | try: | ||
1375 | pseudo_value = int(pseudo_value) | ||
1376 | except: | ||
1377 | raise NotImplementedError( | ||
1378 | 'Only numeric values are currently supported for the nth-of-type pseudo-class.') | ||
1379 | if pseudo_value < 1: | ||
1380 | raise ValueError( | ||
1381 | 'nth-of-type pseudo-class value must be at least 1.') | ||
1382 | class Counter(object): | ||
1383 | def __init__(self, destination): | ||
1384 | self.count = 0 | ||
1385 | self.destination = destination | ||
1386 | |||
1387 | def nth_child_of_type(self, tag): | ||
1388 | self.count += 1 | ||
1389 | if self.count == self.destination: | ||
1390 | return True | ||
1391 | if self.count > self.destination: | ||
1392 | # Stop the generator that's sending us | ||
1393 | # these things. | ||
1394 | raise StopIteration() | ||
1395 | return False | ||
1396 | checker = Counter(pseudo_value).nth_child_of_type | ||
1397 | else: | ||
1398 | raise NotImplementedError( | ||
1399 | 'Only the following pseudo-classes are implemented: nth-of-type.') | ||
1400 | |||
1401 | elif token == '*': | ||
1402 | # Star selector -- matches everything | ||
1403 | pass | ||
1404 | elif token == '>': | ||
1405 | # Run the next token as a CSS selector against the | ||
1406 | # direct children of each tag in the current context. | ||
1407 | recursive_candidate_generator = lambda tag: tag.children | ||
1408 | elif token == '~': | ||
1409 | # Run the next token as a CSS selector against the | ||
1410 | # siblings of each tag in the current context. | ||
1411 | recursive_candidate_generator = lambda tag: tag.next_siblings | ||
1412 | elif token == '+': | ||
1413 | # For each tag in the current context, run the next | ||
1414 | # token as a CSS selector against the tag's next | ||
1415 | # sibling that's a tag. | ||
1416 | def next_tag_sibling(tag): | ||
1417 | yield tag.find_next_sibling(True) | ||
1418 | recursive_candidate_generator = next_tag_sibling | ||
1419 | |||
1420 | elif self.tag_name_re.match(token): | ||
1421 | # Just a tag name. | ||
1422 | tag_name = token | ||
1423 | else: | ||
1424 | raise ValueError( | ||
1425 | 'Unsupported or invalid CSS selector: "%s"' % token) | ||
1426 | if recursive_candidate_generator: | ||
1427 | # This happens when the selector looks like "> foo". | ||
1428 | # | ||
1429 | # The generator calls select() recursively on every | ||
1430 | # member of the current context, passing in a different | ||
1431 | # candidate generator and a different selector. | ||
1432 | # | ||
1433 | # In the case of "> foo", the candidate generator is | ||
1434 | # one that yields a tag's direct children (">"), and | ||
1435 | # the selector is "foo". | ||
1436 | next_token = tokens[index+1] | ||
1437 | def recursive_select(tag): | ||
1438 | if self._select_debug: | ||
1439 | print(' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs)) | ||
1440 | print('-' * 40) | ||
1441 | for i in tag.select(next_token, recursive_candidate_generator): | ||
1442 | if self._select_debug: | ||
1443 | print('(Recursive select picked up candidate %s %s)' % (i.name, i.attrs)) | ||
1444 | yield i | ||
1445 | if self._select_debug: | ||
1446 | print('-' * 40) | ||
1447 | _use_candidate_generator = recursive_select | ||
1448 | elif _candidate_generator is None: | ||
1449 | # By default, a tag's candidates are all of its | ||
1450 | # children. If tag_name is defined, only yield tags | ||
1451 | # with that name. | ||
1452 | if self._select_debug: | ||
1453 | if tag_name: | ||
1454 | check = "[any]" | ||
1455 | else: | ||
1456 | check = tag_name | ||
1457 | print(' Default candidate generator, tag name="%s"' % check) | ||
1458 | if self._select_debug: | ||
1459 | # This is redundant with later code, but it stops | ||
1460 | # a bunch of bogus tags from cluttering up the | ||
1461 | # debug log. | ||
1462 | def default_candidate_generator(tag): | ||
1463 | for child in tag.descendants: | ||
1464 | if not isinstance(child, Tag): | ||
1465 | continue | ||
1466 | if tag_name and not child.name == tag_name: | ||
1467 | continue | ||
1468 | yield child | ||
1469 | _use_candidate_generator = default_candidate_generator | ||
1470 | else: | ||
1471 | _use_candidate_generator = lambda tag: tag.descendants | ||
1472 | else: | ||
1473 | _use_candidate_generator = _candidate_generator | ||
1474 | |||
1475 | count = 0 | ||
1476 | for tag in current_context: | ||
1477 | if self._select_debug: | ||
1478 | print(" Running candidate generator on %s %s" % ( | ||
1479 | tag.name, repr(tag.attrs))) | ||
1480 | for candidate in _use_candidate_generator(tag): | ||
1481 | if not isinstance(candidate, Tag): | ||
1482 | continue | ||
1483 | if tag_name and candidate.name != tag_name: | ||
1484 | continue | ||
1485 | if checker is not None: | ||
1486 | try: | ||
1487 | result = checker(candidate) | ||
1488 | except StopIteration: | ||
1489 | # The checker has decided we should no longer | ||
1490 | # run the generator. | ||
1491 | break | ||
1492 | if checker is None or result: | ||
1493 | if self._select_debug: | ||
1494 | print(" SUCCESS %s %s" % (candidate.name, repr(candidate.attrs))) | ||
1495 | if id(candidate) not in new_context_ids: | ||
1496 | # If a tag matches a selector more than once, | ||
1497 | # don't include it in the context more than once. | ||
1498 | new_context.append(candidate) | ||
1499 | new_context_ids.add(id(candidate)) | ||
1500 | if limit and len(new_context) >= limit: | ||
1501 | break | ||
1502 | elif self._select_debug: | ||
1503 | print(" FAILURE %s %s" % (candidate.name, repr(candidate.attrs))) | ||
1504 | |||
1505 | |||
1506 | current_context = new_context | ||
1507 | |||
1508 | if self._select_debug: | ||
1509 | print("Final verdict:") | ||
1510 | for i in current_context: | ||
1511 | print(" %s %s" % (i.name, i.attrs)) | ||
1512 | return current_context | ||
1513 | 2122 | ||
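A sketch of the SoupSieve-backed selector API that replaces the hand-rolled select() implementation removed in this hunk; it assumes the soupsieve package is importable:

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup(
    '<div id="main"><p class="lead">hi</p><p>bye</p></div>', "html.parser")

soup.select_one("#main p.lead")   # first match, or None
soup.select("div > p", limit=1)   # ResultSet with at most one element
soup.css.select("p")              # same engine via the new css property
```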
1514 | # Old names for backwards compatibility | 2123 | # Old names for backwards compatibility |
1515 | def childGenerator(self): | 2124 | def childGenerator(self): |
2125 | """Deprecated generator.""" | ||
1516 | return self.children | 2126 | return self.children |
1517 | 2127 | ||
1518 | def recursiveChildGenerator(self): | 2128 | def recursiveChildGenerator(self): |
2129 | """Deprecated generator.""" | ||
1519 | return self.descendants | 2130 | return self.descendants |
1520 | 2131 | ||
1521 | def has_key(self, key): | 2132 | def has_key(self, key): |
1522 | """This was kind of misleading because has_key() (attributes) | 2133 | """Deprecated method. This was kind of misleading because has_key() |
1523 | was different from __in__ (contents). has_key() is gone in | 2134 | (attributes) was different from __in__ (contents). |
1524 | Python 3, anyway.""" | 2135 | |
1525 | warnings.warn('has_key is deprecated. Use has_attr("%s") instead.' % ( | 2136 | has_key() is gone in Python 3, anyway. |
1526 | key)) | 2137 | """ |
2138 | warnings.warn( | ||
2139 | 'has_key is deprecated. Use has_attr(key) instead.', | ||
2140 | DeprecationWarning, stacklevel=2 | ||
2141 | ) | ||
1527 | return self.has_attr(key) | 2142 | return self.has_attr(key) |
1528 | 2143 | ||
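The supported replacement, per the warning above (markup invented):

```python
from bs4 import BeautifulSoup

tag = BeautifulSoup('<a href="/x">go</a>', "html.parser").a
tag.has_attr("href")   # True: attribute lookup
"href" in tag.attrs    # equivalent spelling
"go" in tag            # note: membership tests contents, not attributes
```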
1529 | # Next, a couple classes to represent queries and their results. | 2144 | # Next, a couple classes to represent queries and their results. |
1530 | class SoupStrainer(object): | 2145 | class SoupStrainer(object): |
1531 | """Encapsulates a number of ways of matching a markup element (tag or | 2146 | """Encapsulates a number of ways of matching a markup element (tag or |
1532 | text).""" | 2147 | string). |
2148 | |||
2149 | This is primarily used to underpin the find_* methods, but you can | ||
2150 | create one yourself and pass it in as `parse_only` to the | ||
2151 | `BeautifulSoup` constructor, to parse a subset of a large | ||
2152 | document. | ||
2153 | """ | ||
2154 | |||
2155 | def __init__(self, name=None, attrs={}, string=None, **kwargs): | ||
2156 | """Constructor. | ||
2157 | |||
2158 | The SoupStrainer constructor takes the same arguments passed | ||
2159 | into the find_* methods. See the online documentation for | ||
2160 | detailed explanations. | ||
2161 | |||
2162 | :param name: A filter on tag name. | ||
2163 | :param attrs: A dictionary of filters on attribute values. | ||
2164 | :param string: A filter for a NavigableString with specific text. | ||
2165 | :kwargs: A dictionary of filters on attribute values. | ||
2166 | """ | ||
2167 | if string is None and 'text' in kwargs: | ||
2168 | string = kwargs.pop('text') | ||
2169 | warnings.warn( | ||
2170 | "The 'text' argument to the SoupStrainer constructor is deprecated. Use 'string' instead.", | ||
2171 | DeprecationWarning, stacklevel=2 | ||
2172 | ) | ||
1533 | 2173 | ||
1534 | def __init__(self, name=None, attrs={}, text=None, **kwargs): | ||
1535 | self.name = self._normalize_search_value(name) | 2174 | self.name = self._normalize_search_value(name) |
1536 | if not isinstance(attrs, dict): | 2175 | if not isinstance(attrs, dict): |
1537 | # Treat a non-dict value for attrs as a search for the 'class' | 2176 | # Treat a non-dict value for attrs as a search for the 'class' |
@@ -1556,12 +2195,15 @@ class SoupStrainer(object): | |||
1556 | normalized_attrs[key] = self._normalize_search_value(value) | 2195 | normalized_attrs[key] = self._normalize_search_value(value) |
1557 | 2196 | ||
1558 | self.attrs = normalized_attrs | 2197 | self.attrs = normalized_attrs |
1559 | self.text = self._normalize_search_value(text) | 2198 | self.string = self._normalize_search_value(string) |
2199 | |||
2200 | # DEPRECATED but just in case someone is checking this. | ||
2201 | self.text = self.string | ||
1560 | 2202 | ||
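A hedged example of the parse_only use case described in the class docstring; only the matching <a> tags are parsed out of the (invented) document:

```python
from bs4 import BeautifulSoup, SoupStrainer

only_ext_links = SoupStrainer("a", attrs={"class": "ext"})
soup = BeautifulSoup(
    '<p>x</p><a class="ext" href="/1">one</a><a href="/2">two</a>',
    "html.parser", parse_only=only_ext_links)
print(soup.decode())   # only <a class="ext" href="/1">one</a> survives
```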
1561 | def _normalize_search_value(self, value): | 2203 | def _normalize_search_value(self, value): |
1562 | # Leave it alone if it's a Unicode string, a callable, a | 2204 | # Leave it alone if it's a Unicode string, a callable, a |
1563 | # regular expression, a boolean, or None. | 2205 | # regular expression, a boolean, or None. |
1564 | if (isinstance(value, str) or isinstance(value, collections.abc.Callable) or hasattr(value, 'match') | 2206 | if (isinstance(value, str) or isinstance(value, Callable) or hasattr(value, 'match') |
1565 | or isinstance(value, bool) or value is None): | 2207 | or isinstance(value, bool) or value is None): |
1566 | return value | 2208 | return value |
1567 | 2209 | ||
@@ -1589,19 +2231,40 @@ class SoupStrainer(object): | |||
1589 | return str(str(value)) | 2231 | return str(str(value)) |
1590 | 2232 | ||
1591 | def __str__(self): | 2233 | def __str__(self): |
1592 | if self.text: | 2234 | """A human-readable representation of this SoupStrainer.""" |
1593 | return self.text | 2235 | if self.string: |
2236 | return self.string | ||
1594 | else: | 2237 | else: |
1595 | return "%s|%s" % (self.name, self.attrs) | 2238 | return "%s|%s" % (self.name, self.attrs) |
1596 | 2239 | ||
1597 | def search_tag(self, markup_name=None, markup_attrs={}): | 2240 | def search_tag(self, markup_name=None, markup_attrs={}): |
2241 | """Check whether a Tag with the given name and attributes would | ||
2242 | match this SoupStrainer. | ||
2243 | |||
2244 | Used prospectively to decide whether to even bother creating a Tag | ||
2245 | object. | ||
2246 | |||
2247 | :param markup_name: A tag name as found in some markup. | ||
2248 | :param markup_attrs: A dictionary of attributes as found in some markup. | ||
2249 | |||
2250 | :return: True if the prospective tag would match this SoupStrainer; | ||
2251 | False otherwise. | ||
2252 | """ | ||
1598 | found = None | 2253 | found = None |
1599 | markup = None | 2254 | markup = None |
1600 | if isinstance(markup_name, Tag): | 2255 | if isinstance(markup_name, Tag): |
1601 | markup = markup_name | 2256 | markup = markup_name |
1602 | markup_attrs = markup | 2257 | markup_attrs = markup |
2258 | |||
2259 | if isinstance(self.name, str): | ||
2260 | # Optimization for a very common case where the user is | ||
2261 | # searching for a tag with one specific name, and we're | ||
2262 | # looking at a tag with a different name. | ||
2263 | if markup and not markup.prefix and self.name != markup.name: | ||
2264 | return False | ||
2265 | |||
1603 | call_function_with_tag_data = ( | 2266 | call_function_with_tag_data = ( |
1604 | isinstance(self.name, collections.abc.Callable) | 2267 | isinstance(self.name, Callable) |
1605 | and not isinstance(markup_name, Tag)) | 2268 | and not isinstance(markup_name, Tag)) |
1606 | 2269 | ||
1607 | if ((not self.name) | 2270 | if ((not self.name) |
@@ -1630,13 +2293,22 @@ class SoupStrainer(object): | |||
1630 | found = markup | 2293 | found = markup |
1631 | else: | 2294 | else: |
1632 | found = markup_name | 2295 | found = markup_name |
1633 | if found and self.text and not self._matches(found.string, self.text): | 2296 | if found and self.string and not self._matches(found.string, self.string): |
1634 | found = None | 2297 | found = None |
1635 | return found | 2298 | return found |
2299 | |||
2300 | # For BS3 compatibility. | ||
1636 | searchTag = search_tag | 2301 | searchTag = search_tag |
1637 | 2302 | ||
1638 | def search(self, markup): | 2303 | def search(self, markup): |
1639 | # print 'looking for %s in %s' % (self, markup) | 2304 | """Find all items in `markup` that match this SoupStrainer. |
2305 | |||
2306 | Used by the core _find_all() method, which is ultimately | ||
2307 | called by all find_* methods. | ||
2308 | |||
2309 | :param markup: A PageElement or a list of them. | ||
2310 | """ | ||
2311 | # print('looking for %s in %s' % (self, markup)) | ||
1640 | found = None | 2312 | found = None |
1641 | # If given a list of items, scan it for a text element that | 2313 | # If given a list of items, scan it for a text element that |
1642 | # matches. | 2314 | # matches. |
@@ -1649,49 +2321,44 @@ class SoupStrainer(object): | |||
1649 | # If it's a Tag, make sure its name or attributes match. | 2321 | # If it's a Tag, make sure its name or attributes match. |
1650 | # Don't bother with Tags if we're searching for text. | 2322 | # Don't bother with Tags if we're searching for text. |
1651 | elif isinstance(markup, Tag): | 2323 | elif isinstance(markup, Tag): |
1652 | if not self.text or self.name or self.attrs: | 2324 | if not self.string or self.name or self.attrs: |
1653 | found = self.search_tag(markup) | 2325 | found = self.search_tag(markup) |
1654 | # If it's text, make sure the text matches. | 2326 | # If it's text, make sure the text matches. |
1655 | elif isinstance(markup, NavigableString) or \ | 2327 | elif isinstance(markup, NavigableString) or \ |
1656 | isinstance(markup, str): | 2328 | isinstance(markup, str): |
1657 | if not self.name and not self.attrs and self._matches(markup, self.text): | 2329 | if not self.name and not self.attrs and self._matches(markup, self.string): |
1658 | found = markup | 2330 | found = markup |
1659 | else: | 2331 | else: |
1660 | raise Exception( | 2332 | raise Exception( |
1661 | "I don't know how to match against a %s" % markup.__class__) | 2333 | "I don't know how to match against a %s" % markup.__class__) |
1662 | return found | 2334 | return found |
1663 | 2335 | ||
1664 | def _matches(self, markup, match_against): | 2336 | def _matches(self, markup, match_against, already_tried=None): |
1665 | # print u"Matching %s against %s" % (markup, match_against) | 2337 | # print(u"Matching %s against %s" % (markup, match_against)) |
1666 | result = False | 2338 | result = False |
1667 | if isinstance(markup, list) or isinstance(markup, tuple): | 2339 | if isinstance(markup, list) or isinstance(markup, tuple): |
1668 | # This should only happen when searching a multi-valued attribute | 2340 | # This should only happen when searching a multi-valued attribute |
1669 | # like 'class'. | 2341 | # like 'class'. |
1670 | if (isinstance(match_against, str) | 2342 | for item in markup: |
1671 | and ' ' in match_against): | 2343 | if self._matches(item, match_against): |
1672 | # A bit of a special case. If they try to match "foo | 2344 | return True |
1673 | # bar" on a multivalue attribute's value, only accept | 2345 | # We didn't match any particular value of the multivalue |
1674 | # the literal value "foo bar" | 2346 | # attribute, but maybe we match the attribute value when |
1675 | # | 2347 | # considered as a string. |
1676 | # XXX This is going to be pretty slow because we keep | 2348 | if self._matches(' '.join(markup), match_against): |
1677 | # splitting match_against. But it shouldn't come up | 2349 | return True |
1678 | # too often. | 2350 | return False |
1679 | return (whitespace_re.split(match_against) == markup) | ||
1680 | else: | ||
1681 | for item in markup: | ||
1682 | if self._matches(item, match_against): | ||
1683 | return True | ||
1684 | return False | ||
1685 | 2351 | ||
1686 | if match_against is True: | 2352 | if match_against is True: |
1687 | # True matches any non-None value. | 2353 | # True matches any non-None value. |
1688 | return markup is not None | 2354 | return markup is not None |
1689 | 2355 | ||
1690 | if isinstance(match_against, collections.abc.Callable): | 2356 | if isinstance(match_against, Callable): |
1691 | return match_against(markup) | 2357 | return match_against(markup) |
1692 | 2358 | ||
1693 | # Custom callables take the tag as an argument, but all | 2359 | # Custom callables take the tag as an argument, but all |
1694 | # other ways of matching match the tag name as a string. | 2360 | # other ways of matching match the tag name as a string. |
2361 | original_markup = markup | ||
1695 | if isinstance(markup, Tag): | 2362 | if isinstance(markup, Tag): |
1696 | markup = markup.name | 2363 | markup = markup.name |
1697 | 2364 | ||
@@ -1702,23 +2369,67 @@ class SoupStrainer(object): | |||
1702 | # None matches None, False, an empty string, an empty list, and so on. | 2369 | # None matches None, False, an empty string, an empty list, and so on. |
1703 | return not match_against | 2370 | return not match_against |
1704 | 2371 | ||
1705 | if isinstance(match_against, str): | 2372 | if (hasattr(match_against, '__iter__') |
2373 | and not isinstance(match_against, str)): | ||
2374 | # We're asked to match against an iterable of items. | ||
2375 | # The markup must match at least one item in the | ||
2376 | # iterable. We'll try each one in turn. | ||
2377 | # | ||
2378 | # To avoid infinite recursion we need to keep track of | ||
2379 | # items we've already seen. | ||
2380 | if not already_tried: | ||
2381 | already_tried = set() | ||
2382 | for item in match_against: | ||
2383 | if item.__hash__: | ||
2384 | key = item | ||
2385 | else: | ||
2386 | key = id(item) | ||
2387 | if key in already_tried: | ||
2388 | continue | ||
2389 | else: | ||
2390 | already_tried.add(key) | ||
2391 | if self._matches(original_markup, item, already_tried): | ||
2392 | return True | ||
2393 | else: | ||
2394 | return False | ||
2395 | |||
2396 | # Beyond this point we might need to run the test twice: once against | ||
2397 | # the tag's name and once against its prefixed name. | ||
2398 | match = False | ||
2399 | |||
2400 | if not match and isinstance(match_against, str): | ||
1706 | # Exact string match | 2401 | # Exact string match |
1707 | return markup == match_against | 2402 | match = markup == match_against |
1708 | 2403 | ||
1709 | if hasattr(match_against, 'match'): | 2404 | if not match and hasattr(match_against, 'search'): |
1710 | # Regexp match | 2405 | # Regexp match |
1711 | return match_against.search(markup) | 2406 | return match_against.search(markup) |
1712 | 2407 | ||
1713 | if hasattr(match_against, '__iter__'): | 2408 | if (not match |
1714 | # The markup must be an exact match against something | 2409 | and isinstance(original_markup, Tag) |
1715 | # in the iterable. | 2410 | and original_markup.prefix): |
1716 | return markup in match_against | 2411 | # Try the whole thing again with the prefixed tag name. |
2412 | return self._matches( | ||
2413 | original_markup.prefix + ':' + original_markup.name, match_against | ||
2414 | ) | ||
2415 | |||
2416 | return match | ||
1717 | 2417 | ||
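The public face of _matches(): a filter may be a string, a compiled regular expression, a callable, True, or an iterable of any of these. A quick sketch on invented markup:

```python
from bs4 import BeautifulSoup
import re

soup = BeautifulSoup("<b>x</b><i>y</i><u>z</u>", "html.parser")

soup.find_all("b")                           # exact string match
soup.find_all(re.compile("^[bi]$"))          # regexp: .search(), per above
soup.find_all(["b", "u"])                    # iterable: any item may match
soup.find_all(lambda tag: tag.name != "i")   # callable on the Tag
soup.find_all(True)                          # True matches every tag
```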
1718 | 2418 | ||
1719 | class ResultSet(list): | 2419 | class ResultSet(list): |
1720 | """A ResultSet is just a list that keeps track of the SoupStrainer | 2420 | """A ResultSet is just a list that keeps track of the SoupStrainer |
1721 | that created it.""" | 2421 | that created it.""" |
1722 | def __init__(self, source, result=()): | 2422 | def __init__(self, source, result=()): |
2423 | """Constructor. | ||
2424 | |||
2425 | :param source: A SoupStrainer. | ||
2426 | :param result: A list of PageElements. | ||
2427 | """ | ||
1723 | super(ResultSet, self).__init__(result) | 2428 | super(ResultSet, self).__init__(result) |
1724 | self.source = source | 2429 | self.source = source |
2430 | |||
2431 | def __getattr__(self, key): | ||
2432 | """Raise a helpful exception to explain a common code fix.""" | ||
2433 | raise AttributeError( | ||
2434 | "ResultSet object has no attribute '%s'. You're probably treating a list of elements like a single element. Did you call find_all() when you meant to call find()?" % key | ||
2435 | ) | ||
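The mistake this __getattr__ guards against, sketched with invented markup:

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup('<a href="/x">x</a>', "html.parser")
links = soup.find_all("a")   # a ResultSet (a list), not a single Tag
try:
    links.get_text()         # attribute access on the list
except AttributeError as e:
    print(e)                 # explains find() vs find_all()
soup.find("a").get_text()    # correct: operate on a single Tag
```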
diff --git a/bitbake/lib/bs4/formatter.py b/bitbake/lib/bs4/formatter.py new file mode 100644 index 0000000000..9fa1b57cb6 --- /dev/null +++ b/bitbake/lib/bs4/formatter.py | |||
@@ -0,0 +1,185 @@ | |||
1 | from bs4.dammit import EntitySubstitution | ||
2 | |||
3 | class Formatter(EntitySubstitution): | ||
4 | """Describes a strategy to use when outputting a parse tree to a string. | ||
5 | |||
6 | Some parts of this strategy come from the distinction between | ||
7 | HTML4, HTML5, and XML. Others are configurable by the user. | ||
8 | |||
9 | Formatters are passed in as the `formatter` argument to methods | ||
10 | like `PageElement.encode`. Most people won't need to think about | ||
11 | formatters, and most people who need to think about them can pass | ||
12 | in one of these predefined strings as `formatter` rather than | ||
13 | making a new Formatter object: | ||
14 | |||
15 | For HTML documents: | ||
16 | * 'html' - HTML entity substitution for generic HTML documents. (default) | ||
17 | * 'html5' - HTML entity substitution for HTML5 documents, as | ||
18 | well as some optimizations in the way tags are rendered. | ||
19 | * 'minimal' - Only make the substitutions necessary to guarantee | ||
20 | valid HTML. | ||
21 | * None - Do not perform any substitution. This will be faster | ||
22 | but may result in invalid markup. | ||
23 | |||
24 | For XML documents: | ||
25 | * 'html' - Entity substitution for XHTML documents. | ||
26 | * 'minimal' - Only make the substitutions necessary to guarantee | ||
27 | valid XML. (default) | ||
28 | * None - Do not perform any substitution. This will be faster | ||
29 | but may result in invalid markup. | ||
30 | """ | ||
31 | # Registries of XML and HTML formatters. | ||
32 | XML_FORMATTERS = {} | ||
33 | HTML_FORMATTERS = {} | ||
34 | |||
35 | HTML = 'html' | ||
36 | XML = 'xml' | ||
37 | |||
38 | HTML_DEFAULTS = dict( | ||
39 | cdata_containing_tags=set(["script", "style"]), | ||
40 | ) | ||
41 | |||
42 | def _default(self, language, value, kwarg): | ||
43 | if value is not None: | ||
44 | return value | ||
45 | if language == self.XML: | ||
46 | return set() | ||
47 | return self.HTML_DEFAULTS[kwarg] | ||
48 | |||
49 | def __init__( | ||
50 | self, language=None, entity_substitution=None, | ||
51 | void_element_close_prefix='/', cdata_containing_tags=None, | ||
52 | empty_attributes_are_booleans=False, indent=1, | ||
53 | ): | ||
54 | r"""Constructor. | ||
55 | |||
56 | :param language: This should be Formatter.XML if you are formatting | ||
57 | XML markup and Formatter.HTML if you are formatting HTML markup. | ||
58 | |||
59 | :param entity_substitution: A function to call to replace special | ||
60 | characters with XML/HTML entities. For examples, see | ||
61 | bs4.dammit.EntitySubstitution.substitute_html and substitute_xml. | ||
62 | :param void_element_close_prefix: By default, void elements | ||
63 | are represented as <tag/> (XML rules) rather than <tag> | ||
64 | (HTML rules). To get <tag>, pass in the empty string. | ||
65 | :param cdata_containing_tags: The list of tags that are defined | ||
66 | as containing CDATA in this dialect. For example, in HTML, | ||
67 | <script> and <style> tags are defined as containing CDATA, | ||
68 | and their contents should not be formatted. | ||
69 | :param empty_attributes_are_booleans: Render attributes whose value | ||
70 | is the empty string as HTML-style boolean attributes. | ||
71 | (Attributes whose value is None are always rendered this way.) | ||
72 | |||
73 | :param indent: If indent is a non-negative integer or string, | ||
74 | then the contents of elements will be indented | ||
75 | appropriately when pretty-printing. An indent level of 0, | ||
76 | negative, or "" will only insert newlines. Using a | ||
77 | positive integer indent indents that many spaces per | ||
78 | level. If indent is a string (such as "\t"), that string | ||
79 | is used to indent each level. The default behavior is to | ||
80 | indent one space per level. | ||
81 | """ | ||
82 | self.language = language | ||
83 | self.entity_substitution = entity_substitution | ||
84 | self.void_element_close_prefix = void_element_close_prefix | ||
85 | self.cdata_containing_tags = self._default( | ||
86 | language, cdata_containing_tags, 'cdata_containing_tags' | ||
87 | ) | ||
88 | self.empty_attributes_are_booleans=empty_attributes_are_booleans | ||
89 | if indent is None: | ||
90 | indent = 0 | ||
91 | if isinstance(indent, int): | ||
92 | if indent < 0: | ||
93 | indent = 0 | ||
94 | indent = ' ' * indent | ||
95 | elif isinstance(indent, str): | ||
96 | indent = indent | ||
97 | else: | ||
98 | indent = ' ' | ||
99 | self.indent = indent | ||
100 | |||
101 | def substitute(self, ns): | ||
102 | """Process a string that needs to undergo entity substitution. | ||
103 | This may be a string encountered in an attribute value or as | ||
104 | text. | ||
105 | |||
106 | :param ns: A string. | ||
107 | :return: A string with certain characters replaced by named | ||
108 | or numeric entities. | ||
109 | """ | ||
110 | if not self.entity_substitution: | ||
111 | return ns | ||
112 | from .element import NavigableString | ||
113 | if (isinstance(ns, NavigableString) | ||
114 | and ns.parent is not None | ||
115 | and ns.parent.name in self.cdata_containing_tags): | ||
116 | # Do nothing. | ||
117 | return ns | ||
118 | # Substitute. | ||
119 | return self.entity_substitution(ns) | ||
120 | |||
121 | def attribute_value(self, value): | ||
122 | """Process the value of an attribute. | ||
123 | |||
124 | :param value: A string. | ||
125 | :return: A string with certain characters replaced by named | ||
126 | or numeric entities. | ||
127 | """ | ||
128 | return self.substitute(value) | ||
129 | |||
130 | def attributes(self, tag): | ||
131 | """Reorder a tag's attributes however you want. | ||
132 | |||
133 | By default, attributes are sorted alphabetically. This makes | ||
134 | behavior consistent between Python 2 and Python 3, and preserves | ||
135 | backwards compatibility with older versions of Beautiful Soup. | ||
136 | |||
137 | If `empty_attributes_are_booleans` is True, then attributes whose | ||
138 | values are set to the empty string will be treated as boolean | ||
139 | attributes. | ||
140 | """ | ||
141 | if tag.attrs is None: | ||
142 | return [] | ||
143 | return sorted( | ||
144 | (k, (None if self.empty_attributes_are_booleans and v == '' else v)) | ||
145 | for k, v in list(tag.attrs.items()) | ||
146 | ) | ||
147 | |||
148 | class HTMLFormatter(Formatter): | ||
149 | """A generic Formatter for HTML.""" | ||
150 | REGISTRY = {} | ||
151 | def __init__(self, *args, **kwargs): | ||
152 | super(HTMLFormatter, self).__init__(self.HTML, *args, **kwargs) | ||
153 | |||
154 | |||
155 | class XMLFormatter(Formatter): | ||
156 | """A generic Formatter for XML.""" | ||
157 | REGISTRY = {} | ||
158 | def __init__(self, *args, **kwargs): | ||
159 | super(XMLFormatter, self).__init__(self.XML, *args, **kwargs) | ||
160 | |||
161 | |||
162 | # Set up aliases for the default formatters. | ||
163 | HTMLFormatter.REGISTRY['html'] = HTMLFormatter( | ||
164 | entity_substitution=EntitySubstitution.substitute_html | ||
165 | ) | ||
166 | HTMLFormatter.REGISTRY["html5"] = HTMLFormatter( | ||
167 | entity_substitution=EntitySubstitution.substitute_html, | ||
168 | void_element_close_prefix=None, | ||
169 | empty_attributes_are_booleans=True, | ||
170 | ) | ||
171 | HTMLFormatter.REGISTRY["minimal"] = HTMLFormatter( | ||
172 | entity_substitution=EntitySubstitution.substitute_xml | ||
173 | ) | ||
174 | HTMLFormatter.REGISTRY[None] = HTMLFormatter( | ||
175 | entity_substitution=None | ||
176 | ) | ||
177 | XMLFormatter.REGISTRY["html"] = XMLFormatter( | ||
178 | entity_substitution=EntitySubstitution.substitute_html | ||
179 | ) | ||
180 | XMLFormatter.REGISTRY["minimal"] = XMLFormatter( | ||
181 | entity_substitution=EntitySubstitution.substitute_xml | ||
182 | ) | ||
183 | XMLFormatter.REGISTRY[None] = XMLFormatter( | ||
184 | entity_substitution=None | ||
185 | ) | ||
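A sketch of choosing a formatter at output time, per the registry above; the custom substitution function is invented for illustration:

```python
from bs4 import BeautifulSoup
from bs4.formatter import HTMLFormatter

soup = BeautifulSoup('<input disabled=""><p>&amp;</p>', "html.parser")

soup.decode(formatter="html5")    # boolean attrs, no "/" on void elements
soup.decode(formatter="minimal")  # only substitutions needed for validity
soup.decode(formatter=None)       # no entity substitution at all

# A custom Formatter is constructed and passed in directly:
shouty = HTMLFormatter(entity_substitution=lambda s: s.upper())
soup.p.decode(formatter=shouty)
```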
diff --git a/bitbake/lib/bs4/testing.py b/bitbake/lib/bs4/testing.py deleted file mode 100644 index 6584ecf303..0000000000 --- a/bitbake/lib/bs4/testing.py +++ /dev/null | |||
@@ -1,686 +0,0 @@ | |||
1 | """Helper classes for tests.""" | ||
2 | |||
3 | __license__ = "MIT" | ||
4 | |||
5 | import pickle | ||
6 | import copy | ||
7 | import unittest | ||
8 | from unittest import TestCase | ||
9 | from bs4 import BeautifulSoup | ||
10 | from bs4.element import ( | ||
11 | CharsetMetaAttributeValue, | ||
12 | Comment, | ||
13 | ContentMetaAttributeValue, | ||
14 | Doctype, | ||
15 | SoupStrainer, | ||
16 | ) | ||
17 | |||
18 | from bs4.builder._htmlparser import HTMLParserTreeBuilder | ||
19 | default_builder = HTMLParserTreeBuilder | ||
20 | |||
21 | |||
22 | class SoupTest(unittest.TestCase): | ||
23 | |||
24 | @property | ||
25 | def default_builder(self): | ||
26 | return default_builder() | ||
27 | |||
28 | def soup(self, markup, **kwargs): | ||
29 | """Build a Beautiful Soup object from markup.""" | ||
30 | builder = kwargs.pop('builder', self.default_builder) | ||
31 | return BeautifulSoup(markup, builder=builder, **kwargs) | ||
32 | |||
33 | def document_for(self, markup): | ||
34 | """Turn an HTML fragment into a document. | ||
35 | |||
36 | The details depend on the builder. | ||
37 | """ | ||
38 | return self.default_builder.test_fragment_to_document(markup) | ||
39 | |||
40 | def assertSoupEquals(self, to_parse, compare_parsed_to=None): | ||
41 | builder = self.default_builder | ||
42 | obj = BeautifulSoup(to_parse, builder=builder) | ||
43 | if compare_parsed_to is None: | ||
44 | compare_parsed_to = to_parse | ||
45 | |||
46 | self.assertEqual(obj.decode(), self.document_for(compare_parsed_to)) | ||
47 | |||
48 | def assertConnectedness(self, element): | ||
49 | """Ensure that next_element and previous_element are properly | ||
50 | set for all descendants of the given element. | ||
51 | """ | ||
52 | earlier = None | ||
53 | for e in element.descendants: | ||
54 | if earlier: | ||
55 | self.assertEqual(e, earlier.next_element) | ||
56 | self.assertEqual(earlier, e.previous_element) | ||
57 | earlier = e | ||
58 | |||
59 | class HTMLTreeBuilderSmokeTest(SoupTest): | ||
60 | |||
61 | """A basic test of a treebuilder's competence. | ||
62 | |||
63 | Any HTML treebuilder, present or future, should be able to pass | ||
64 | these tests. With invalid markup, there's room for interpretation, | ||
65 | and different parsers can handle it differently. But with the | ||
66 | markup in these tests, there's not much room for interpretation. | ||
67 | """ | ||
68 | |||
69 | def test_pickle_and_unpickle_identity(self): | ||
70 | # Pickling a tree, then unpickling it, yields a tree identical | ||
71 | # to the original. | ||
72 | tree = self.soup("<a><b>foo</a>") | ||
73 | dumped = pickle.dumps(tree, 2) | ||
74 | loaded = pickle.loads(dumped) | ||
75 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
76 | self.assertEqual(loaded.decode(), tree.decode()) | ||
77 | |||
78 | def assertDoctypeHandled(self, doctype_fragment): | ||
79 | """Assert that a given doctype string is handled correctly.""" | ||
80 | doctype_str, soup = self._document_with_doctype(doctype_fragment) | ||
81 | |||
82 | # Make sure a Doctype object was created. | ||
83 | doctype = soup.contents[0] | ||
84 | self.assertEqual(doctype.__class__, Doctype) | ||
85 | self.assertEqual(doctype, doctype_fragment) | ||
86 | self.assertEqual(str(soup)[:len(doctype_str)], doctype_str) | ||
87 | |||
88 | # Make sure that the doctype was correctly associated with the | ||
89 | # parse tree and that the rest of the document parsed. | ||
90 | self.assertEqual(soup.p.contents[0], 'foo') | ||
91 | |||
92 | def _document_with_doctype(self, doctype_fragment): | ||
93 | """Generate and parse a document with the given doctype.""" | ||
94 | doctype = '<!DOCTYPE %s>' % doctype_fragment | ||
95 | markup = doctype + '\n<p>foo</p>' | ||
96 | soup = self.soup(markup) | ||
97 | return doctype, soup | ||
98 | |||
99 | def test_normal_doctypes(self): | ||
100 | """Make sure normal, everyday HTML doctypes are handled correctly.""" | ||
101 | self.assertDoctypeHandled("html") | ||
102 | self.assertDoctypeHandled( | ||
103 | 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"') | ||
104 | |||
105 | def test_empty_doctype(self): | ||
106 | soup = self.soup("<!DOCTYPE>") | ||
107 | doctype = soup.contents[0] | ||
108 | self.assertEqual("", doctype.strip()) | ||
109 | |||
110 | def test_public_doctype_with_url(self): | ||
111 | doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"' | ||
112 | self.assertDoctypeHandled(doctype) | ||
113 | |||
114 | def test_system_doctype(self): | ||
115 | self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"') | ||
116 | |||
117 | def test_namespaced_system_doctype(self): | ||
118 | # We can handle a namespaced doctype with a system ID. | ||
119 | self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"') | ||
120 | |||
121 | def test_namespaced_public_doctype(self): | ||
122 | # Test a namespaced doctype with a public id. | ||
123 | self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"') | ||
124 | |||
125 | def test_real_xhtml_document(self): | ||
126 | """A real XHTML document should come out more or less the same as it went in.""" | ||
127 | markup = b"""<?xml version="1.0" encoding="utf-8"?> | ||
128 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"> | ||
129 | <html xmlns="http://www.w3.org/1999/xhtml"> | ||
130 | <head><title>Hello.</title></head> | ||
131 | <body>Goodbye.</body> | ||
132 | </html>""" | ||
133 | soup = self.soup(markup) | ||
134 | self.assertEqual( | ||
135 | soup.encode("utf-8").replace(b"\n", b""), | ||
136 | markup.replace(b"\n", b"")) | ||
137 | |||
138 | def test_processing_instruction(self): | ||
139 | markup = b"""<?PITarget PIContent?>""" | ||
140 | soup = self.soup(markup) | ||
141 | self.assertEqual(markup, soup.encode("utf8")) | ||
142 | |||
143 | def test_deepcopy(self): | ||
144 | """Make sure you can copy the tree builder. | ||
145 | |||
146 | This is important because the builder is part of a | ||
147 | BeautifulSoup object, and we want to be able to copy that. | ||
148 | """ | ||
149 | copy.deepcopy(self.default_builder) | ||
150 | |||
151 | def test_p_tag_is_never_empty_element(self): | ||
152 | """A <p> tag is never designated as an empty-element tag. | ||
153 | |||
154 | Even if the markup shows it as an empty-element tag, it | ||
155 | shouldn't be presented that way. | ||
156 | """ | ||
157 | soup = self.soup("<p/>") | ||
158 | self.assertFalse(soup.p.is_empty_element) | ||
159 | self.assertEqual(str(soup.p), "<p></p>") | ||
160 | |||
161 | def test_unclosed_tags_get_closed(self): | ||
162 | """A tag that's not closed by the end of the document should be closed. | ||
163 | |||
164 | This applies to all tags except empty-element tags. | ||
165 | """ | ||
166 | self.assertSoupEquals("<p>", "<p></p>") | ||
167 | self.assertSoupEquals("<b>", "<b></b>") | ||
168 | |||
169 | self.assertSoupEquals("<br>", "<br/>") | ||
170 | |||
171 | def test_br_is_always_empty_element_tag(self): | ||
172 | """A <br> tag is designated as an empty-element tag. | ||
173 | |||
174 | Some parsers treat <br></br> as one <br/> tag, some parsers as | ||
175 | two tags, but it should always be an empty-element tag. | ||
176 | """ | ||
177 | soup = self.soup("<br></br>") | ||
178 | self.assertTrue(soup.br.is_empty_element) | ||
179 | self.assertEqual(str(soup.br), "<br/>") | ||
180 | |||
181 | def test_nested_formatting_elements(self): | ||
182 | self.assertSoupEquals("<em><em></em></em>") | ||
183 | |||
184 | def test_double_head(self): | ||
185 | html = '''<!DOCTYPE html> | ||
186 | <html> | ||
187 | <head> | ||
188 | <title>Ordinary HEAD element test</title> | ||
189 | </head> | ||
190 | <script type="text/javascript"> | ||
191 | alert("Help!"); | ||
192 | </script> | ||
193 | <body> | ||
194 | Hello, world! | ||
195 | </body> | ||
196 | </html> | ||
197 | ''' | ||
198 | soup = self.soup(html) | ||
199 | self.assertEqual("text/javascript", soup.find('script')['type']) | ||
200 | |||
201 | def test_comment(self): | ||
202 | # Comments are represented as Comment objects. | ||
203 | markup = "<p>foo<!--foobar-->baz</p>" | ||
204 | self.assertSoupEquals(markup) | ||
205 | |||
206 | soup = self.soup(markup) | ||
207 | comment = soup.find(text="foobar") | ||
208 | self.assertEqual(comment.__class__, Comment) | ||
209 | |||
210 | # The comment is properly integrated into the tree. | ||
211 | foo = soup.find(text="foo") | ||
212 | self.assertEqual(comment, foo.next_element) | ||
213 | baz = soup.find(text="baz") | ||
214 | self.assertEqual(comment, baz.previous_element) | ||
215 | |||
216 | def test_preserved_whitespace_in_pre_and_textarea(self): | ||
217 | """Whitespace must be preserved in <pre> and <textarea> tags.""" | ||
218 | self.assertSoupEquals("<pre> </pre>") | ||
219 | self.assertSoupEquals("<textarea> woo </textarea>") | ||
220 | |||
221 | def test_nested_inline_elements(self): | ||
222 | """Inline elements can be nested indefinitely.""" | ||
223 | b_tag = "<b>Inside a B tag</b>" | ||
224 | self.assertSoupEquals(b_tag) | ||
225 | |||
226 | nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>" | ||
227 | self.assertSoupEquals(nested_b_tag) | ||
228 | |||
229 | double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>" | ||
230 | self.assertSoupEquals(nested_b_tag) | ||
231 | |||
232 | def test_nested_block_level_elements(self): | ||
233 | """Block elements can be nested.""" | ||
234 | soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>') | ||
235 | blockquote = soup.blockquote | ||
236 | self.assertEqual(blockquote.p.b.string, 'Foo') | ||
237 | self.assertEqual(blockquote.b.string, 'Foo') | ||
238 | |||
239 | def test_correctly_nested_tables(self): | ||
240 | """One table can go inside another one.""" | ||
241 | markup = ('<table id="1">' | ||
242 | '<tr>' | ||
243 | "<td>Here's another table:" | ||
244 | '<table id="2">' | ||
245 | '<tr><td>foo</td></tr>' | ||
246 | '</table></td>') | ||
247 | |||
248 | self.assertSoupEquals( | ||
249 | markup, | ||
250 | '<table id="1"><tr><td>Here\'s another table:' | ||
251 | '<table id="2"><tr><td>foo</td></tr></table>' | ||
252 | '</td></tr></table>') | ||
253 | |||
254 | self.assertSoupEquals( | ||
255 | "<table><thead><tr><td>Foo</td></tr></thead>" | ||
256 | "<tbody><tr><td>Bar</td></tr></tbody>" | ||
257 | "<tfoot><tr><td>Baz</td></tr></tfoot></table>") | ||
258 | |||
259 | def test_deeply_nested_multivalued_attribute(self): | ||
260 | # html5lib can set the attributes of the same tag many times | ||
261 | # as it rearranges the tree. This has caused problems with | ||
262 | # multivalued attributes. | ||
263 | markup = '<table><div><div class="css"></div></div></table>' | ||
264 | soup = self.soup(markup) | ||
265 | self.assertEqual(["css"], soup.div.div['class']) | ||
266 | |||
267 | def test_multivalued_attribute_on_html(self): | ||
268 | # html5lib uses a different API to set the attributes of the | ||
269 | # <html> tag. This has caused problems with multivalued | ||
270 | # attributes. | ||
271 | markup = '<html class="a b"></html>' | ||
272 | soup = self.soup(markup) | ||
273 | self.assertEqual(["a", "b"], soup.html['class']) | ||
274 | |||
275 | def test_angle_brackets_in_attribute_values_are_escaped(self): | ||
276 | self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>') | ||
277 | |||
278 | def test_entities_in_attributes_converted_to_unicode(self): | ||
279 | expect = '<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>' | ||
280 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
281 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
282 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
283 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
284 | |||
285 | def test_entities_in_text_converted_to_unicode(self): | ||
286 | expect = '<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>' | ||
287 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
288 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
289 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
290 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
291 | |||
292 | def test_quot_entity_converted_to_quotation_mark(self): | ||
293 | self.assertSoupEquals("<p>I said "good day!"</p>", | ||
294 | '<p>I said "good day!"</p>') | ||
295 | |||
296 | def test_out_of_range_entity(self): | ||
297 | expect = "\N{REPLACEMENT CHARACTER}" | ||
298 | self.assertSoupEquals("�", expect) | ||
299 | self.assertSoupEquals("�", expect) | ||
300 | self.assertSoupEquals("�", expect) | ||
301 | |||
302 | def test_multipart_strings(self): | ||
303 | "Mostly to prevent a recurrence of a bug in the html5lib treebuilder." | ||
304 | soup = self.soup("<html><h2>\nfoo</h2><p></p></html>") | ||
305 | self.assertEqual("p", soup.h2.string.next_element.name) | ||
306 | self.assertEqual("p", soup.p.name) | ||
307 | self.assertConnectedness(soup) | ||
308 | |||
309 | def test_head_tag_between_head_and_body(self): | ||
310 | "Prevent recurrence of a bug in the html5lib treebuilder." | ||
311 | content = """<html><head></head> | ||
312 | <link></link> | ||
313 | <body>foo</body> | ||
314 | </html> | ||
315 | """ | ||
316 | soup = self.soup(content) | ||
317 | self.assertNotEqual(None, soup.html.body) | ||
318 | self.assertConnectedness(soup) | ||
319 | |||
320 | def test_multiple_copies_of_a_tag(self): | ||
321 | "Prevent recurrence of a bug in the html5lib treebuilder." | ||
322 | content = """<!DOCTYPE html> | ||
323 | <html> | ||
324 | <body> | ||
325 | <article id="a" > | ||
326 | <div><a href="1"></div> | ||
327 | <footer> | ||
328 | <a href="2"></a> | ||
329 | </footer> | ||
330 | </article> | ||
331 | </body> | ||
332 | </html> | ||
333 | """ | ||
334 | soup = self.soup(content) | ||
335 | self.assertConnectedness(soup.article) | ||
336 | |||
337 | def test_basic_namespaces(self): | ||
338 | """Parsers don't need to *understand* namespaces, but at the | ||
339 | very least they should not choke on namespaces or lose | ||
340 | data.""" | ||
341 | |||
342 | markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>' | ||
343 | soup = self.soup(markup) | ||
344 | self.assertEqual(markup, soup.encode()) | ||
345 | html = soup.html | ||
346 | self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns']) | ||
347 | self.assertEqual( | ||
348 | 'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml']) | ||
349 | self.assertEqual( | ||
350 | 'http://www.w3.org/2000/svg', soup.html['xmlns:svg']) | ||
351 | |||
352 | def test_multivalued_attribute_value_becomes_list(self): | ||
353 | markup = b'<a class="foo bar">' | ||
354 | soup = self.soup(markup) | ||
355 | self.assertEqual(['foo', 'bar'], soup.a['class']) | ||
356 | |||
357 | # | ||
358 | # Generally speaking, tests below this point are more tests of | ||
359 | # Beautiful Soup than tests of the tree builders. But parsers are | ||
360 | # weird, so we run these tests separately for every tree builder | ||
361 | # to detect any differences between them. | ||
362 | # | ||
363 | |||
364 | def test_can_parse_unicode_document(self): | ||
365 | # A seemingly innocuous document... but it's in Unicode! And | ||
366 | # it contains characters that can't be represented in the | ||
367 | # encoding found in the declaration! The horror! | ||
368 | markup = '<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>' | ||
369 | soup = self.soup(markup) | ||
370 | self.assertEqual('Sacr\xe9 bleu!', soup.body.string) | ||
371 | |||
372 | def test_soupstrainer(self): | ||
373 | """Parsers should be able to work with SoupStrainers.""" | ||
374 | strainer = SoupStrainer("b") | ||
375 | soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>", | ||
376 | parse_only=strainer) | ||
377 | self.assertEqual(soup.decode(), "<b>bold</b>") | ||
378 | |||
379 | def test_single_quote_attribute_values_become_double_quotes(self): | ||
380 | self.assertSoupEquals("<foo attr='bar'></foo>", | ||
381 | '<foo attr="bar"></foo>') | ||
382 | |||
383 | def test_attribute_values_with_nested_quotes_are_left_alone(self): | ||
384 | text = """<foo attr='bar "brawls" happen'>a</foo>""" | ||
385 | self.assertSoupEquals(text) | ||
386 | |||
387 | def test_attribute_values_with_double_nested_quotes_get_quoted(self): | ||
388 | text = """<foo attr='bar "brawls" happen'>a</foo>""" | ||
389 | soup = self.soup(text) | ||
390 | soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"' | ||
391 | self.assertSoupEquals( | ||
392 | soup.foo.decode(), | ||
393 | """<foo attr="Brawls happen at "Bob\'s Bar"">a</foo>""") | ||
394 | |||
395 | def test_ampersand_in_attribute_value_gets_escaped(self): | ||
396 | self.assertSoupEquals('<this is="really messed up & stuff"></this>', | ||
397 | '<this is="really messed up & stuff"></this>') | ||
398 | |||
399 | self.assertSoupEquals( | ||
400 | '<a href="http://example.org?a=1&b=2;3">foo</a>', | ||
401 | '<a href="http://example.org?a=1&b=2;3">foo</a>') | ||
402 | |||
403 | def test_escaped_ampersand_in_attribute_value_is_left_alone(self): | ||
404 | self.assertSoupEquals('<a href="http://example.org?a=1&b=2;3"></a>') | ||
405 | |||
406 | def test_entities_in_strings_converted_during_parsing(self): | ||
407 | # Both XML and HTML entities are converted to Unicode characters | ||
408 | # during parsing. | ||
409 | text = "<p><<sacré bleu!>></p>" | ||
410 | expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>" | ||
411 | self.assertSoupEquals(text, expected) | ||
412 | |||
413 | def test_smart_quotes_converted_on_the_way_in(self): | ||
414 | # Microsoft smart quotes are converted to Unicode characters during | ||
415 | # parsing. | ||
416 | quote = b"<p>\x91Foo\x92</p>" | ||
417 | soup = self.soup(quote) | ||
418 | self.assertEqual( | ||
419 | soup.p.string, | ||
420 | "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}") | ||
421 | |||
422 | def test_non_breaking_spaces_converted_on_the_way_in(self): | ||
423 | soup = self.soup("<a> </a>") | ||
424 | self.assertEqual(soup.a.string, "\N{NO-BREAK SPACE}" * 2) | ||
425 | |||
426 | def test_entities_converted_on_the_way_out(self): | ||
427 | text = "<p><<sacré bleu!>></p>" | ||
428 | expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8") | ||
429 | soup = self.soup(text) | ||
430 | self.assertEqual(soup.p.encode("utf-8"), expected) | ||
431 | |||
432 | def test_real_iso_latin_document(self): | ||
433 | # Smoke test of interrelated functionality, using an | ||
434 | # easy-to-understand document. | ||
435 | |||
436 | # Here it is in Unicode. Note that it claims to be in ISO-Latin-1. | ||
437 | unicode_html = '<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>' | ||
438 | |||
439 | # That's because we're going to encode it into ISO-Latin-1, and use | ||
440 | # that to test. | ||
441 | iso_latin_html = unicode_html.encode("iso-8859-1") | ||
442 | |||
443 | # Parse the ISO-Latin-1 HTML. | ||
444 | soup = self.soup(iso_latin_html) | ||
445 | # Encode it to UTF-8. | ||
446 | result = soup.encode("utf-8") | ||
447 | |||
448 | # What do we expect the result to look like? Well, it would | ||
449 | # look like unicode_html, except that the META tag would say | ||
450 | # UTF-8 instead of ISO-Latin-1. | ||
451 | expected = unicode_html.replace("ISO-Latin-1", "utf-8") | ||
452 | |||
453 | # And, of course, it would be in UTF-8, not Unicode. | ||
454 | expected = expected.encode("utf-8") | ||
455 | |||
456 | # Ta-da! | ||
457 | self.assertEqual(result, expected) | ||
458 | |||
459 | def test_real_shift_jis_document(self): | ||
460 | # Smoke test to make sure the parser can handle a document in | ||
461 | # Shift-JIS encoding, without choking. | ||
462 | shift_jis_html = ( | ||
463 | b'<html><head></head><body><pre>' | ||
464 | b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f' | ||
465 | b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c' | ||
466 | b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B' | ||
467 | b'</pre></body></html>') | ||
468 | unicode_html = shift_jis_html.decode("shift-jis") | ||
469 | soup = self.soup(unicode_html) | ||
470 | |||
471 | # Make sure the parse tree is correctly encoded to various | ||
472 | # encodings. | ||
473 | self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8")) | ||
474 | self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp")) | ||
475 | |||
476 | def test_real_hebrew_document(self): | ||
477 | # A real-world test to make sure we can convert ISO-8859-8 (a | ||
478 | # Hebrew encoding) to UTF-8. | ||
479 | hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>' | ||
480 | soup = self.soup( | ||
481 | hebrew_document, from_encoding="iso8859-8") | ||
482 | self.assertEqual(soup.original_encoding, 'iso8859-8') | ||
483 | self.assertEqual( | ||
484 | soup.encode('utf-8'), | ||
485 | hebrew_document.decode("iso8859-8").encode("utf-8")) | ||
486 | |||
487 | def test_meta_tag_reflects_current_encoding(self): | ||
488 | # Here's the <meta> tag saying that a document is | ||
489 | # encoded in Shift-JIS. | ||
490 | meta_tag = ('<meta content="text/html; charset=x-sjis" ' | ||
491 | 'http-equiv="Content-type"/>') | ||
492 | |||
493 | # Here's a document incorporating that meta tag. | ||
494 | shift_jis_html = ( | ||
495 | '<html><head>\n%s\n' | ||
496 | '<meta http-equiv="Content-language" content="ja"/>' | ||
497 | '</head><body>Shift-JIS markup goes here.') % meta_tag | ||
498 | soup = self.soup(shift_jis_html) | ||
499 | |||
500 | # Parse the document, and the charset is seemingly unaffected. | ||
501 | parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'}) | ||
502 | content = parsed_meta['content'] | ||
503 | self.assertEqual('text/html; charset=x-sjis', content) | ||
504 | |||
505 | # But that value is actually a ContentMetaAttributeValue object. | ||
506 | self.assertTrue(isinstance(content, ContentMetaAttributeValue)) | ||
507 | |||
508 | # And it will take on a value that reflects its current | ||
509 | # encoding. | ||
510 | self.assertEqual('text/html; charset=utf8', content.encode("utf8")) | ||
511 | |||
512 | # For the rest of the story, see TestSubstitutions in | ||
513 | # test_tree.py. | ||
514 | |||
515 | def test_html5_style_meta_tag_reflects_current_encoding(self): | ||
516 | # Here's the <meta> tag saying that a document is | ||
517 | # encoded in Shift-JIS. | ||
518 | meta_tag = ('<meta id="encoding" charset="x-sjis" />') | ||
519 | |||
520 | # Here's a document incorporating that meta tag. | ||
521 | shift_jis_html = ( | ||
522 | '<html><head>\n%s\n' | ||
523 | '<meta http-equiv="Content-language" content="ja"/>' | ||
524 | '</head><body>Shift-JIS markup goes here.') % meta_tag | ||
525 | soup = self.soup(shift_jis_html) | ||
526 | |||
527 | # Parse the document, and the charset is seemingly unaffected. | ||
528 | parsed_meta = soup.find('meta', id="encoding") | ||
529 | charset = parsed_meta['charset'] | ||
530 | self.assertEqual('x-sjis', charset) | ||
531 | |||
532 | # But that value is actually a CharsetMetaAttributeValue object. | ||
533 | self.assertTrue(isinstance(charset, CharsetMetaAttributeValue)) | ||
534 | |||
535 | # And it will take on a value that reflects its current | ||
536 | # encoding. | ||
537 | self.assertEqual('utf8', charset.encode("utf8")) | ||
538 | |||
539 | def test_tag_with_no_attributes_can_have_attributes_added(self): | ||
540 | data = self.soup("<a>text</a>") | ||
541 | data.a['foo'] = 'bar' | ||
542 | self.assertEqual('<a foo="bar">text</a>', data.a.decode()) | ||
543 | |||
544 | class XMLTreeBuilderSmokeTest(SoupTest): | ||
545 | |||
546 | def test_pickle_and_unpickle_identity(self): | ||
547 | # Pickling a tree, then unpickling it, yields a tree identical | ||
548 | # to the original. | ||
549 | tree = self.soup("<a><b>foo</a>") | ||
550 | dumped = pickle.dumps(tree, 2) | ||
551 | loaded = pickle.loads(dumped) | ||
552 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
553 | self.assertEqual(loaded.decode(), tree.decode()) | ||
554 | |||
555 | def test_docstring_generated(self): | ||
556 | soup = self.soup("<root/>") | ||
557 | self.assertEqual( | ||
558 | soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>') | ||
559 | |||
560 | def test_xml_declaration(self): | ||
561 | markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>""" | ||
562 | soup = self.soup(markup) | ||
563 | self.assertEqual(markup, soup.encode("utf8")) | ||
564 | |||
565 | def test_real_xhtml_document(self): | ||
566 | """A real XHTML document should come out *exactly* the same as it went in.""" | ||
567 | markup = b"""<?xml version="1.0" encoding="utf-8"?> | ||
568 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"> | ||
569 | <html xmlns="http://www.w3.org/1999/xhtml"> | ||
570 | <head><title>Hello.</title></head> | ||
571 | <body>Goodbye.</body> | ||
572 | </html>""" | ||
573 | soup = self.soup(markup) | ||
574 | self.assertEqual( | ||
575 | soup.encode("utf-8"), markup) | ||
576 | |||
577 | def test_formatter_processes_script_tag_for_xml_documents(self): | ||
578 | doc = """ | ||
579 | <script type="text/javascript"> | ||
580 | </script> | ||
581 | """ | ||
582 | soup = BeautifulSoup(doc, "lxml-xml") | ||
583 | # lxml would have stripped this while parsing, but we can add | ||
584 | # it later. | ||
585 | soup.script.string = 'console.log("< < hey > > ");' | ||
586 | encoded = soup.encode() | ||
587 | self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded) | ||
588 | |||
589 | def test_can_parse_unicode_document(self): | ||
590 | markup = '<?xml version="1.0" encoding="euc-jp"?><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>' | ||
591 | soup = self.soup(markup) | ||
592 | self.assertEqual('Sacr\xe9 bleu!', soup.root.string) | ||
593 | |||
594 | def test_popping_namespaced_tag(self): | ||
595 | markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>' | ||
596 | soup = self.soup(markup) | ||
597 | self.assertEqual( | ||
598 | str(soup.rss), markup) | ||
599 | |||
600 | def test_docstring_includes_correct_encoding(self): | ||
601 | soup = self.soup("<root/>") | ||
602 | self.assertEqual( | ||
603 | soup.encode("latin1"), | ||
604 | b'<?xml version="1.0" encoding="latin1"?>\n<root/>') | ||
605 | |||
606 | def test_large_xml_document(self): | ||
607 | """A large XML document should come out the same as it went in.""" | ||
608 | markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>' | ||
609 | + b'0' * (2**12) | ||
610 | + b'</root>') | ||
611 | soup = self.soup(markup) | ||
612 | self.assertEqual(soup.encode("utf-8"), markup) | ||
613 | |||
614 | |||
615 | def test_tags_are_empty_element_if_and_only_if_they_are_empty(self): | ||
616 | self.assertSoupEquals("<p>", "<p/>") | ||
617 | self.assertSoupEquals("<p>foo</p>") | ||
618 | |||
619 | def test_namespaces_are_preserved(self): | ||
620 | markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>' | ||
621 | soup = self.soup(markup) | ||
622 | root = soup.root | ||
623 | self.assertEqual("http://example.com/", root['xmlns:a']) | ||
624 | self.assertEqual("http://example.net/", root['xmlns:b']) | ||
625 | |||
626 | def test_closing_namespaced_tag(self): | ||
627 | markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>' | ||
628 | soup = self.soup(markup) | ||
629 | self.assertEqual(str(soup.p), markup) | ||
630 | |||
631 | def test_namespaced_attributes(self): | ||
632 | markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>' | ||
633 | soup = self.soup(markup) | ||
634 | self.assertEqual(str(soup.foo), markup) | ||
635 | |||
636 | def test_namespaced_attributes_xml_namespace(self): | ||
637 | markup = '<foo xml:lang="fr">bar</foo>' | ||
638 | soup = self.soup(markup) | ||
639 | self.assertEqual(str(soup.foo), markup) | ||
640 | |||
641 | class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest): | ||
642 | """Smoke test for a tree builder that supports HTML5.""" | ||
643 | |||
644 | def test_real_xhtml_document(self): | ||
645 | # Since XHTML is not HTML5, HTML5 parsers are not tested to handle | ||
646 | # XHTML documents in any particular way. | ||
647 | pass | ||
648 | |||
649 | def test_html_tags_have_namespace(self): | ||
650 | markup = "<a>" | ||
651 | soup = self.soup(markup) | ||
652 | self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace) | ||
653 | |||
654 | def test_svg_tags_have_namespace(self): | ||
655 | markup = '<svg><circle/></svg>' | ||
656 | soup = self.soup(markup) | ||
657 | namespace = "http://www.w3.org/2000/svg" | ||
658 | self.assertEqual(namespace, soup.svg.namespace) | ||
659 | self.assertEqual(namespace, soup.circle.namespace) | ||
660 | |||
661 | |||
662 | def test_mathml_tags_have_namespace(self): | ||
663 | markup = '<math><msqrt>5</msqrt></math>' | ||
664 | soup = self.soup(markup) | ||
665 | namespace = 'http://www.w3.org/1998/Math/MathML' | ||
666 | self.assertEqual(namespace, soup.math.namespace) | ||
667 | self.assertEqual(namespace, soup.msqrt.namespace) | ||
668 | |||
669 | def test_xml_declaration_becomes_comment(self): | ||
670 | markup = '<?xml version="1.0" encoding="utf-8"?><html></html>' | ||
671 | soup = self.soup(markup) | ||
672 | self.assertTrue(isinstance(soup.contents[0], Comment)) | ||
673 | self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?') | ||
674 | self.assertEqual("html", soup.contents[0].next_element.name) | ||
675 | |||
676 | def skipIf(condition, reason): | ||
677 | def nothing(test, *args, **kwargs): | ||
678 | return None | ||
679 | |||
680 | def decorator(test_item): | ||
681 | if condition: | ||
682 | return nothing | ||
683 | else: | ||
684 | return test_item | ||
685 | |||
686 | return decorator | ||
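The smoke-test classes above are designed as mixins: a concrete test case combines SoupTest with one smoke test and names a parser via default_builder. A minimal sketch of that pattern, using only the stock html.parser so it runs without optional dependencies (MiniSmokeTest and its method are illustrative names, not bs4 API):

    import unittest
    from bs4 import BeautifulSoup

    class MiniSmokeTest(object):
        # Parser-independent checks; concrete subclasses supply self.parser.
        def test_single_quotes_become_double_quotes(self):
            soup = BeautifulSoup("<foo attr='bar'></foo>", self.parser)
            self.assertEqual(soup.decode(), '<foo attr="bar"></foo>')

    class HTMLParserMiniSmokeTest(MiniSmokeTest, unittest.TestCase):
        parser = "html.parser"

    if __name__ == "__main__":
        unittest.main()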
diff --git a/bitbake/lib/bs4/tests/__init__.py b/bitbake/lib/bs4/tests/__init__.py deleted file mode 100644 index 142c8cc3f1..0000000000 --- a/bitbake/lib/bs4/tests/__init__.py +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | "The beautifulsoup tests." | ||
diff --git a/bitbake/lib/bs4/tests/test_builder_registry.py b/bitbake/lib/bs4/tests/test_builder_registry.py deleted file mode 100644 index 90cad82933..0000000000 --- a/bitbake/lib/bs4/tests/test_builder_registry.py +++ /dev/null | |||
@@ -1,147 +0,0 @@ | |||
1 | """Tests of the builder registry.""" | ||
2 | |||
3 | import unittest | ||
4 | import warnings | ||
5 | |||
6 | from bs4 import BeautifulSoup | ||
7 | from bs4.builder import ( | ||
8 | builder_registry as registry, | ||
9 | HTMLParserTreeBuilder, | ||
10 | TreeBuilderRegistry, | ||
11 | ) | ||
12 | |||
13 | try: | ||
14 | from bs4.builder import HTML5TreeBuilder | ||
15 | HTML5LIB_PRESENT = True | ||
16 | except ImportError: | ||
17 | HTML5LIB_PRESENT = False | ||
18 | |||
19 | try: | ||
20 | from bs4.builder import ( | ||
21 | LXMLTreeBuilderForXML, | ||
22 | LXMLTreeBuilder, | ||
23 | ) | ||
24 | LXML_PRESENT = True | ||
25 | except ImportError: | ||
26 | LXML_PRESENT = False | ||
27 | |||
28 | |||
29 | class BuiltInRegistryTest(unittest.TestCase): | ||
30 | """Test the built-in registry with the default builders registered.""" | ||
31 | |||
32 | def test_combination(self): | ||
33 | if LXML_PRESENT: | ||
34 | self.assertEqual(registry.lookup('fast', 'html'), | ||
35 | LXMLTreeBuilder) | ||
36 | |||
37 | if LXML_PRESENT: | ||
38 | self.assertEqual(registry.lookup('permissive', 'xml'), | ||
39 | LXMLTreeBuilderForXML) | ||
40 | self.assertEqual(registry.lookup('strict', 'html'), | ||
41 | HTMLParserTreeBuilder) | ||
42 | if HTML5LIB_PRESENT: | ||
43 | self.assertEqual(registry.lookup('html5lib', 'html'), | ||
44 | HTML5TreeBuilder) | ||
45 | |||
46 | def test_lookup_by_markup_type(self): | ||
47 | if LXML_PRESENT: | ||
48 | self.assertEqual(registry.lookup('html'), LXMLTreeBuilder) | ||
49 | self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML) | ||
50 | else: | ||
51 | self.assertEqual(registry.lookup('xml'), None) | ||
52 | if HTML5LIB_PRESENT: | ||
53 | self.assertEqual(registry.lookup('html'), HTML5TreeBuilder) | ||
54 | else: | ||
55 | self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder) | ||
56 | |||
57 | def test_named_library(self): | ||
58 | if LXML_PRESENT: | ||
59 | self.assertEqual(registry.lookup('lxml', 'xml'), | ||
60 | LXMLTreeBuilderForXML) | ||
61 | self.assertEqual(registry.lookup('lxml', 'html'), | ||
62 | LXMLTreeBuilder) | ||
63 | if HTML5LIB_PRESENT: | ||
64 | self.assertEqual(registry.lookup('html5lib'), | ||
65 | HTML5TreeBuilder) | ||
66 | |||
67 | self.assertEqual(registry.lookup('html.parser'), | ||
68 | HTMLParserTreeBuilder) | ||
69 | |||
70 | def test_beautifulsoup_constructor_does_lookup(self): | ||
71 | |||
72 | with warnings.catch_warnings(record=True) as w: | ||
73 | # This will create a warning about not explicitly | ||
74 | # specifying a parser, but we'll ignore it. | ||
75 | |||
76 | # You can pass in a string. | ||
77 | BeautifulSoup("", features="html") | ||
78 | # Or a list of strings. | ||
79 | BeautifulSoup("", features=["html", "fast"]) | ||
80 | |||
81 | # You'll get an exception if BS can't find an appropriate | ||
82 | # builder. | ||
83 | self.assertRaises(ValueError, BeautifulSoup, | ||
84 | "", features="no-such-feature") | ||
85 | |||
86 | class RegistryTest(unittest.TestCase): | ||
87 | """Test the TreeBuilderRegistry class in general.""" | ||
88 | |||
89 | def setUp(self): | ||
90 | self.registry = TreeBuilderRegistry() | ||
91 | |||
92 | def builder_for_features(self, *feature_list): | ||
93 | cls = type('Builder_' + '_'.join(feature_list), | ||
94 | (object,), {'features' : feature_list}) | ||
95 | |||
96 | self.registry.register(cls) | ||
97 | return cls | ||
98 | |||
99 | def test_register_with_no_features(self): | ||
100 | builder = self.builder_for_features() | ||
101 | |||
102 | # Since the builder advertises no features, you can't find it | ||
103 | # by looking up features. | ||
104 | self.assertEqual(self.registry.lookup('foo'), None) | ||
105 | |||
106 | # But you can find it by doing a lookup with no features, if | ||
107 | # this happens to be the only registered builder. | ||
108 | self.assertEqual(self.registry.lookup(), builder) | ||
109 | |||
110 | def test_register_with_features_makes_lookup_succeed(self): | ||
111 | builder = self.builder_for_features('foo', 'bar') | ||
112 | self.assertEqual(self.registry.lookup('foo'), builder) | ||
113 | self.assertEqual(self.registry.lookup('bar'), builder) | ||
114 | |||
115 | def test_lookup_fails_when_no_builder_implements_feature(self): | ||
116 | builder = self.builder_for_features('foo', 'bar') | ||
117 | self.assertEqual(self.registry.lookup('baz'), None) | ||
118 | |||
119 | def test_lookup_gets_most_recent_registration_when_no_feature_specified(self): | ||
120 | builder1 = self.builder_for_features('foo') | ||
121 | builder2 = self.builder_for_features('bar') | ||
122 | self.assertEqual(self.registry.lookup(), builder2) | ||
123 | |||
124 | def test_lookup_fails_when_no_tree_builders_registered(self): | ||
125 | self.assertEqual(self.registry.lookup(), None) | ||
126 | |||
127 | def test_lookup_gets_most_recent_builder_supporting_all_features(self): | ||
128 | has_one = self.builder_for_features('foo') | ||
129 | has_the_other = self.builder_for_features('bar') | ||
130 | has_both_early = self.builder_for_features('foo', 'bar', 'baz') | ||
131 | has_both_late = self.builder_for_features('foo', 'bar', 'quux') | ||
132 | lacks_one = self.builder_for_features('bar') | ||
133 | has_the_other = self.builder_for_features('foo') | ||
134 | |||
135 | # There are two builders featuring 'foo' and 'bar', but | ||
136 | # the one that also features 'quux' was registered later. | ||
137 | self.assertEqual(self.registry.lookup('foo', 'bar'), | ||
138 | has_both_late) | ||
139 | |||
140 | # There is only one builder featuring 'foo', 'bar', and 'baz'. | ||
141 | self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'), | ||
142 | has_both_early) | ||
143 | |||
144 | def test_lookup_fails_when_cannot_reconcile_requested_features(self): | ||
145 | builder1 = self.builder_for_features('foo', 'bar') | ||
146 | builder2 = self.builder_for_features('foo', 'baz') | ||
147 | self.assertEqual(self.registry.lookup('bar', 'baz'), None) | ||
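Outside the test suite, the registry exercised above is the same object the BeautifulSoup constructor consults for its features argument; a short usage sketch:

    from bs4 import BeautifulSoup
    from bs4.builder import builder_registry

    # lookup() returns the most suitable registered builder class,
    # or None when no registered builder advertises the feature.
    print(builder_registry.lookup("html.parser"))
    print(builder_registry.lookup("no-such-feature"))  # None

    # The constructor performs the same lookup internally.
    soup = BeautifulSoup("<b>hi</b>", features="html.parser")
    print(soup.b.string)  # hi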
diff --git a/bitbake/lib/bs4/tests/test_docs.py b/bitbake/lib/bs4/tests/test_docs.py deleted file mode 100644 index d1d76a33bf..0000000000 --- a/bitbake/lib/bs4/tests/test_docs.py +++ /dev/null | |||
@@ -1,32 +0,0 @@ | |||
1 | "Test harness for doctests." | ||
2 | |||
3 | # pylint: disable-msg=E0611,W0142 | ||
4 | |||
5 | __metaclass__ = type | ||
6 | __all__ = [ | ||
7 | 'additional_tests', | ||
8 | ] | ||
9 | |||
10 | import doctest | ||
11 | #from pkg_resources import ( | ||
12 | # resource_filename, resource_exists, resource_listdir, cleanup_resources) | ||
13 | |||
14 | DOCTEST_FLAGS = ( | ||
15 | doctest.ELLIPSIS | | ||
16 | doctest.NORMALIZE_WHITESPACE | | ||
17 | doctest.REPORT_NDIFF) | ||
18 | |||
19 | # def additional_tests(): | ||
20 | # "Run the doc tests (README.txt and docs/*, if any exist)" | ||
21 | # doctest_files = [ | ||
22 | # os.path.abspath(resource_filename('bs4', 'README.txt'))] | ||
23 | # if resource_exists('bs4', 'docs'): | ||
24 | # for name in resource_listdir('bs4', 'docs'): | ||
25 | # if name.endswith('.txt'): | ||
26 | # doctest_files.append( | ||
27 | # os.path.abspath( | ||
28 | # resource_filename('bs4', 'docs/%s' % name))) | ||
29 | # kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS) | ||
30 | # atexit.register(cleanup_resources) | ||
31 | # return unittest.TestSuite(( | ||
32 | # doctest.DocFileSuite(*doctest_files, **kwargs))) | ||
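The only live definition in this file was DOCTEST_FLAGS; for reference, a tiny self-contained example of applying those flags (sample() is an invented function, used purely for illustration):

    import doctest

    DOCTEST_FLAGS = (
        doctest.ELLIPSIS |
        doctest.NORMALIZE_WHITESPACE |
        doctest.REPORT_NDIFF)

    def sample():
        """
        >>> list(range(20))
        [0, 1, ..., 19]
        """

    # ELLIPSIS (part of DOCTEST_FLAGS) lets "..." in the expected
    # output stand in for the elided middle of the list.
    doctest.run_docstring_examples(sample, {}, optionflags=DOCTEST_FLAGS)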
diff --git a/bitbake/lib/bs4/tests/test_html5lib.py b/bitbake/lib/bs4/tests/test_html5lib.py deleted file mode 100644 index a7494ca5ba..0000000000 --- a/bitbake/lib/bs4/tests/test_html5lib.py +++ /dev/null | |||
@@ -1,98 +0,0 @@ | |||
1 | """Tests to ensure that the html5lib tree builder generates good trees.""" | ||
2 | |||
3 | import warnings | ||
4 | |||
5 | try: | ||
6 | from bs4.builder import HTML5TreeBuilder | ||
7 | HTML5LIB_PRESENT = True | ||
8 | except ImportError as e: | ||
9 | HTML5LIB_PRESENT = False | ||
10 | from bs4.element import SoupStrainer | ||
11 | from bs4.testing import ( | ||
12 | HTML5TreeBuilderSmokeTest, | ||
13 | SoupTest, | ||
14 | skipIf, | ||
15 | ) | ||
16 | |||
17 | @skipIf( | ||
18 | not HTML5LIB_PRESENT, | ||
19 | "html5lib seems not to be present, not testing its tree builder.") | ||
20 | class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): | ||
21 | """See ``HTML5TreeBuilderSmokeTest``.""" | ||
22 | |||
23 | @property | ||
24 | def default_builder(self): | ||
25 | return HTML5TreeBuilder() | ||
26 | |||
27 | def test_soupstrainer(self): | ||
28 | # The html5lib tree builder does not support SoupStrainers. | ||
29 | strainer = SoupStrainer("b") | ||
30 | markup = "<p>A <b>bold</b> statement.</p>" | ||
31 | with warnings.catch_warnings(record=True) as w: | ||
32 | soup = self.soup(markup, parse_only=strainer) | ||
33 | self.assertEqual( | ||
34 | soup.decode(), self.document_for(markup)) | ||
35 | |||
36 | self.assertTrue( | ||
37 | "the html5lib tree builder doesn't support parse_only" in | ||
38 | str(w[0].message)) | ||
39 | |||
40 | def test_correctly_nested_tables(self): | ||
41 | """html5lib inserts <tbody> tags where other parsers don't.""" | ||
42 | markup = ('<table id="1">' | ||
43 | '<tr>' | ||
44 | "<td>Here's another table:" | ||
45 | '<table id="2">' | ||
46 | '<tr><td>foo</td></tr>' | ||
47 | '</table></td>') | ||
48 | |||
49 | self.assertSoupEquals( | ||
50 | markup, | ||
51 | '<table id="1"><tbody><tr><td>Here\'s another table:' | ||
52 | '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>' | ||
53 | '</td></tr></tbody></table>') | ||
54 | |||
55 | self.assertSoupEquals( | ||
56 | "<table><thead><tr><td>Foo</td></tr></thead>" | ||
57 | "<tbody><tr><td>Bar</td></tr></tbody>" | ||
58 | "<tfoot><tr><td>Baz</td></tr></tfoot></table>") | ||
59 | |||
60 | def test_xml_declaration_followed_by_doctype(self): | ||
61 | markup = '''<?xml version="1.0" encoding="utf-8"?> | ||
62 | <!DOCTYPE html> | ||
63 | <html> | ||
64 | <head> | ||
65 | </head> | ||
66 | <body> | ||
67 | <p>foo</p> | ||
68 | </body> | ||
69 | </html>''' | ||
70 | soup = self.soup(markup) | ||
71 | # Verify that we can reach the <p> tag; this means the tree is connected. | ||
72 | self.assertEqual(b"<p>foo</p>", soup.p.encode()) | ||
73 | |||
74 | def test_reparented_markup(self): | ||
75 | markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>' | ||
76 | soup = self.soup(markup) | ||
77 | self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode()) | ||
78 | self.assertEqual(2, len(soup.find_all('p'))) | ||
79 | |||
80 | |||
81 | def test_reparented_markup_ends_with_whitespace(self): | ||
82 | markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n' | ||
83 | soup = self.soup(markup) | ||
84 | self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode()) | ||
85 | self.assertEqual(2, len(soup.find_all('p'))) | ||
86 | |||
87 | def test_processing_instruction(self): | ||
88 | """Processing instructions become comments.""" | ||
89 | markup = b"""<?PITarget PIContent?>""" | ||
90 | soup = self.soup(markup) | ||
91 | assert str(soup).startswith("<!--?PITarget PIContent?-->") | ||
92 | |||
93 | def test_cloned_multivalue_node(self): | ||
94 | markup = b"""<a class="my_class"><p></a>""" | ||
95 | soup = self.soup(markup) | ||
96 | a1, a2 = soup.find_all('a') | ||
97 | self.assertEqual(a1, a2) | ||
98 | assert a1 is not a2 | ||
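The <tbody> insertion checked by test_correctly_nested_tables above is easy to see directly; a small sketch (html5lib is an optional dependency, hence the guard):

    from bs4 import BeautifulSoup, FeatureNotFound

    markup = "<table><tr><td>foo</td></tr></table>"

    # html.parser reproduces the markup as written...
    print(BeautifulSoup(markup, "html.parser").decode())
    # <table><tr><td>foo</td></tr></table>

    # ...while html5lib follows the HTML5 algorithm and inserts <tbody>.
    try:
        print(BeautifulSoup(markup, "html5lib").table.decode())
        # <table><tbody><tr><td>foo</td></tr></tbody></table>
    except FeatureNotFound:
        print("html5lib is not installed")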
diff --git a/bitbake/lib/bs4/tests/test_htmlparser.py b/bitbake/lib/bs4/tests/test_htmlparser.py deleted file mode 100644 index 30a25e6709..0000000000 --- a/bitbake/lib/bs4/tests/test_htmlparser.py +++ /dev/null | |||
@@ -1,31 +0,0 @@ | |||
1 | """Tests to ensure that the html.parser tree builder generates good | ||
2 | trees.""" | ||
3 | |||
4 | import pickle | ||
5 | from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest | ||
6 | from bs4.builder import HTMLParserTreeBuilder | ||
7 | |||
8 | class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): | ||
9 | |||
10 | @property | ||
11 | def default_builder(self): | ||
12 | return HTMLParserTreeBuilder() | ||
13 | |||
14 | def test_namespaced_system_doctype(self): | ||
15 | # html.parser can't handle namespaced doctypes, so skip this one. | ||
16 | pass | ||
17 | |||
18 | def test_namespaced_public_doctype(self): | ||
19 | # html.parser can't handle namespaced doctypes, so skip this one. | ||
20 | pass | ||
21 | |||
22 | def test_builder_is_pickled(self): | ||
23 | """Unlike most tree builders, HTMLParserTreeBuilder and will | ||
24 | be restored after pickling. | ||
25 | """ | ||
26 | tree = self.soup("<a><b>foo</a>") | ||
27 | dumped = pickle.dumps(tree, 2) | ||
28 | loaded = pickle.loads(dumped) | ||
29 | self.assertTrue(isinstance(loaded.builder, type(tree.builder))) | ||
30 | |||
31 | |||
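The pickling behaviour asserted above is not tied to the test harness; a quick sketch of the round trip with html.parser:

    import pickle
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a><b>foo</b></a>", "html.parser")
    loaded = pickle.loads(pickle.dumps(soup, 2))

    # The tree always survives the round trip...
    assert loaded.decode() == soup.decode()
    # ...and with html.parser the builder comes back as the same type,
    # which is exactly what test_builder_is_pickled checks.
    assert isinstance(loaded.builder, type(soup.builder))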
diff --git a/bitbake/lib/bs4/tests/test_lxml.py b/bitbake/lib/bs4/tests/test_lxml.py deleted file mode 100644 index 6b6cdd07cb..0000000000 --- a/bitbake/lib/bs4/tests/test_lxml.py +++ /dev/null | |||
@@ -1,70 +0,0 @@ | |||
1 | """Tests to ensure that the lxml tree builder generates good trees.""" | ||
2 | |||
3 | import warnings | ||
4 | |||
5 | try: | ||
6 | import lxml.etree | ||
7 | LXML_PRESENT = True | ||
8 | LXML_VERSION = lxml.etree.LXML_VERSION | ||
9 | except ImportError as e: | ||
10 | LXML_PRESENT = False | ||
11 | LXML_VERSION = (0,) | ||
12 | |||
13 | if LXML_PRESENT: | ||
14 | from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML | ||
15 | |||
16 | from bs4 import BeautifulStoneSoup | ||
17 | from bs4.testing import skipIf | ||
18 | from bs4.testing import ( | ||
19 | HTMLTreeBuilderSmokeTest, | ||
20 | XMLTreeBuilderSmokeTest, | ||
21 | SoupTest, | ||
22 | skipIf, | ||
23 | ) | ||
24 | |||
25 | @skipIf( | ||
26 | not LXML_PRESENT, | ||
27 | "lxml seems not to be present, not testing its tree builder.") | ||
28 | class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): | ||
29 | """See ``HTMLTreeBuilderSmokeTest``.""" | ||
30 | |||
31 | @property | ||
32 | def default_builder(self): | ||
33 | return LXMLTreeBuilder() | ||
34 | |||
35 | def test_out_of_range_entity(self): | ||
36 | self.assertSoupEquals( | ||
37 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
38 | self.assertSoupEquals( | ||
39 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
40 | self.assertSoupEquals( | ||
41 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
42 | |||
43 | # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this | ||
44 | # test if an old version of lxml is installed. | ||
45 | |||
46 | @skipIf( | ||
47 | not LXML_PRESENT or LXML_VERSION < (2,3,5,0), | ||
48 | "Skipping doctype test for old version of lxml to avoid segfault.") | ||
49 | def test_empty_doctype(self): | ||
50 | soup = self.soup("<!DOCTYPE>") | ||
51 | doctype = soup.contents[0] | ||
52 | self.assertEqual("", doctype.strip()) | ||
53 | |||
54 | def test_beautifulstonesoup_is_xml_parser(self): | ||
55 | # Make sure that the deprecated BSS class uses an xml builder | ||
56 | # if one is installed. | ||
57 | with warnings.catch_warnings(record=True) as w: | ||
58 | soup = BeautifulStoneSoup("<b />") | ||
59 | self.assertEqual("<b/>", str(soup.b)) | ||
60 | self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message)) | ||
61 | |||
62 | @skipIf( | ||
63 | not LXML_PRESENT, | ||
64 | "lxml seems not to be present, not testing its XML tree builder.") | ||
65 | class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest): | ||
66 | """See ``HTMLTreeBuilderSmokeTest``.""" | ||
67 | |||
68 | @property | ||
69 | def default_builder(self): | ||
70 | return LXMLTreeBuilderForXML() | ||
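Both lxml builders exercised here are reachable by name through the public constructor; a short sketch, assuming lxml is installed:

    from bs4 import BeautifulSoup

    # "lxml" selects the HTML builder; "lxml-xml" (or "xml") the XML one.
    html_soup = BeautifulSoup("<p>one<p>two", "lxml")
    xml_soup = BeautifulSoup("<root><p>one</p></root>", "lxml-xml")

    # The HTML builder closes the unclosed <p> tags...
    print([p.string for p in html_soup.find_all("p")])  # ['one', 'two']

    # ...and the XML builder adds an XML declaration when encoding.
    print(xml_soup.encode())
    # b'<?xml version="1.0" encoding="utf-8"?>\n<root><p>one</p></root>'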
diff --git a/bitbake/lib/bs4/tests/test_soup.py b/bitbake/lib/bs4/tests/test_soup.py deleted file mode 100644 index 6ad3cb3765..0000000000 --- a/bitbake/lib/bs4/tests/test_soup.py +++ /dev/null | |||
@@ -1,479 +0,0 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | """Tests of Beautiful Soup as a whole.""" | ||
3 | |||
4 | import logging | ||
5 | import unittest | ||
6 | import sys | ||
7 | import tempfile | ||
8 | |||
9 | from bs4 import BeautifulSoup | ||
10 | from bs4.element import ( | ||
11 | CharsetMetaAttributeValue, | ||
12 | ContentMetaAttributeValue, | ||
13 | SoupStrainer, | ||
14 | NamespacedAttribute, | ||
15 | ) | ||
16 | import bs4.dammit | ||
17 | from bs4.dammit import ( | ||
18 | EntitySubstitution, | ||
19 | UnicodeDammit, | ||
20 | EncodingDetector, | ||
21 | ) | ||
22 | from bs4.testing import ( | ||
23 | SoupTest, | ||
24 | skipIf, | ||
25 | ) | ||
26 | import warnings | ||
27 | |||
28 | try: | ||
29 | from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML | ||
30 | LXML_PRESENT = True | ||
31 | except ImportError as e: | ||
32 | LXML_PRESENT = False | ||
33 | |||
34 | PYTHON_2_PRE_2_7 = (sys.version_info < (2,7)) | ||
35 | PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2)) | ||
36 | |||
37 | class TestConstructor(SoupTest): | ||
38 | |||
39 | def test_short_unicode_input(self): | ||
40 | data = "<h1>éé</h1>" | ||
41 | soup = self.soup(data) | ||
42 | self.assertEqual("éé", soup.h1.string) | ||
43 | |||
44 | def test_embedded_null(self): | ||
45 | data = "<h1>foo\0bar</h1>" | ||
46 | soup = self.soup(data) | ||
47 | self.assertEqual("foo\0bar", soup.h1.string) | ||
48 | |||
49 | def test_exclude_encodings(self): | ||
50 | utf8_data = "Räksmörgås".encode("utf-8") | ||
51 | soup = self.soup(utf8_data, exclude_encodings=["utf-8"]) | ||
52 | self.assertEqual("windows-1252", soup.original_encoding) | ||
53 | |||
54 | |||
55 | class TestWarnings(SoupTest): | ||
56 | |||
57 | def _assert_no_parser_specified(self, s, is_there=True): | ||
58 | v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80]) | ||
59 | self.assertTrue(v) | ||
60 | |||
61 | def test_warning_if_no_parser_specified(self): | ||
62 | with warnings.catch_warnings(record=True) as w: | ||
63 | soup = self.soup("<a><b></b></a>") | ||
64 | msg = str(w[0].message) | ||
65 | self._assert_no_parser_specified(msg) | ||
66 | |||
67 | def test_warning_if_parser_specified_too_vague(self): | ||
68 | with warnings.catch_warnings(record=True) as w: | ||
69 | soup = self.soup("<a><b></b></a>", "html") | ||
70 | msg = str(w[0].message) | ||
71 | self._assert_no_parser_specified(msg) | ||
72 | |||
73 | def test_no_warning_if_explicit_parser_specified(self): | ||
74 | with warnings.catch_warnings(record=True) as w: | ||
75 | soup = self.soup("<a><b></b></a>", "html.parser") | ||
76 | self.assertEqual([], w) | ||
77 | |||
78 | def test_parseOnlyThese_renamed_to_parse_only(self): | ||
79 | with warnings.catch_warnings(record=True) as w: | ||
80 | soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b")) | ||
81 | msg = str(w[0].message) | ||
82 | self.assertTrue("parseOnlyThese" in msg) | ||
83 | self.assertTrue("parse_only" in msg) | ||
84 | self.assertEqual(b"<b></b>", soup.encode()) | ||
85 | |||
86 | def test_fromEncoding_renamed_to_from_encoding(self): | ||
87 | with warnings.catch_warnings(record=True) as w: | ||
88 | utf8 = b"\xc3\xa9" | ||
89 | soup = self.soup(utf8, fromEncoding="utf8") | ||
90 | msg = str(w[0].message) | ||
91 | self.assertTrue("fromEncoding" in msg) | ||
92 | self.assertTrue("from_encoding" in msg) | ||
93 | self.assertEqual("utf8", soup.original_encoding) | ||
94 | |||
95 | def test_unrecognized_keyword_argument(self): | ||
96 | self.assertRaises( | ||
97 | TypeError, self.soup, "<a>", no_such_argument=True) | ||
98 | |||
99 | class TestFilenameAndUrlWarnings(SoupTest): | ||
100 | |||
101 | def test_disk_file_warning(self): | ||
102 | filehandle = tempfile.NamedTemporaryFile() | ||
103 | filename = filehandle.name | ||
104 | try: | ||
105 | with warnings.catch_warnings(record=True) as w: | ||
106 | soup = self.soup(filename) | ||
107 | msg = str(w[0].message) | ||
108 | self.assertTrue("looks like a filename" in msg) | ||
109 | finally: | ||
110 | filehandle.close() | ||
111 | |||
112 | # The file no longer exists, so Beautiful Soup will no longer issue the warning. | ||
113 | with warnings.catch_warnings(record=True) as w: | ||
114 | soup = self.soup(filename) | ||
115 | self.assertEqual(0, len(w)) | ||
116 | |||
117 | def test_url_warning(self): | ||
118 | with warnings.catch_warnings(record=True) as w: | ||
119 | soup = self.soup("http://www.crummy.com/") | ||
120 | msg = str(w[0].message) | ||
121 | self.assertTrue("looks like a URL" in msg) | ||
122 | |||
123 | with warnings.catch_warnings(record=True) as w: | ||
124 | soup = self.soup("http://www.crummy.com/ is great") | ||
125 | self.assertEqual(0, len(w)) | ||
126 | |||
127 | class TestSelectiveParsing(SoupTest): | ||
128 | |||
129 | def test_parse_with_soupstrainer(self): | ||
130 | markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>" | ||
131 | strainer = SoupStrainer("b") | ||
132 | soup = self.soup(markup, parse_only=strainer) | ||
133 | self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>") | ||
134 | |||
135 | |||
136 | class TestEntitySubstitution(unittest.TestCase): | ||
137 | """Standalone tests of the EntitySubstitution class.""" | ||
138 | def setUp(self): | ||
139 | self.sub = EntitySubstitution | ||
140 | |||
141 | def test_simple_html_substitution(self): | ||
142 | # Unicode characters corresponding to named HTML entities | ||
143 | # are substituted, and no others. | ||
144 | s = "foo\u2200\N{SNOWMAN}\u00f5bar" | ||
145 | self.assertEqual(self.sub.substitute_html(s), | ||
146 | "foo∀\N{SNOWMAN}õbar") | ||
147 | |||
148 | def test_smart_quote_substitution(self): | ||
149 | # MS smart quotes are a common source of frustration, so we | ||
150 | # give them a special test. | ||
151 | quotes = b"\x91\x92foo\x93\x94" | ||
152 | dammit = UnicodeDammit(quotes) | ||
153 | self.assertEqual(self.sub.substitute_html(dammit.markup), | ||
154 | "‘’foo“”") | ||
155 | |||
156 | def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self): | ||
157 | s = 'Welcome to "my bar"' | ||
158 | self.assertEqual(self.sub.substitute_xml(s, False), s) | ||
159 | |||
160 | def test_xml_attribute_quoting_normally_uses_double_quotes(self): | ||
161 | self.assertEqual(self.sub.substitute_xml("Welcome", True), | ||
162 | '"Welcome"') | ||
163 | self.assertEqual(self.sub.substitute_xml("Bob's Bar", True), | ||
164 | '"Bob\'s Bar"') | ||
165 | |||
166 | def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self): | ||
167 | s = 'Welcome to "my bar"' | ||
168 | self.assertEqual(self.sub.substitute_xml(s, True), | ||
169 | "'Welcome to \"my bar\"'") | ||
170 | |||
171 | def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self): | ||
172 | s = 'Welcome to "Bob\'s Bar"' | ||
173 | self.assertEqual( | ||
174 | self.sub.substitute_xml(s, True), | ||
175 | '"Welcome to "Bob\'s Bar""') | ||
176 | |||
177 | def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self): | ||
178 | quoted = 'Welcome to "Bob\'s Bar"' | ||
179 | self.assertEqual(self.sub.substitute_xml(quoted), quoted) | ||
180 | |||
181 | def test_xml_quoting_handles_angle_brackets(self): | ||
182 | self.assertEqual( | ||
183 | self.sub.substitute_xml("foo<bar>"), | ||
184 | "foo<bar>") | ||
185 | |||
186 | def test_xml_quoting_handles_ampersands(self): | ||
187 | self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T") | ||
188 | |||
189 | def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self): | ||
190 | self.assertEqual( | ||
191 | self.sub.substitute_xml("&Aacute;T&T"), | ||
192 | "&amp;Aacute;T&amp;T") | ||
193 | |||
194 | def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self): | ||
195 | self.assertEqual( | ||
196 | self.sub.substitute_xml_containing_entities("&Aacute;T&T"), | ||
197 | "&Aacute;T&amp;T") | ||
198 | |||
199 | def test_quotes_not_html_substituted(self): | ||
200 | """There's no need to do this except inside attribute values.""" | ||
201 | text = 'Bob\'s "bar"' | ||
202 | self.assertEqual(self.sub.substitute_html(text), text) | ||
203 | |||
204 | |||
205 | class TestEncodingConversion(SoupTest): | ||
206 | # Test Beautiful Soup's ability to decode and encode from various | ||
207 | # encodings. | ||
208 | |||
209 | def setUp(self): | ||
210 | super(TestEncodingConversion, self).setUp() | ||
211 | self.unicode_data = '<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>' | ||
212 | self.utf8_data = self.unicode_data.encode("utf-8") | ||
213 | # Just so you know what it looks like. | ||
214 | self.assertEqual( | ||
215 | self.utf8_data, | ||
216 | b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>') | ||
217 | |||
218 | def test_ascii_in_unicode_out(self): | ||
219 | # ASCII input is converted to Unicode. The original_encoding | ||
220 | # attribute is set to 'utf-8', a superset of ASCII. | ||
221 | chardet = bs4.dammit.chardet_dammit | ||
222 | logging.disable(logging.WARNING) | ||
223 | try: | ||
224 | def noop(str): | ||
225 | return None | ||
226 | # Disable chardet, which will realize that the ASCII is ASCII. | ||
227 | bs4.dammit.chardet_dammit = noop | ||
228 | ascii = b"<foo>a</foo>" | ||
229 | soup_from_ascii = self.soup(ascii) | ||
230 | unicode_output = soup_from_ascii.decode() | ||
231 | self.assertTrue(isinstance(unicode_output, str)) | ||
232 | self.assertEqual(unicode_output, self.document_for(ascii.decode())) | ||
233 | self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8") | ||
234 | finally: | ||
235 | logging.disable(logging.NOTSET) | ||
236 | bs4.dammit.chardet_dammit = chardet | ||
237 | |||
238 | def test_unicode_in_unicode_out(self): | ||
239 | # Unicode input is left alone. The original_encoding attribute | ||
240 | # is not set. | ||
241 | soup_from_unicode = self.soup(self.unicode_data) | ||
242 | self.assertEqual(soup_from_unicode.decode(), self.unicode_data) | ||
243 | self.assertEqual(soup_from_unicode.foo.string, 'Sacr\xe9 bleu!') | ||
244 | self.assertEqual(soup_from_unicode.original_encoding, None) | ||
245 | |||
246 | def test_utf8_in_unicode_out(self): | ||
247 | # UTF-8 input is converted to Unicode. The original_encoding | ||
248 | # attribute is set. | ||
249 | soup_from_utf8 = self.soup(self.utf8_data) | ||
250 | self.assertEqual(soup_from_utf8.decode(), self.unicode_data) | ||
251 | self.assertEqual(soup_from_utf8.foo.string, 'Sacr\xe9 bleu!') | ||
252 | |||
253 | def test_utf8_out(self): | ||
254 | # The internal data structures can be encoded as UTF-8. | ||
255 | soup_from_unicode = self.soup(self.unicode_data) | ||
256 | self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data) | ||
257 | |||
258 | @skipIf( | ||
259 | PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2, | ||
260 | "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.") | ||
261 | def test_attribute_name_containing_unicode_characters(self): | ||
262 | markup = '<div><a \N{SNOWMAN}="snowman"></a></div>' | ||
263 | self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8")) | ||
264 | |||
265 | class TestUnicodeDammit(unittest.TestCase): | ||
266 | """Standalone tests of UnicodeDammit.""" | ||
267 | |||
268 | def test_unicode_input(self): | ||
269 | markup = "I'm already Unicode! \N{SNOWMAN}" | ||
270 | dammit = UnicodeDammit(markup) | ||
271 | self.assertEqual(dammit.unicode_markup, markup) | ||
272 | |||
273 | def test_smart_quotes_to_unicode(self): | ||
274 | markup = b"<foo>\x91\x92\x93\x94</foo>" | ||
275 | dammit = UnicodeDammit(markup) | ||
276 | self.assertEqual( | ||
277 | dammit.unicode_markup, "<foo>\u2018\u2019\u201c\u201d</foo>") | ||
278 | |||
279 | def test_smart_quotes_to_xml_entities(self): | ||
280 | markup = b"<foo>\x91\x92\x93\x94</foo>" | ||
281 | dammit = UnicodeDammit(markup, smart_quotes_to="xml") | ||
282 | self.assertEqual( | ||
283 | dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>") | ||
284 | |||
285 | def test_smart_quotes_to_html_entities(self): | ||
286 | markup = b"<foo>\x91\x92\x93\x94</foo>" | ||
287 | dammit = UnicodeDammit(markup, smart_quotes_to="html") | ||
288 | self.assertEqual( | ||
289 | dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>") | ||
290 | |||
291 | def test_smart_quotes_to_ascii(self): | ||
292 | markup = b"<foo>\x91\x92\x93\x94</foo>" | ||
293 | dammit = UnicodeDammit(markup, smart_quotes_to="ascii") | ||
294 | self.assertEqual( | ||
295 | dammit.unicode_markup, """<foo>''""</foo>""") | ||
296 | |||
297 | def test_detect_utf8(self): | ||
298 | utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83" | ||
299 | dammit = UnicodeDammit(utf8) | ||
300 | self.assertEqual(dammit.original_encoding.lower(), 'utf-8') | ||
301 | self.assertEqual(dammit.unicode_markup, 'Sacr\xe9 bleu! \N{SNOWMAN}') | ||
302 | |||
303 | |||
304 | def test_convert_hebrew(self): | ||
305 | hebrew = b"\xed\xe5\xec\xf9" | ||
306 | dammit = UnicodeDammit(hebrew, ["iso-8859-8"]) | ||
307 | self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8') | ||
308 | self.assertEqual(dammit.unicode_markup, '\u05dd\u05d5\u05dc\u05e9') | ||
309 | |||
310 | def test_dont_see_smart_quotes_where_there_are_none(self): | ||
311 | utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch" | ||
312 | dammit = UnicodeDammit(utf_8) | ||
313 | self.assertEqual(dammit.original_encoding.lower(), 'utf-8') | ||
314 | self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8) | ||
315 | |||
316 | def test_ignore_inappropriate_codecs(self): | ||
317 | utf8_data = "Räksmörgås".encode("utf-8") | ||
318 | dammit = UnicodeDammit(utf8_data, ["iso-8859-8"]) | ||
319 | self.assertEqual(dammit.original_encoding.lower(), 'utf-8') | ||
320 | |||
321 | def test_ignore_invalid_codecs(self): | ||
322 | utf8_data = "Räksmörgås".encode("utf-8") | ||
323 | for bad_encoding in ['.utf8', '...', 'utF---16.!']: | ||
324 | dammit = UnicodeDammit(utf8_data, [bad_encoding]) | ||
325 | self.assertEqual(dammit.original_encoding.lower(), 'utf-8') | ||
326 | |||
327 | def test_exclude_encodings(self): | ||
328 | # This is UTF-8. | ||
329 | utf8_data = "Räksmörgås".encode("utf-8") | ||
330 | |||
331 | # But if we exclude UTF-8 from consideration, the guess is | ||
332 | # Windows-1252. | ||
333 | dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"]) | ||
334 | self.assertEqual(dammit.original_encoding.lower(), 'windows-1252') | ||
335 | |||
336 | # And if we exclude that, there is no valid guess at all. | ||
337 | dammit = UnicodeDammit( | ||
338 | utf8_data, exclude_encodings=["utf-8", "windows-1252"]) | ||
339 | self.assertEqual(dammit.original_encoding, None) | ||
340 | |||
341 | def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self): | ||
342 | detected = EncodingDetector( | ||
343 | b'<?xml version="1.0" encoding="UTF-\xdb" ?>') | ||
344 | encodings = list(detected.encodings) | ||
345 | assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings | ||
346 | |||
347 | def test_detect_html5_style_meta_tag(self): | ||
348 | |||
349 | for data in ( | ||
350 | b'<html><meta charset="euc-jp" /></html>', | ||
351 | b"<html><meta charset='euc-jp' /></html>", | ||
352 | b"<html><meta charset=euc-jp /></html>", | ||
353 | b"<html><meta charset=euc-jp/></html>"): | ||
354 | dammit = UnicodeDammit(data, is_html=True) | ||
355 | self.assertEqual( | ||
356 | "euc-jp", dammit.original_encoding) | ||
357 | |||
358 | def test_last_ditch_entity_replacement(self): | ||
359 | # This is a UTF-8 document that contains bytestrings | ||
360 | # completely incompatible with UTF-8 (ie. encoded with some other | ||
361 | # encoding). | ||
362 | # | ||
363 | # Since there is no consistent encoding for the document, | ||
364 | # Unicode, Dammit will eventually encode the document as UTF-8 | ||
365 | # and encode the incompatible characters as REPLACEMENT | ||
366 | # CHARACTER. | ||
367 | # | ||
368 | # If chardet is installed, it will detect that the document | ||
369 | # can be converted into ISO-8859-1 without errors. This happens | ||
370 | # to be the wrong encoding, but it is a consistent encoding, so the | ||
371 | # code we're testing here won't run. | ||
372 | # | ||
373 | # So we temporarily disable chardet if it's present. | ||
374 | doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?> | ||
375 | <html><b>\330\250\330\252\330\261</b> | ||
376 | <i>\310\322\321\220\312\321\355\344</i></html>""" | ||
377 | chardet = bs4.dammit.chardet_dammit | ||
378 | logging.disable(logging.WARNING) | ||
379 | try: | ||
380 | def noop(str): | ||
381 | return None | ||
382 | bs4.dammit.chardet_dammit = noop | ||
383 | dammit = UnicodeDammit(doc) | ||
384 | self.assertEqual(True, dammit.contains_replacement_characters) | ||
385 | self.assertTrue("\ufffd" in dammit.unicode_markup) | ||
386 | |||
387 | soup = BeautifulSoup(doc, "html.parser") | ||
388 | self.assertTrue(soup.contains_replacement_characters) | ||
389 | finally: | ||
390 | logging.disable(logging.NOTSET) | ||
391 | bs4.dammit.chardet_dammit = chardet | ||
392 | |||
393 | def test_byte_order_mark_removed(self): | ||
394 | # A document written in UTF-16LE will have its byte order marker stripped. | ||
395 | data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00' | ||
396 | dammit = UnicodeDammit(data) | ||
397 | self.assertEqual("<a>áé</a>", dammit.unicode_markup) | ||
398 | self.assertEqual("utf-16le", dammit.original_encoding) | ||
399 | |||
400 | def test_detwingle(self): | ||
401 | # Here's a UTF8 document. | ||
402 | utf8 = ("\N{SNOWMAN}" * 3).encode("utf8") | ||
403 | |||
404 | # Here's a Windows-1252 document. | ||
405 | windows_1252 = ( | ||
406 | "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!" | ||
407 | "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252") | ||
408 | |||
409 | # Through some unholy alchemy, they've been stuck together. | ||
410 | doc = utf8 + windows_1252 + utf8 | ||
411 | |||
412 | # The document can't be turned into UTF-8: | ||
413 | self.assertRaises(UnicodeDecodeError, doc.decode, "utf8") | ||
414 | |||
415 | # Unicode, Dammit thinks the whole document is Windows-1252, | ||
416 | # and decodes it into mojibake: each UTF-8 snowman, for instance, comes out as "â˜ƒ". | ||
417 | |||
418 | # But if we run it through UnicodeDammit.detwingle(), it's fixed: | ||
419 | |||
420 | fixed = UnicodeDammit.detwingle(doc) | ||
421 | self.assertEqual( | ||
422 | "☃☃☃“Hi, I like Windows!â€â˜ƒâ˜ƒâ˜ƒ", fixed.decode("utf8")) | ||
423 | |||
424 | def test_detwingle_ignores_multibyte_characters(self): | ||
425 | # Each of these characters has a UTF-8 representation ending | ||
426 | # in \x93. \x93 is a smart quote if interpreted as | ||
427 | # Windows-1252. But our code knows to skip over multibyte | ||
428 | # UTF-8 characters, so they'll survive the process unscathed. | ||
429 | for tricky_unicode_char in ( | ||
430 | "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93' | ||
431 | "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93' | ||
432 | "\xf0\x90\x90\x93", # This is a CJK character, not sure which one. | ||
433 | ): | ||
434 | input = tricky_unicode_char.encode("utf8") | ||
435 | self.assertTrue(input.endswith(b'\x93')) | ||
436 | output = UnicodeDammit.detwingle(input) | ||
437 | self.assertEqual(output, input) | ||
438 | |||
439 | class TestNamespacedAttribute(SoupTest): | ||
440 | |||
441 | def test_name_may_be_none(self): | ||
442 | a = NamespacedAttribute("xmlns", None) | ||
443 | self.assertEqual(a, "xmlns") | ||
444 | |||
445 | def test_attribute_is_equivalent_to_colon_separated_string(self): | ||
446 | a = NamespacedAttribute("a", "b") | ||
447 | self.assertEqual("a:b", a) | ||
448 | |||
449 | def test_attributes_are_equivalent_if_prefix_and_name_identical(self): | ||
450 | a = NamespacedAttribute("a", "b", "c") | ||
451 | b = NamespacedAttribute("a", "b", "c") | ||
452 | self.assertEqual(a, b) | ||
453 | |||
454 | # The actual namespace is not considered. | ||
455 | c = NamespacedAttribute("a", "b", None) | ||
456 | self.assertEqual(a, c) | ||
457 | |||
458 | # But name and prefix are important. | ||
459 | d = NamespacedAttribute("a", "z", "c") | ||
460 | self.assertNotEqual(a, d) | ||
461 | |||
462 | e = NamespacedAttribute("z", "b", "c") | ||
463 | self.assertNotEqual(a, e) | ||
464 | |||
465 | |||
466 | class TestAttributeValueWithCharsetSubstitution(unittest.TestCase): | ||
467 | |||
468 | def test_charset_meta_attribute_value(self): | ||
469 | value = CharsetMetaAttributeValue("euc-jp") | ||
470 | self.assertEqual("euc-jp", value) | ||
471 | self.assertEqual("euc-jp", value.original_value) | ||
472 | self.assertEqual("utf8", value.encode("utf8")) | ||
473 | |||
474 | |||
475 | def test_content_meta_attribute_value(self): | ||
476 | value = ContentMetaAttributeValue("text/html; charset=euc-jp") | ||
477 | self.assertEqual("text/html; charset=euc-jp", value) | ||
478 | self.assertEqual("text/html; charset=euc-jp", value.original_value) | ||
479 | self.assertEqual("text/html; charset=utf8", value.encode("utf8")) | ||
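Most of this deleted file revolves around UnicodeDammit, so a compact sketch of the behaviours it covered (detection, smart-quote conversion, detwingling) may help:

    from bs4.dammit import UnicodeDammit

    # Encoding detection: raw UTF-8 bytes come back as Unicode.
    d = UnicodeDammit("Sacr\u00e9 bleu! \u2603".encode("utf-8"))
    print(d.original_encoding)  # utf-8
    print(d.unicode_markup)     # Sacré bleu! ☃

    # Windows-1252 smart quotes can be rewritten as HTML entities.
    d = UnicodeDammit(b"<p>\x91Foo\x92</p>", smart_quotes_to="html")
    print(d.unicode_markup)     # <p>&lsquo;Foo&rsquo;</p>

    # detwingle() repairs byte strings that embed Windows-1252 in UTF-8.
    mixed = "\u2603".encode("utf-8") + "\u201cHi!\u201d".encode("windows-1252")
    print(UnicodeDammit.detwingle(mixed).decode("utf-8"))  # ☃“Hi!”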
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py deleted file mode 100644 index cf0f1abe0c..0000000000 --- a/bitbake/lib/bs4/tests/test_tree.py +++ /dev/null | |||
@@ -1,2004 +0,0 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | """Tests for Beautiful Soup's tree traversal methods. | ||
3 | |||
4 | The tree traversal methods are the main advantage of using Beautiful | ||
5 | Soup over just using a parser. | ||
6 | |||
7 | Different parsers will build different Beautiful Soup trees given the | ||
8 | same markup, but all Beautiful Soup trees can be traversed with the | ||
9 | methods tested here. | ||
10 | """ | ||
11 | |||
12 | import copy | ||
13 | import pickle | ||
14 | import re | ||
15 | import warnings | ||
16 | from bs4 import BeautifulSoup | ||
17 | from bs4.builder import builder_registry | ||
18 | from bs4.element import ( | ||
19 | PY3K, | ||
20 | CData, | ||
21 | Comment, | ||
22 | Declaration, | ||
23 | Doctype, | ||
24 | NavigableString, | ||
25 | SoupStrainer, | ||
26 | Tag, | ||
27 | ) | ||
28 | from bs4.testing import SoupTest | ||
29 | |||
30 | XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None) | ||
31 | LXML_PRESENT = (builder_registry.lookup("lxml") is not None) | ||
32 | |||
33 | class TreeTest(SoupTest): | ||
34 | |||
35 | def assertSelects(self, tags, should_match): | ||
36 | """Make sure that the given tags have the correct text. | ||
37 | |||
38 | This is used in tests that define a bunch of tags, each | ||
39 | containing a single string, and then select certain strings by | ||
40 | some mechanism. | ||
41 | """ | ||
42 | self.assertEqual([tag.string for tag in tags], should_match) | ||
43 | |||
44 | def assertSelectsIDs(self, tags, should_match): | ||
45 | """Make sure that the given tags have the correct IDs. | ||
46 | |||
47 | This is used in tests that define a bunch of tags, each | ||
48 | containing a single string, and then select certain strings by | ||
49 | some mechanism. | ||
50 | """ | ||
51 | self.assertEqual([tag['id'] for tag in tags], should_match) | ||
52 | |||
53 | |||
54 | class TestFind(TreeTest): | ||
55 | """Basic tests of the find() method. | ||
56 | |||
57 | find() just calls find_all() with limit=1, so it's not tested all | ||
58 | that thoroughly here. | ||
59 | """ | ||
60 | |||
61 | def test_find_tag(self): | ||
62 | soup = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>") | ||
63 | self.assertEqual(soup.find("b").string, "2") | ||
64 | |||
65 | def test_unicode_text_find(self): | ||
66 | soup = self.soup('<h1>Räksmörgås</h1>') | ||
67 | self.assertEqual(soup.find(string='Räksmörgås'), 'Räksmörgås') | ||
68 | |||
69 | def test_unicode_attribute_find(self): | ||
70 | soup = self.soup('<h1 id="Räksmörgås">here it is</h1>') | ||
71 | str(soup) | ||
72 | self.assertEqual("here it is", soup.find(id='Räksmörgås').text) | ||
73 | |||
74 | |||
75 | def test_find_everything(self): | ||
76 | """Test an optimization that finds all tags.""" | ||
77 | soup = self.soup("<a>foo</a><b>bar</b>") | ||
78 | self.assertEqual(2, len(soup.find_all())) | ||
79 | |||
80 | def test_find_everything_with_name(self): | ||
81 | """Test an optimization that finds all tags with a given name.""" | ||
82 | soup = self.soup("<a>foo</a><b>bar</b><a>baz</a>") | ||
83 | self.assertEqual(2, len(soup.find_all('a'))) | ||
84 | |||
85 | class TestFindAll(TreeTest): | ||
86 | """Basic tests of the find_all() method.""" | ||
87 | |||
88 | def test_find_all_text_nodes(self): | ||
89 | """You can search the tree for text nodes.""" | ||
90 | soup = self.soup("<html>Foo<b>bar</b>\xbb</html>") | ||
91 | # Exact match. | ||
92 | self.assertEqual(soup.find_all(string="bar"), ["bar"]) | ||
93 | self.assertEqual(soup.find_all(text="bar"), ["bar"]) | ||
94 | # Match any of a number of strings. | ||
95 | self.assertEqual( | ||
96 | soup.find_all(text=["Foo", "bar"]), ["Foo", "bar"]) | ||
97 | # Match a regular expression. | ||
98 | self.assertEqual(soup.find_all(text=re.compile('.*')), | ||
99 | ["Foo", "bar", '\xbb']) | ||
100 | # Match anything. | ||
101 | self.assertEqual(soup.find_all(text=True), | ||
102 | ["Foo", "bar", '\xbb']) | ||
103 | |||
104 | def test_find_all_limit(self): | ||
105 | """You can limit the number of items returned by find_all.""" | ||
106 | soup = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>") | ||
107 | self.assertSelects(soup.find_all('a', limit=3), ["1", "2", "3"]) | ||
108 | self.assertSelects(soup.find_all('a', limit=1), ["1"]) | ||
109 | self.assertSelects( | ||
110 | soup.find_all('a', limit=10), ["1", "2", "3", "4", "5"]) | ||
111 | |||
112 | # A limit of 0 means no limit. | ||
113 | self.assertSelects( | ||
114 | soup.find_all('a', limit=0), ["1", "2", "3", "4", "5"]) | ||
115 | |||
116 | def test_calling_a_tag_is_calling_findall(self): | ||
117 | soup = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>") | ||
118 | self.assertSelects(soup('a', limit=1), ["1"]) | ||
119 | self.assertSelects(soup.b(id="foo"), ["3"]) | ||
120 | |||
121 | def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self): | ||
122 | soup = self.soup("<a></a>") | ||
123 | # Create a self-referential list. | ||
124 | l = [] | ||
125 | l.append(l) | ||
126 | |||
127 | # Without special code in _normalize_search_value, this would cause infinite | ||
128 | # recursion. | ||
129 | self.assertEqual([], soup.find_all(l)) | ||
130 | |||
131 | def test_find_all_resultset(self): | ||
132 | """All find_all calls return a ResultSet""" | ||
133 | soup = self.soup("<a></a>") | ||
134 | result = soup.find_all("a") | ||
135 | self.assertTrue(hasattr(result, "source")) | ||
136 | |||
137 | result = soup.find_all(True) | ||
138 | self.assertTrue(hasattr(result, "source")) | ||
139 | |||
140 | result = soup.find_all(text="foo") | ||
141 | self.assertTrue(hasattr(result, "source")) | ||
142 | |||
143 | |||
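The find_all() behaviours covered above condense to a few lines; a quick sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<a>1</a><a>2</a><a>3</a>", "html.parser")

    # limit caps the number of results; limit=0 means no limit at all.
    print([a.string for a in soup.find_all("a", limit=2)])  # ['1', '2']

    # Calling the soup (or any tag) is shorthand for find_all().
    print([a.string for a in soup("a")])  # ['1', '2', '3']

    # text/string matching returns the matching NavigableStrings.
    print(soup.find_all(string="2"))  # ['2']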
144 | class TestFindAllBasicNamespaces(TreeTest): | ||
145 | |||
146 | def test_find_by_namespaced_name(self): | ||
147 | soup = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">') | ||
148 | self.assertEqual("4", soup.find("mathml:msqrt").string) | ||
149 | self.assertEqual("a", soup.find(attrs= { "svg:fill" : "red" }).name) | ||
150 | |||
151 | |||
152 | class TestFindAllByName(TreeTest): | ||
153 | """Test ways of finding tags by tag name.""" | ||
154 | |||
155 | def setUp(self): | ||
156 | super(TreeTest, self).setUp() | ||
157 | self.tree = self.soup("""<a>First tag.</a> | ||
158 | <b>Second tag.</b> | ||
159 | <c>Third <a>Nested tag.</a> tag.</c>""") | ||
160 | |||
161 | def test_find_all_by_tag_name(self): | ||
162 | # Find all the <a> tags. | ||
163 | self.assertSelects( | ||
164 | self.tree.find_all('a'), ['First tag.', 'Nested tag.']) | ||
165 | |||
166 | def test_find_all_by_name_and_text(self): | ||
167 | self.assertSelects( | ||
168 | self.tree.find_all('a', text='First tag.'), ['First tag.']) | ||
169 | |||
170 | self.assertSelects( | ||
171 | self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.']) | ||
172 | |||
173 | self.assertSelects( | ||
174 | self.tree.find_all('a', text=re.compile("tag")), | ||
175 | ['First tag.', 'Nested tag.']) | ||
176 | |||
177 | |||
178 | def test_find_all_on_non_root_element(self): | ||
179 | # You can call find_all on any node, not just the root. | ||
180 | self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.']) | ||
181 | |||
182 | def test_calling_element_invokes_find_all(self): | ||
183 | self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.']) | ||
184 | |||
185 | def test_find_all_by_tag_strainer(self): | ||
186 | self.assertSelects( | ||
187 | self.tree.find_all(SoupStrainer('a')), | ||
188 | ['First tag.', 'Nested tag.']) | ||
189 | |||
190 | def test_find_all_by_tag_names(self): | ||
191 | self.assertSelects( | ||
192 | self.tree.find_all(['a', 'b']), | ||
193 | ['First tag.', 'Second tag.', 'Nested tag.']) | ||
194 | |||
195 | def test_find_all_by_tag_dict(self): | ||
196 | self.assertSelects( | ||
197 | self.tree.find_all({'a' : True, 'b' : True}), | ||
198 | ['First tag.', 'Second tag.', 'Nested tag.']) | ||
199 | |||
200 | def test_find_all_by_tag_re(self): | ||
201 | self.assertSelects( | ||
202 | self.tree.find_all(re.compile('^[ab]$')), | ||
203 | ['First tag.', 'Second tag.', 'Nested tag.']) | ||
204 | |||
205 | def test_find_all_with_tags_matching_method(self): | ||
206 | # You can define an oracle method that determines whether | ||
207 | # a tag matches the search. | ||
208 | def id_matches_name(tag): | ||
209 | return tag.name == tag.get('id') | ||
210 | |||
211 | tree = self.soup("""<a id="a">Match 1.</a> | ||
212 | <a id="1">Does not match.</a> | ||
213 | <b id="b">Match 2.</a>""") | ||
214 | |||
215 | self.assertSelects( | ||
216 | tree.find_all(id_matches_name), ["Match 1.", "Match 2."]) | ||
217 | |||
218 | |||
219 | class TestFindAllByAttribute(TreeTest): | ||
220 | |||
221 | def test_find_all_by_attribute_name(self): | ||
222 | # You can pass in keyword arguments to find_all to search by | ||
223 | # attribute. | ||
224 | tree = self.soup(""" | ||
225 | <a id="first">Matching a.</a> | ||
226 | <a id="second"> | ||
227 | Non-matching <b id="first">Matching b.</b>a. | ||
228 | </a>""") | ||
229 | self.assertSelects(tree.find_all(id='first'), | ||
230 | ["Matching a.", "Matching b."]) | ||
231 | |||
232 | def test_find_all_by_utf8_attribute_value(self): | ||
233 | peace = "םולש".encode("utf8") | ||
234 | data = '<a title="םולש"></a>'.encode("utf8") | ||
235 | soup = self.soup(data) | ||
236 | self.assertEqual([soup.a], soup.find_all(title=peace)) | ||
237 | self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8"))) | ||
238 | self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"])) | ||
239 | |||
240 | def test_find_all_by_attribute_dict(self): | ||
241 | # You can pass in a dictionary as the argument 'attrs'. This | ||
242 | # lets you search for attributes like 'name' (a fixed argument | ||
243 | # to find_all) and 'class' (a reserved word in Python.) | ||
244 | tree = self.soup(""" | ||
245 | <a name="name1" class="class1">Name match.</a> | ||
246 | <a name="name2" class="class2">Class match.</a> | ||
247 | <a name="name3" class="class3">Non-match.</a> | ||
248 | <name1>A tag called 'name1'.</name1> | ||
249 | """) | ||
250 | |||
251 | # This doesn't do what you want. | ||
252 | self.assertSelects(tree.find_all(name='name1'), | ||
253 | ["A tag called 'name1'."]) | ||
254 | # This does what you want. | ||
255 | self.assertSelects(tree.find_all(attrs={'name' : 'name1'}), | ||
256 | ["Name match."]) | ||
257 | |||
258 | self.assertSelects(tree.find_all(attrs={'class' : 'class2'}), | ||
259 | ["Class match."]) | ||
260 | |||
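# A standalone sketch of the pitfall tested above, assuming bs4/html.parser:
# the "name" keyword is taken by find_all's tag-name parameter, so a literal
# name="..." attribute (or the Python keyword "class") needs the attrs dict.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<a name="n1" class="c1">x</a><n1>y</n1>', "html.parser")
soup.find_all(name="n1")              # matches the <n1> tag, not the attribute
soup.find_all(attrs={"name": "n1"})   # matches <a> by its name attribute
soup.find_all(attrs={"class": "c1"})  # "class" is a Python keyword, use attrs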
261 | def test_find_all_by_class(self): | ||
262 | tree = self.soup(""" | ||
263 | <a class="1">Class 1.</a> | ||
264 | <a class="2">Class 2.</a> | ||
265 | <b class="1">Class 1.</b> | ||
266 | <c class="3 4">Class 3 and 4.</c> | ||
267 | """) | ||
268 | |||
269 | # Passing in the class_ keyword argument will search against | ||
270 | # the 'class' attribute. | ||
271 | self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.']) | ||
272 | self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.']) | ||
273 | self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.']) | ||
274 | |||
275 | # Passing in a string to 'attrs' will also search the CSS class. | ||
276 | self.assertSelects(tree.find_all('a', '1'), ['Class 1.']) | ||
277 | self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.']) | ||
278 | self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.']) | ||
279 | self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.']) | ||
280 | |||
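# A standalone sketch of class_ matching, assuming bs4/html.parser: class_
# matches any one of a tag's space-separated CSS classes, and a bare string
# in the attrs position is shorthand for the same search.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<c class="3 4">x</c>', "html.parser")
soup.find_all("c", class_="3")  # matches: "3" is one of the tag's classes
soup.find_all("c", "4")         # the same search, positional shorthand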
281 | def test_find_by_class_when_multiple_classes_present(self): | ||
282 | tree = self.soup("<gar class='foo bar'>Found it</gar>") | ||
283 | |||
284 | f = tree.find_all("gar", class_=re.compile("o")) | ||
285 | self.assertSelects(f, ["Found it"]) | ||
286 | |||
287 | f = tree.find_all("gar", class_=re.compile("a")) | ||
288 | self.assertSelects(f, ["Found it"]) | ||
289 | |||
290 | # Since the class is not the string "foo bar", but the two | ||
291 | # strings "foo" and "bar", this will not find anything. | ||
292 | f = tree.find_all("gar", class_=re.compile("o b")) | ||
293 | self.assertSelects(f, []) | ||
294 | |||
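# A standalone sketch of the per-value matching described above, assuming
# bs4/html.parser: a class_ regex is tried against each class string
# separately, never against the joined value "foo bar".
import re
from bs4 import BeautifulSoup

soup = BeautifulSoup("<gar class='foo bar'>x</gar>", "html.parser")
soup.find_all("gar", class_=re.compile("o"))    # matches, via "foo"
soup.find_all("gar", class_=re.compile("o b"))  # no match: spans two values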
295 | def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self): | ||
296 | soup = self.soup("<a class='bar'>Found it</a>") | ||
297 | |||
298 | self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"]) | ||
299 | |||
300 | def big_attribute_value(value): | ||
301 | return len(value) > 3 | ||
302 | |||
303 | self.assertSelects(soup.find_all("a", big_attribute_value), []) | ||
304 | |||
305 | def small_attribute_value(value): | ||
306 | return len(value) <= 3 | ||
307 | |||
308 | self.assertSelects( | ||
309 | soup.find_all("a", small_attribute_value), ["Found it"]) | ||
310 | |||
311 | def test_find_all_with_string_for_attrs_finds_multiple_classes(self): | ||
312 | soup = self.soup('<a class="foo bar"></a><a class="foo"></a>') | ||
313 | a, a2 = soup.find_all("a") | ||
314 | self.assertEqual([a, a2], soup.find_all("a", "foo")) | ||
315 | self.assertEqual([a], soup.find_all("a", "bar")) | ||
316 | |||
317 | # If you specify the class as a string that contains a | ||
318 | # space, only that specific value will be found. | ||
319 | self.assertEqual([a], soup.find_all("a", class_="foo bar")) | ||
320 | self.assertEqual([a], soup.find_all("a", "foo bar")) | ||
321 | self.assertEqual([], soup.find_all("a", "bar foo")) | ||
322 | |||
323 | def test_find_all_by_attribute_soupstrainer(self): | ||
324 | tree = self.soup(""" | ||
325 | <a id="first">Match.</a> | ||
326 | <a id="second">Non-match.</a>""") | ||
327 | |||
328 | strainer = SoupStrainer(attrs={'id' : 'first'}) | ||
329 | self.assertSelects(tree.find_all(strainer), ['Match.']) | ||
330 | |||
331 | def test_find_all_with_missing_attribute(self): | ||
332 | # You can pass in None as the value of an attribute to find_all. | ||
333 | # This will match tags that do not have that attribute set. | ||
334 | tree = self.soup("""<a id="1">ID present.</a> | ||
335 | <a>No ID present.</a> | ||
336 | <a id="">ID is empty.</a>""") | ||
337 | self.assertSelects(tree.find_all('a', id=None), ["No ID present."]) | ||
338 | |||
339 | def test_find_all_with_defined_attribute(self): | ||
340 | # You can pass in None as the value of an attribute to find_all. | ||
341 | # This will match tags that have that attribute set to any value. | ||
342 | tree = self.soup("""<a id="1">ID present.</a> | ||
343 | <a>No ID present.</a> | ||
344 | <a id="">ID is empty.</a>""") | ||
345 | self.assertSelects( | ||
346 | tree.find_all(id=True), ["ID present.", "ID is empty."]) | ||
347 | |||
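# A standalone sketch contrasting the two conventions tested above, assuming
# bs4/html.parser: attribute=None matches tags missing the attribute, while
# attribute=True matches tags that set it to anything, even the empty string.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<a id="1">x</a><a>y</a><a id="">z</a>', "html.parser")
soup.find_all("a", id=None)  # -> only the tag with no id at all
soup.find_all("a", id=True)  # -> the tags with id="1" and id=""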
348 | def test_find_all_with_numeric_attribute(self): | ||
349 | # If you search for a number, it's treated as a string. | ||
350 | tree = self.soup("""<a id=1>Unquoted attribute.</a> | ||
351 | <a id="1">Quoted attribute.</a>""") | ||
352 | |||
353 | expected = ["Unquoted attribute.", "Quoted attribute."] | ||
354 | self.assertSelects(tree.find_all(id=1), expected) | ||
355 | self.assertSelects(tree.find_all(id="1"), expected) | ||
356 | |||
357 | def test_find_all_with_list_attribute_values(self): | ||
358 | # You can pass a list of attribute values instead of just one, | ||
359 | # and you'll get tags that match any of the values. | ||
360 | tree = self.soup("""<a id="1">1</a> | ||
361 | <a id="2">2</a> | ||
362 | <a id="3">3</a> | ||
363 | <a>No ID.</a>""") | ||
364 | self.assertSelects(tree.find_all(id=["1", "3", "4"]), | ||
365 | ["1", "3"]) | ||
366 | |||
367 | def test_find_all_with_regular_expression_attribute_value(self): | ||
368 | # You can pass a regular expression as an attribute value, and | ||
369 | # you'll get tags whose values for that attribute match the | ||
370 | # regular expression. | ||
371 | tree = self.soup("""<a id="a">One a.</a> | ||
372 | <a id="aa">Two as.</a> | ||
373 | <a id="ab">Mixed as and bs.</a> | ||
374 | <a id="b">One b.</a> | ||
375 | <a>No ID.</a>""") | ||
376 | |||
377 | self.assertSelects(tree.find_all(id=re.compile("^a+$")), | ||
378 | ["One a.", "Two as."]) | ||
379 | |||
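# A standalone sketch of regex attribute values, assuming bs4/html.parser:
# the pattern is applied with re.search, so anchors decide whether the whole
# attribute value must match.
import re
from bs4 import BeautifulSoup

soup = BeautifulSoup('<a id="a">1</a><a id="ab">2</a>', "html.parser")
soup.find_all(id=re.compile("^a+$"))  # only id="a": the anchors exclude "ab"
soup.find_all(id=re.compile("a"))     # both tags: an unanchored search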
380 | def test_find_by_name_and_containing_string(self): | ||
381 | soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>") | ||
382 | a = soup.a | ||
383 | |||
384 | self.assertEqual([a], soup.find_all("a", text="foo")) | ||
385 | self.assertEqual([], soup.find_all("a", text="bar")) | ||
387 | |||
388 | def test_find_by_name_and_containing_string_when_string_is_buried(self): | ||
389 | soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>") | ||
390 | self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo")) | ||
391 | |||
392 | def test_find_by_attribute_and_containing_string(self): | ||
393 | soup = self.soup('<b id="1">foo</b><a id="2">foo</a>') | ||
394 | a = soup.a | ||
395 | |||
396 | self.assertEqual([a], soup.find_all(id=2, text="foo")) | ||
397 | self.assertEqual([], soup.find_all(id=1, text="bar")) | ||
398 | |||
399 | |||
402 | class TestIndex(TreeTest): | ||
403 | """Test Tag.index""" | ||
404 | def test_index(self): | ||
405 | tree = self.soup("""<div> | ||
406 | <a>Identical</a> | ||
407 | <b>Not identical</b> | ||
408 | <a>Identical</a> | ||
409 | |||
410 | <c><d>Identical with child</d></c> | ||
411 | <b>Also not identical</b> | ||
412 | <c><d>Identical with child</d></c> | ||
413 | </div>""") | ||
414 | div = tree.div | ||
415 | for i, element in enumerate(div.contents): | ||
416 | self.assertEqual(i, div.index(element)) | ||
417 | self.assertRaises(ValueError, tree.index, 1) | ||
418 | |||
419 | |||
420 | class TestParentOperations(TreeTest): | ||
421 | """Test navigation and searching through an element's parents.""" | ||
422 | |||
423 | def setUp(self): | ||
424 | super(TestParentOperations, self).setUp() | ||
425 | self.tree = self.soup('''<ul id="empty"></ul> | ||
426 | <ul id="top"> | ||
427 | <ul id="middle"> | ||
428 | <ul id="bottom"> | ||
429 | <b>Start here</b> | ||
430 | </ul> | ||
431 | </ul>''') | ||
432 | self.start = self.tree.b | ||
433 | |||
434 | |||
435 | def test_parent(self): | ||
436 | self.assertEqual(self.start.parent['id'], 'bottom') | ||
437 | self.assertEqual(self.start.parent.parent['id'], 'middle') | ||
438 | self.assertEqual(self.start.parent.parent.parent['id'], 'top') | ||
439 | |||
440 | def test_parent_of_top_tag_is_soup_object(self): | ||
441 | top_tag = self.tree.contents[0] | ||
442 | self.assertEqual(top_tag.parent, self.tree) | ||
443 | |||
444 | def test_soup_object_has_no_parent(self): | ||
445 | self.assertEqual(None, self.tree.parent) | ||
446 | |||
447 | def test_find_parents(self): | ||
448 | self.assertSelectsIDs( | ||
449 | self.start.find_parents('ul'), ['bottom', 'middle', 'top']) | ||
450 | self.assertSelectsIDs( | ||
451 | self.start.find_parents('ul', id="middle"), ['middle']) | ||
452 | |||
453 | def test_find_parent(self): | ||
454 | self.assertEqual(self.start.find_parent('ul')['id'], 'bottom') | ||
455 | self.assertEqual(self.start.find_parent('ul', id='top')['id'], 'top') | ||
456 | |||
457 | def test_parent_of_text_element(self): | ||
458 | text = self.tree.find(text="Start here") | ||
459 | self.assertEqual(text.parent.name, 'b') | ||
460 | |||
461 | def test_text_element_find_parent(self): | ||
462 | text = self.tree.find(text="Start here") | ||
463 | self.assertEqual(text.find_parent('ul')['id'], 'bottom') | ||
464 | |||
465 | def test_parent_generator(self): | ||
466 | parents = [parent['id'] for parent in self.start.parents | ||
467 | if parent is not None and 'id' in parent.attrs] | ||
468 | self.assertEqual(parents, ['bottom', 'middle', 'top']) | ||
469 | |||
470 | |||
471 | class ProximityTest(TreeTest): | ||
472 | |||
473 | def setUp(self): | ||
474 | super(ProximityTest, self).setUp() | ||
475 | self.tree = self.soup( | ||
476 | '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>') | ||
477 | |||
478 | |||
479 | class TestNextOperations(ProximityTest): | ||
480 | |||
481 | def setUp(self): | ||
482 | super(TestNextOperations, self).setUp() | ||
483 | self.start = self.tree.b | ||
484 | |||
485 | def test_next(self): | ||
486 | self.assertEqual(self.start.next_element, "One") | ||
487 | self.assertEqual(self.start.next_element.next_element['id'], "2") | ||
488 | |||
489 | def test_next_of_last_item_is_none(self): | ||
490 | last = self.tree.find(text="Three") | ||
491 | self.assertEqual(last.next_element, None) | ||
492 | |||
493 | def test_next_of_root_is_none(self): | ||
494 | # The document root is outside the next/previous chain. | ||
495 | self.assertEqual(self.tree.next_element, None) | ||
496 | |||
497 | def test_find_all_next(self): | ||
498 | self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"]) | ||
500 | self.assertSelects(self.start.find_all_next(id=3), ["Three"]) | ||
501 | |||
502 | def test_find_next(self): | ||
503 | self.assertEqual(self.start.find_next('b')['id'], '2') | ||
504 | self.assertEqual(self.start.find_next(text="Three"), "Three") | ||
505 | |||
506 | def test_find_next_for_text_element(self): | ||
507 | text = self.tree.find(text="One") | ||
508 | self.assertEqual(text.find_next("b").string, "Two") | ||
509 | self.assertSelects(text.find_all_next("b"), ["Two", "Three"]) | ||
510 | |||
511 | def test_next_generator(self): | ||
512 | start = self.tree.find(text="Two") | ||
513 | successors = [node for node in start.next_elements] | ||
514 | # There are two successors: the final <b> tag and its text contents. | ||
515 | tag, contents = successors | ||
516 | self.assertEqual(tag['id'], '3') | ||
517 | self.assertEqual(contents, "Three") | ||
518 | |||
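# A standalone sketch of the document-order traversal exercised above,
# assuming bs4/html.parser: next_elements yields every node, tags and
# strings alike, so a tag is immediately followed by its own text child.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<b id="1">One</b><b id="2">Two</b>', "html.parser")
start = soup.find(text="One")
list(start.next_elements)  # -> the <b id="2"> tag, then its string "Two"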
519 | class TestPreviousOperations(ProximityTest): | ||
520 | |||
521 | def setUp(self): | ||
522 | super(TestPreviousOperations, self).setUp() | ||
523 | self.end = self.tree.find(text="Three") | ||
524 | |||
525 | def test_previous(self): | ||
526 | self.assertEqual(self.end.previous_element['id'], "3") | ||
527 | self.assertEqual(self.end.previous_element.previous_element, "Two") | ||
528 | |||
529 | def test_previous_of_first_item_is_none(self): | ||
530 | first = self.tree.find('html') | ||
531 | self.assertEqual(first.previous_element, None) | ||
532 | |||
533 | def test_previous_of_root_is_none(self): | ||
534 | # The document root is outside the next/previous chain. | ||
535 | # XXX This is broken! | ||
536 | #self.assertEqual(self.tree.previous_element, None) | ||
537 | pass | ||
538 | |||
539 | def test_find_all_previous(self): | ||
540 | # The <b> tag containing the "Three" node is the predecessor | ||
541 | # of the "Three" node itself, which is why "Three" shows up | ||
542 | # here. | ||
543 | self.assertSelects( | ||
544 | self.end.find_all_previous('b'), ["Three", "Two", "One"]) | ||
545 | self.assertSelects(self.end.find_all_previous(id=1), ["One"]) | ||
546 | |||
547 | def test_find_previous(self): | ||
548 | self.assertEqual(self.end.find_previous('b')['id'], '3') | ||
549 | self.assertEqual(self.end.find_previous(text="One"), "One") | ||
550 | |||
551 | def test_find_previous_for_text_element(self): | ||
552 | text = self.tree.find(text="Three") | ||
553 | self.assertEqual(text.find_previous("b").string, "Three") | ||
554 | self.assertSelects( | ||
555 | text.find_all_previous("b"), ["Three", "Two", "One"]) | ||
556 | |||
557 | def test_previous_generator(self): | ||
558 | start = self.tree.find(text="One") | ||
559 | predecessors = [node for node in start.previous_elements] | ||
560 | |||
561 | # There are four predecessors: the <b> tag containing "One", | ||
562 | # the <body> tag, the <head> tag, and the <html> tag. | ||
563 | b, body, head, html = predecessors | ||
564 | self.assertEqual(b['id'], '1') | ||
565 | self.assertEqual(body.name, "body") | ||
566 | self.assertEqual(head.name, "head") | ||
567 | self.assertEqual(html.name, "html") | ||
568 | |||
569 | |||
570 | class SiblingTest(TreeTest): | ||
571 | |||
572 | def setUp(self): | ||
573 | super(SiblingTest, self).setUp() | ||
574 | markup = '''<html> | ||
575 | <span id="1"> | ||
576 | <span id="1.1"></span> | ||
577 | </span> | ||
578 | <span id="2"> | ||
579 | <span id="2.1"></span> | ||
580 | </span> | ||
581 | <span id="3"> | ||
582 | <span id="3.1"></span> | ||
583 | </span> | ||
584 | <span id="4"></span> | ||
585 | </html>''' | ||
586 | # All that whitespace looks good but makes the tests more | ||
587 | # difficult. Get rid of it. | ||
588 | markup = re.compile(r"\n\s*").sub("", markup) | ||
589 | self.tree = self.soup(markup) | ||
590 | |||
591 | |||
592 | class TestNextSibling(SiblingTest): | ||
593 | |||
594 | def setUp(self): | ||
595 | super(TestNextSibling, self).setUp() | ||
596 | self.start = self.tree.find(id="1") | ||
597 | |||
598 | def test_next_sibling_of_root_is_none(self): | ||
599 | self.assertEqual(self.tree.next_sibling, None) | ||
600 | |||
601 | def test_next_sibling(self): | ||
602 | self.assertEqual(self.start.next_sibling['id'], '2') | ||
603 | self.assertEqual(self.start.next_sibling.next_sibling['id'], '3') | ||
604 | |||
605 | # Note the difference between next_sibling and next_element. | ||
606 | self.assertEqual(self.start.next_element['id'], '1.1') | ||
607 | |||
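# A standalone sketch of the distinction noted above, assuming
# bs4/html.parser: next_sibling skips over a tag's descendants, while
# next_element descends into them.
from bs4 import BeautifulSoup

markup = '<span id="1"><span id="1.1"></span></span><span id="2"></span>'
soup = BeautifulSoup(markup, "html.parser")
first = soup.find(id="1")
first.next_element["id"]  # -> "1.1", the nested child
first.next_sibling["id"]  # -> "2", the neighbour at the same level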
608 | def test_next_sibling_may_not_exist(self): | ||
609 | self.assertEqual(self.tree.html.next_sibling, None) | ||
610 | |||
611 | nested_span = self.tree.find(id="1.1") | ||
612 | self.assertEqual(nested_span.next_sibling, None) | ||
613 | |||
614 | last_span = self.tree.find(id="4") | ||
615 | self.assertEqual(last_span.next_sibling, None) | ||
616 | |||
617 | def test_find_next_sibling(self): | ||
618 | self.assertEqual(self.start.find_next_sibling('span')['id'], '2') | ||
619 | |||
620 | def test_next_siblings(self): | ||
621 | self.assertSelectsIDs(self.start.find_next_siblings("span"), | ||
622 | ['2', '3', '4']) | ||
623 | |||
624 | self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3']) | ||
625 | |||
626 | def test_next_sibling_for_text_element(self): | ||
627 | soup = self.soup("Foo<b>bar</b>baz") | ||
628 | start = soup.find(text="Foo") | ||
629 | self.assertEqual(start.next_sibling.name, 'b') | ||
630 | self.assertEqual(start.next_sibling.next_sibling, 'baz') | ||
631 | |||
632 | self.assertSelects(start.find_next_siblings('b'), ['bar']) | ||
633 | self.assertEqual(start.find_next_sibling(text="baz"), "baz") | ||
634 | self.assertEqual(start.find_next_sibling(text="nonesuch"), None) | ||
635 | |||
636 | |||
637 | class TestPreviousSibling(SiblingTest): | ||
638 | |||
639 | def setUp(self): | ||
640 | super(TestPreviousSibling, self).setUp() | ||
641 | self.end = self.tree.find(id="4") | ||
642 | |||
643 | def test_previous_sibling_of_root_is_none(self): | ||
644 | self.assertEqual(self.tree.previous_sibling, None) | ||
645 | |||
646 | def test_previous_sibling(self): | ||
647 | self.assertEqual(self.end.previous_sibling['id'], '3') | ||
648 | self.assertEqual(self.end.previous_sibling.previous_sibling['id'], '2') | ||
649 | |||
650 | # Note the difference between previous_sibling and previous_element. | ||
651 | self.assertEqual(self.end.previous_element['id'], '3.1') | ||
652 | |||
653 | def test_previous_sibling_may_not_exist(self): | ||
654 | self.assertEqual(self.tree.html.previous_sibling, None) | ||
655 | |||
656 | nested_span = self.tree.find(id="1.1") | ||
657 | self.assertEqual(nested_span.previous_sibling, None) | ||
658 | |||
659 | first_span = self.tree.find(id="1") | ||
660 | self.assertEqual(first_span.previous_sibling, None) | ||
661 | |||
662 | def test_find_previous_sibling(self): | ||
663 | self.assertEqual(self.end.find_previous_sibling('span')['id'], '3') | ||
664 | |||
665 | def test_previous_siblings(self): | ||
666 | self.assertSelectsIDs(self.end.find_previous_siblings("span"), | ||
667 | ['3', '2', '1']) | ||
668 | |||
669 | self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1']) | ||
670 | |||
671 | def test_previous_sibling_for_text_element(self): | ||
672 | soup = self.soup("Foo<b>bar</b>baz") | ||
673 | start = soup.find(text="baz") | ||
674 | self.assertEqual(start.previous_sibling.name, 'b') | ||
675 | self.assertEqual(start.previous_sibling.previous_sibling, 'Foo') | ||
676 | |||
677 | self.assertSelects(start.find_previous_siblings('b'), ['bar']) | ||
678 | self.assertEqual(start.find_previous_sibling(text="Foo"), "Foo") | ||
679 | self.assertEqual(start.find_previous_sibling(text="nonesuch"), None) | ||
680 | |||
681 | |||
682 | class TestTagCreation(SoupTest): | ||
683 | """Test the ability to create new tags.""" | ||
684 | def test_new_tag(self): | ||
685 | soup = self.soup("") | ||
686 | new_tag = soup.new_tag("foo", bar="baz") | ||
687 | self.assertTrue(isinstance(new_tag, Tag)) | ||
688 | self.assertEqual("foo", new_tag.name) | ||
689 | self.assertEqual(dict(bar="baz"), new_tag.attrs) | ||
690 | self.assertEqual(None, new_tag.parent) | ||
691 | |||
692 | def test_tag_inherits_self_closing_rules_from_builder(self): | ||
693 | if XML_BUILDER_PRESENT: | ||
694 | xml_soup = BeautifulSoup("", "lxml-xml") | ||
695 | xml_br = xml_soup.new_tag("br") | ||
696 | xml_p = xml_soup.new_tag("p") | ||
697 | |||
698 | # Both the <br> and <p> tags are empty-element, just because | ||
699 | # they have no contents. | ||
700 | self.assertEqual(b"<br/>", xml_br.encode()) | ||
701 | self.assertEqual(b"<p/>", xml_p.encode()) | ||
702 | |||
703 | html_soup = BeautifulSoup("", "html.parser") | ||
704 | html_br = html_soup.new_tag("br") | ||
705 | html_p = html_soup.new_tag("p") | ||
706 | |||
707 | # The HTML builder uses HTML's rules about which tags are | ||
708 | # empty-element tags, and the new tags reflect these rules. | ||
709 | self.assertEqual(b"<br/>", html_br.encode()) | ||
710 | self.assertEqual(b"<p></p>", html_p.encode()) | ||
711 | |||
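# A standalone sketch of the builder-specific rules tested above, assuming
# bs4 plus lxml for the XML builder: XML serializes any childless tag as
# empty-element, while HTML only self-closes its defined void elements.
from bs4 import BeautifulSoup

html_soup = BeautifulSoup("", "html.parser")
html_soup.new_tag("br").encode()  # -> b"<br/>": a void element
html_soup.new_tag("p").encode()   # -> b"<p></p>": <p> may hold content

xml_soup = BeautifulSoup("", "xml")  # needs lxml installed
xml_soup.new_tag("p").encode()       # -> b"<p/>": empty means empty-element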
712 | def test_new_string_creates_navigablestring(self): | ||
713 | soup = self.soup("") | ||
714 | s = soup.new_string("foo") | ||
715 | self.assertEqual("foo", s) | ||
716 | self.assertTrue(isinstance(s, NavigableString)) | ||
717 | |||
718 | def test_new_string_can_create_navigablestring_subclass(self): | ||
719 | soup = self.soup("") | ||
720 | s = soup.new_string("foo", Comment) | ||
721 | self.assertEqual("foo", s) | ||
722 | self.assertTrue(isinstance(s, Comment)) | ||
723 | |||
724 | class TestTreeModification(SoupTest): | ||
725 | |||
726 | def test_attribute_modification(self): | ||
727 | soup = self.soup('<a id="1"></a>') | ||
728 | soup.a['id'] = 2 | ||
729 | self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>')) | ||
730 | del(soup.a['id']) | ||
731 | self.assertEqual(soup.decode(), self.document_for('<a></a>')) | ||
732 | soup.a['id2'] = 'foo' | ||
733 | self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>')) | ||
734 | |||
735 | def test_new_tag_creation(self): | ||
736 | builder = builder_registry.lookup('html')() | ||
737 | soup = self.soup("<body></body>", builder=builder) | ||
738 | a = Tag(soup, builder, 'a') | ||
739 | ol = Tag(soup, builder, 'ol') | ||
740 | a['href'] = 'http://foo.com/' | ||
741 | soup.body.insert(0, a) | ||
742 | soup.body.insert(1, ol) | ||
743 | self.assertEqual( | ||
744 | soup.body.encode(), | ||
745 | b'<body><a href="http://foo.com/"></a><ol></ol></body>') | ||
746 | |||
747 | def test_append_to_contents_moves_tag(self): | ||
748 | doc = """<p id="1">Don't leave me <b>here</b>.</p> | ||
749 | <p id="2">Don\'t leave!</p>""" | ||
750 | soup = self.soup(doc) | ||
751 | second_para = soup.find(id='2') | ||
752 | bold = soup.b | ||
753 | |||
754 | # Move the <b> tag to the end of the second paragraph. | ||
755 | soup.find(id='2').append(soup.b) | ||
756 | |||
757 | # The <b> tag is now a child of the second paragraph. | ||
758 | self.assertEqual(bold.parent, second_para) | ||
759 | |||
760 | self.assertEqual( | ||
761 | soup.decode(), self.document_for( | ||
762 | '<p id="1">Don\'t leave me .</p>\n' | ||
763 | '<p id="2">Don\'t leave!<b>here</b></p>')) | ||
764 | |||
765 | def test_replace_with_returns_thing_that_was_replaced(self): | ||
766 | text = "<a></a><b><c></c></b>" | ||
767 | soup = self.soup(text) | ||
768 | a = soup.a | ||
769 | new_a = a.replace_with(soup.c) | ||
770 | self.assertEqual(a, new_a) | ||
771 | |||
772 | def test_unwrap_returns_thing_that_was_replaced(self): | ||
773 | text = "<a><b></b><c></c></a>" | ||
774 | soup = self.soup(text) | ||
775 | a = soup.a | ||
776 | new_a = a.unwrap() | ||
777 | self.assertEqual(a, new_a) | ||
778 | |||
779 | def test_replace_with_and_unwrap_give_useful_exception_when_tag_has_no_parent(self): | ||
780 | soup = self.soup("<a><b>Foo</b></a><c>Bar</c>") | ||
781 | a = soup.a | ||
782 | a.extract() | ||
783 | self.assertEqual(None, a.parent) | ||
784 | self.assertRaises(ValueError, a.unwrap) | ||
785 | self.assertRaises(ValueError, a.replace_with, soup.c) | ||
786 | |||
787 | def test_replace_tag_with_itself(self): | ||
788 | text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>" | ||
789 | soup = self.soup(text) | ||
790 | c = soup.c | ||
791 | soup.c.replace_with(c) | ||
792 | self.assertEqual(soup.decode(), self.document_for(text)) | ||
793 | |||
794 | def test_replace_tag_with_its_parent_raises_exception(self): | ||
795 | text = "<a><b></b></a>" | ||
796 | soup = self.soup(text) | ||
797 | self.assertRaises(ValueError, soup.b.replace_with, soup.a) | ||
798 | |||
799 | def test_insert_tag_into_itself_raises_exception(self): | ||
800 | text = "<a><b></b></a>" | ||
801 | soup = self.soup(text) | ||
802 | self.assertRaises(ValueError, soup.a.insert, 0, soup.a) | ||
803 | |||
804 | def test_replace_with_maintains_next_element_throughout(self): | ||
805 | soup = self.soup('<p><a>one</a><b>three</b></p>') | ||
806 | a = soup.a | ||
807 | b = a.contents[0] | ||
808 | # Make it so the <a> tag has two text children. | ||
809 | a.insert(1, "two") | ||
810 | |||
811 | # Now replace each one with the empty string. | ||
812 | left, right = a.contents | ||
813 | left.replaceWith('') | ||
814 | right.replaceWith('') | ||
815 | |||
816 | # The <b> tag is still connected to the tree. | ||
817 | self.assertEqual("three", soup.b.string) | ||
818 | |||
819 | def test_replace_final_node(self): | ||
820 | soup = self.soup("<b>Argh!</b>") | ||
821 | soup.find(text="Argh!").replace_with("Hooray!") | ||
822 | new_text = soup.find(text="Hooray!") | ||
823 | b = soup.b | ||
824 | self.assertEqual(new_text.previous_element, b) | ||
825 | self.assertEqual(new_text.parent, b) | ||
826 | self.assertEqual(new_text.previous_element.next_element, new_text) | ||
827 | self.assertEqual(new_text.next_element, None) | ||
828 | |||
829 | def test_consecutive_text_nodes(self): | ||
830 | # A builder should never create two consecutive text nodes, | ||
831 | # but if you insert one next to another, Beautiful Soup will | ||
832 | # handle it correctly. | ||
833 | soup = self.soup("<a><b>Argh!</b><c></c></a>") | ||
834 | soup.b.insert(1, "Hooray!") | ||
835 | |||
836 | self.assertEqual( | ||
837 | soup.decode(), self.document_for( | ||
838 | "<a><b>Argh!Hooray!</b><c></c></a>")) | ||
839 | |||
840 | new_text = soup.find(text="Hooray!") | ||
841 | self.assertEqual(new_text.previous_element, "Argh!") | ||
842 | self.assertEqual(new_text.previous_element.next_element, new_text) | ||
843 | |||
844 | self.assertEqual(new_text.previous_sibling, "Argh!") | ||
845 | self.assertEqual(new_text.previous_sibling.next_sibling, new_text) | ||
846 | |||
847 | self.assertEqual(new_text.next_sibling, None) | ||
848 | self.assertEqual(new_text.next_element, soup.c) | ||
849 | |||
850 | def test_insert_string(self): | ||
851 | soup = self.soup("<a></a>") | ||
852 | soup.a.insert(0, "bar") | ||
853 | soup.a.insert(0, "foo") | ||
854 | # The strings were added to the tag. | ||
855 | self.assertEqual(["foo", "bar"], soup.a.contents) | ||
856 | # And they were converted to NavigableStrings. | ||
857 | self.assertEqual(soup.a.contents[0].next_element, "bar") | ||
858 | |||
859 | def test_insert_tag(self): | ||
860 | builder = self.default_builder | ||
861 | soup = self.soup( | ||
862 | "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder) | ||
863 | magic_tag = Tag(soup, builder, 'magictag') | ||
864 | magic_tag.insert(0, "the") | ||
865 | soup.a.insert(1, magic_tag) | ||
866 | |||
867 | self.assertEqual( | ||
868 | soup.decode(), self.document_for( | ||
869 | "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>")) | ||
870 | |||
871 | # Make sure all the relationships are hooked up correctly. | ||
872 | b_tag = soup.b | ||
873 | self.assertEqual(b_tag.next_sibling, magic_tag) | ||
874 | self.assertEqual(magic_tag.previous_sibling, b_tag) | ||
875 | |||
876 | find = b_tag.find(text="Find") | ||
877 | self.assertEqual(find.next_element, magic_tag) | ||
878 | self.assertEqual(magic_tag.previous_element, find) | ||
879 | |||
880 | c_tag = soup.c | ||
881 | self.assertEqual(magic_tag.next_sibling, c_tag) | ||
882 | self.assertEqual(c_tag.previous_sibling, magic_tag) | ||
883 | |||
884 | the = magic_tag.find(text="the") | ||
885 | self.assertEqual(the.parent, magic_tag) | ||
886 | self.assertEqual(the.next_element, c_tag) | ||
887 | self.assertEqual(c_tag.previous_element, the) | ||
888 | |||
889 | def test_append_child_thats_already_at_the_end(self): | ||
890 | data = "<a><b></b></a>" | ||
891 | soup = self.soup(data) | ||
892 | soup.a.append(soup.b) | ||
893 | self.assertEqual(data, soup.decode()) | ||
894 | |||
895 | def test_move_tag_to_beginning_of_parent(self): | ||
896 | data = "<a><b></b><c></c><d></d></a>" | ||
897 | soup = self.soup(data) | ||
898 | soup.a.insert(0, soup.d) | ||
899 | self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode()) | ||
900 | |||
901 | def test_insert_works_on_empty_element_tag(self): | ||
902 | # This is a little strange, since most HTML parsers don't allow | ||
903 | # markup like this to come through. But in general, we don't | ||
904 | # know what the parser would or wouldn't have allowed, so | ||
905 | # I'm letting this succeed for now. | ||
906 | soup = self.soup("<br/>") | ||
907 | soup.br.insert(1, "Contents") | ||
908 | self.assertEqual(str(soup.br), "<br>Contents</br>") | ||
909 | |||
910 | def test_insert_before(self): | ||
911 | soup = self.soup("<a>foo</a><b>bar</b>") | ||
912 | soup.b.insert_before("BAZ") | ||
913 | soup.a.insert_before("QUUX") | ||
914 | self.assertEqual( | ||
915 | soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>")) | ||
916 | |||
917 | soup.a.insert_before(soup.b) | ||
918 | self.assertEqual( | ||
919 | soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ")) | ||
920 | |||
921 | def test_insert_after(self): | ||
922 | soup = self.soup("<a>foo</a><b>bar</b>") | ||
923 | soup.b.insert_after("BAZ") | ||
924 | soup.a.insert_after("QUUX") | ||
925 | self.assertEqual( | ||
926 | soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ")) | ||
927 | soup.b.insert_after(soup.a) | ||
928 | self.assertEqual( | ||
929 | soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ")) | ||
930 | |||
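# A standalone sketch of the insertion API exercised above, assuming
# bs4/html.parser: both methods accept strings or tags, and passing a tag
# that is already in the tree moves it rather than copying it.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<a>foo</a><b>bar</b>", "html.parser")
soup.b.insert_after("BAZ")    # "<a>foo</a><b>bar</b>BAZ"
soup.a.insert_before(soup.b)  # moves <b>: "<b>bar</b><a>foo</a>BAZ"
str(soup)                     # -> "<b>bar</b><a>foo</a>BAZ"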
931 | def test_insert_after_raises_exception_if_after_has_no_meaning(self): | ||
932 | soup = self.soup("") | ||
933 | tag = soup.new_tag("a") | ||
934 | string = soup.new_string("") | ||
935 | self.assertRaises(ValueError, string.insert_after, tag) | ||
936 | self.assertRaises(NotImplementedError, soup.insert_after, tag) | ||
937 | self.assertRaises(ValueError, tag.insert_after, tag) | ||
938 | |||
939 | def test_insert_before_raises_exception_if_before_has_no_meaning(self): | ||
940 | soup = self.soup("") | ||
941 | tag = soup.new_tag("a") | ||
942 | string = soup.new_string("") | ||
943 | self.assertRaises(ValueError, string.insert_before, tag) | ||
944 | self.assertRaises(NotImplementedError, soup.insert_before, tag) | ||
945 | self.assertRaises(ValueError, tag.insert_before, tag) | ||
946 | |||
947 | def test_replace_with(self): | ||
948 | soup = self.soup( | ||
949 | "<p>There's <b>no</b> business like <b>show</b> business</p>") | ||
950 | no, show = soup.find_all('b') | ||
951 | show.replace_with(no) | ||
952 | self.assertEqual( | ||
953 | soup.decode(), | ||
954 | self.document_for( | ||
955 | "<p>There's business like <b>no</b> business</p>")) | ||
956 | |||
957 | self.assertEqual(show.parent, None) | ||
958 | self.assertEqual(no.parent, soup.p) | ||
959 | self.assertEqual(no.next_element, "no") | ||
960 | self.assertEqual(no.next_sibling, " business") | ||
961 | |||
962 | def test_replace_first_child(self): | ||
963 | data = "<a><b></b><c></c></a>" | ||
964 | soup = self.soup(data) | ||
965 | soup.b.replace_with(soup.c) | ||
966 | self.assertEqual("<a><c></c></a>", soup.decode()) | ||
967 | |||
968 | def test_replace_last_child(self): | ||
969 | data = "<a><b></b><c></c></a>" | ||
970 | soup = self.soup(data) | ||
971 | soup.c.replace_with(soup.b) | ||
972 | self.assertEqual("<a><b></b></a>", soup.decode()) | ||
973 | |||
974 | def test_nested_tag_replace_with(self): | ||
975 | soup = self.soup( | ||
976 | """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""") | ||
977 | |||
978 | # Replace the entire <b> tag and its contents ("reserve the | ||
979 | # right") with the <f> tag ("refuse"). | ||
980 | remove_tag = soup.b | ||
981 | move_tag = soup.f | ||
982 | remove_tag.replace_with(move_tag) | ||
983 | |||
984 | self.assertEqual( | ||
985 | soup.decode(), self.document_for( | ||
986 | "<a>We<f>refuse</f></a><e>to<g>service</g></e>")) | ||
987 | |||
988 | # The <b> tag is now an orphan. | ||
989 | self.assertEqual(remove_tag.parent, None) | ||
990 | self.assertEqual(remove_tag.find(text="right").next_element, None) | ||
991 | self.assertEqual(remove_tag.previous_element, None) | ||
992 | self.assertEqual(remove_tag.next_sibling, None) | ||
993 | self.assertEqual(remove_tag.previous_sibling, None) | ||
994 | |||
995 | # The <f> tag is now connected to the <a> tag. | ||
996 | self.assertEqual(move_tag.parent, soup.a) | ||
997 | self.assertEqual(move_tag.previous_element, "We") | ||
998 | self.assertEqual(move_tag.next_element.next_element, soup.e) | ||
999 | self.assertEqual(move_tag.next_sibling, None) | ||
1000 | |||
1001 | # The gap where the <f> tag used to be has been mended, and | ||
1002 | # the word "to" is now connected to the <g> tag. | ||
1003 | to_text = soup.find(text="to") | ||
1004 | g_tag = soup.g | ||
1005 | self.assertEqual(to_text.next_element, g_tag) | ||
1006 | self.assertEqual(to_text.next_sibling, g_tag) | ||
1007 | self.assertEqual(g_tag.previous_element, to_text) | ||
1008 | self.assertEqual(g_tag.previous_sibling, to_text) | ||
1009 | |||
1010 | def test_unwrap(self): | ||
1011 | tree = self.soup(""" | ||
1012 | <p>Unneeded <em>formatting</em> is unneeded</p> | ||
1013 | """) | ||
1014 | tree.em.unwrap() | ||
1015 | self.assertEqual(tree.em, None) | ||
1016 | self.assertEqual(tree.p.text, "Unneeded formatting is unneeded") | ||
1017 | |||
1018 | def test_wrap(self): | ||
1019 | soup = self.soup("I wish I was bold.") | ||
1020 | value = soup.string.wrap(soup.new_tag("b")) | ||
1021 | self.assertEqual(value.decode(), "<b>I wish I was bold.</b>") | ||
1022 | self.assertEqual( | ||
1023 | soup.decode(), self.document_for("<b>I wish I was bold.</b>")) | ||
1024 | |||
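# A standalone sketch of the wrap/unwrap pair, assuming bs4/html.parser:
# wrap() encloses an element in a new parent and returns that parent;
# unwrap() is the inverse, replacing a tag with its own contents.
from bs4 import BeautifulSoup

soup = BeautifulSoup("I wish I was bold.", "html.parser")
b = soup.string.wrap(soup.new_tag("b"))  # -> <b>I wish I was bold.</b>
b.unwrap()                               # back to the bare string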
1025 | def test_wrap_extracts_tag_from_elsewhere(self): | ||
1026 | soup = self.soup("<b></b>I wish I was bold.") | ||
1027 | soup.b.next_sibling.wrap(soup.b) | ||
1028 | self.assertEqual( | ||
1029 | soup.decode(), self.document_for("<b>I wish I was bold.</b>")) | ||
1030 | |||
1031 | def test_wrap_puts_new_contents_at_the_end(self): | ||
1032 | soup = self.soup("<b>I like being bold.</b>I wish I was bold.") | ||
1033 | soup.b.next_sibling.wrap(soup.b) | ||
1034 | self.assertEqual(2, len(soup.b.contents)) | ||
1035 | self.assertEqual( | ||
1036 | soup.decode(), self.document_for( | ||
1037 | "<b>I like being bold.I wish I was bold.</b>")) | ||
1038 | |||
1039 | def test_extract(self): | ||
1040 | soup = self.soup( | ||
1041 | '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>') | ||
1042 | |||
1043 | self.assertEqual(len(soup.body.contents), 3) | ||
1044 | extracted = soup.find(id="nav").extract() | ||
1045 | |||
1046 | self.assertEqual( | ||
1047 | soup.decode(), "<html><body>Some content. More content.</body></html>") | ||
1048 | self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>') | ||
1049 | |||
1050 | # The extracted tag is now an orphan. | ||
1051 | self.assertEqual(len(soup.body.contents), 2) | ||
1052 | self.assertEqual(extracted.parent, None) | ||
1053 | self.assertEqual(extracted.previous_element, None) | ||
1054 | self.assertEqual(extracted.next_element.next_element, None) | ||
1055 | |||
1056 | # The gap where the extracted tag used to be has been mended. | ||
1057 | content_1 = soup.find(text="Some content. ") | ||
1058 | content_2 = soup.find(text=" More content.") | ||
1059 | self.assertEqual(content_1.next_element, content_2) | ||
1060 | self.assertEqual(content_1.next_sibling, content_2) | ||
1061 | self.assertEqual(content_2.previous_element, content_1) | ||
1062 | self.assertEqual(content_2.previous_sibling, content_1) | ||
1063 | |||
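# A standalone sketch of extract(), assuming bs4/html.parser: the call
# detaches the element, returns it as an orphan, and re-links the
# neighbours it used to sit between.
from bs4 import BeautifulSoup

soup = BeautifulSoup('before <div id="nav">crap</div> after', "html.parser")
nav = soup.find(id="nav").extract()
nav.parent  # -> None: the extracted tag is an orphan
str(soup)   # -> "before  after": the gap has been mended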
1064 | def test_extract_distinguishes_between_identical_strings(self): | ||
1065 | soup = self.soup("<a>foo</a><b>bar</b>") | ||
1066 | foo_1 = soup.a.string | ||
1067 | bar_1 = soup.b.string | ||
1068 | foo_2 = soup.new_string("foo") | ||
1069 | bar_2 = soup.new_string("bar") | ||
1070 | soup.a.append(foo_2) | ||
1071 | soup.b.append(bar_2) | ||
1072 | |||
1073 | # Now there are two identical strings in the <a> tag, and two | ||
1074 | # in the <b> tag. Let's remove the first "foo" and the second | ||
1075 | # "bar". | ||
1076 | foo_1.extract() | ||
1077 | bar_2.extract() | ||
1078 | self.assertEqual(foo_2, soup.a.string) | ||
1079 | self.assertEqual(bar_2, soup.b.string) | ||
1080 | |||
1081 | def test_extract_multiples_of_same_tag(self): | ||
1082 | soup = self.soup(""" | ||
1083 | <html> | ||
1084 | <head> | ||
1085 | <script>foo</script> | ||
1086 | </head> | ||
1087 | <body> | ||
1088 | <script>bar</script> | ||
1089 | <a></a> | ||
1090 | </body> | ||
1091 | <script>baz</script> | ||
1092 | </html>""") | ||
1093 | for _ in soup.find_all("script"): soup.script.extract() | ||
1094 | self.assertEqual("<body>\n\n<a></a>\n</body>", str(soup.body)) | ||
1095 | |||
1096 | |||
1097 | def test_extract_works_when_element_is_surrounded_by_identical_strings(self): | ||
1098 | soup = self.soup( | ||
1099 | '<html>\n' | ||
1100 | '<body>hi</body>\n' | ||
1101 | '</html>') | ||
1102 | soup.find('body').extract() | ||
1103 | self.assertEqual(None, soup.find('body')) | ||
1104 | |||
1105 | |||
1106 | def test_clear(self): | ||
1107 | """Tag.clear()""" | ||
1108 | soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>") | ||
1109 | # clear using extract() | ||
1110 | a = soup.a | ||
1111 | soup.p.clear() | ||
1112 | self.assertEqual(len(soup.p.contents), 0) | ||
1113 | self.assertTrue(hasattr(a, "contents")) | ||
1114 | |||
1115 | # clear using decompose() | ||
1116 | em = a.em | ||
1117 | a.clear(decompose=True) | ||
1118 | self.assertEqual(0, len(em.contents)) | ||
1119 | |||
1120 | def test_string_set(self): | ||
1121 | """Tag.string = 'string'""" | ||
1122 | soup = self.soup("<a></a> <b><c></c></b>") | ||
1123 | soup.a.string = "foo" | ||
1124 | self.assertEqual(soup.a.contents, ["foo"]) | ||
1125 | soup.b.string = "bar" | ||
1126 | self.assertEqual(soup.b.contents, ["bar"]) | ||
1127 | |||
1128 | def test_string_set_does_not_affect_original_string(self): | ||
1129 | soup = self.soup("<a><b>foo</b><c>bar</c>") | ||
1130 | soup.b.string = soup.c.string | ||
1131 | self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>") | ||
1132 | |||
1133 | def test_set_string_preserves_class_of_string(self): | ||
1134 | soup = self.soup("<a></a>") | ||
1135 | cdata = CData("foo") | ||
1136 | soup.a.string = cdata | ||
1137 | self.assertTrue(isinstance(soup.a.string, CData)) | ||
1138 | |||
1139 | class TestElementObjects(SoupTest): | ||
1140 | """Test various features of element objects.""" | ||
1141 | |||
1142 | def test_len(self): | ||
1143 | """The length of an element is its number of children.""" | ||
1144 | soup = self.soup("<top>1<b>2</b>3</top>") | ||
1145 | |||
1146 | # The BeautifulSoup object itself contains one element: the | ||
1147 | # <top> tag. | ||
1148 | self.assertEqual(len(soup.contents), 1) | ||
1149 | self.assertEqual(len(soup), 1) | ||
1150 | |||
1151 | # The <top> tag contains three elements: the text node "1", the | ||
1152 | # <b> tag, and the text node "3". | ||
1153 | self.assertEqual(len(soup.top), 3) | ||
1154 | self.assertEqual(len(soup.top.contents), 3) | ||
1155 | |||
1156 | def test_member_access_invokes_find(self): | ||
1157 | """Accessing a Python member .foo invokes find('foo')""" | ||
1158 | soup = self.soup('<b><i></i></b>') | ||
1159 | self.assertEqual(soup.b, soup.find('b')) | ||
1160 | self.assertEqual(soup.b.i, soup.find('b').find('i')) | ||
1161 | self.assertEqual(soup.a, None) | ||
1162 | |||
1163 | def test_deprecated_member_access(self): | ||
1164 | soup = self.soup('<b><i></i></b>') | ||
1165 | with warnings.catch_warnings(record=True) as w: | ||
1166 | tag = soup.bTag | ||
1167 | self.assertEqual(soup.b, tag) | ||
1168 | self.assertEqual( | ||
1169 | '.bTag is deprecated, use .find("b") instead.', | ||
1170 | str(w[0].message)) | ||
1171 | |||
1172 | def test_has_attr(self): | ||
1173 | """has_attr() checks for the presence of an attribute. | ||
1174 | |||
1175 | Please note: has_attr() is different from | ||
1176 | __contains__. has_attr() checks the tag's attributes and __contains__ | ||
1177 | checks the tag's children. | ||
1178 | """ | ||
1179 | soup = self.soup("<foo attr='bar'>") | ||
1180 | self.assertTrue(soup.foo.has_attr('attr')) | ||
1181 | self.assertFalse(soup.foo.has_attr('attr2')) | ||
1182 | |||
1183 | |||
1184 | def test_attributes_come_out_in_alphabetical_order(self): | ||
1185 | markup = '<b a="1" z="5" m="3" f="2" y="4"></b>' | ||
1186 | self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>') | ||
1187 | |||
1188 | def test_string(self): | ||
1189 | # A tag that contains only a text node makes that node | ||
1190 | # available as .string. | ||
1191 | soup = self.soup("<b>foo</b>") | ||
1192 | self.assertEqual(soup.b.string, 'foo') | ||
1193 | |||
1194 | def test_empty_tag_has_no_string(self): | ||
1195 | # A tag with no children has no .string. | ||
1196 | soup = self.soup("<b></b>") | ||
1197 | self.assertEqual(soup.b.string, None) | ||
1198 | |||
1199 | def test_tag_with_multiple_children_has_no_string(self): | ||
1200 | # A tag with multiple children has no .string. | ||
1201 | soup = self.soup("<a>foo<b></b><b></b></b>") | ||
1202 | self.assertEqual(soup.b.string, None) | ||
1203 | |||
1204 | soup = self.soup("<a>foo<b></b>bar</b>") | ||
1205 | self.assertEqual(soup.b.string, None) | ||
1206 | |||
1207 | # Even if all the children end up being strings, .string is still | ||
1208 | # None--though merging adjacent strings would be a good optimization. | ||
1209 | soup = self.soup("<a>foo</b>") | ||
1210 | soup.a.insert(1, "bar") | ||
1211 | self.assertEqual(soup.a.string, None) | ||
1212 | |||
1213 | def test_tag_with_recursive_string_has_string(self): | ||
1214 | # A tag with a single child which has a .string inherits that | ||
1215 | # .string. | ||
1216 | soup = self.soup("<a><b>foo</b></a>") | ||
1217 | self.assertEqual(soup.a.string, "foo") | ||
1218 | self.assertEqual(soup.string, "foo") | ||
1219 | |||
1220 | def test_lack_of_string(self): | ||
1221 | """Only a tag containing a single text node has a .string.""" | ||
1222 | soup = self.soup("<b>f<i>e</i>o</b>") | ||
1223 | self.assertFalse(soup.b.string) | ||
1224 | |||
1225 | soup = self.soup("<b></b>") | ||
1226 | self.assertFalse(soup.b.string) | ||
1227 | |||
1228 | def test_all_text(self): | ||
1229 | """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated""" | ||
1230 | soup = self.soup("<a>a<b>r</b> <r> t </r></a>") | ||
1231 | self.assertEqual(soup.a.text, "ar t ") | ||
1232 | self.assertEqual(soup.a.get_text(strip=True), "art") | ||
1233 | self.assertEqual(soup.a.get_text(","), "a,r, , t ") | ||
1234 | self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t") | ||
1235 | |||
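# A standalone sketch of get_text(), assuming bs4/html.parser: all
# descendant strings are concatenated, with an optional separator and
# optional stripping of each piece.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<a>a<b>r</b> <r> t </r></a>", "html.parser")
soup.a.get_text()                 # -> "ar t "
soup.a.get_text(",", strip=True)  # -> "a,r,t"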
1236 | def test_get_text_ignores_comments(self): | ||
1237 | soup = self.soup("foo<!--IGNORE-->bar") | ||
1238 | self.assertEqual(soup.get_text(), "foobar") | ||
1239 | |||
1240 | self.assertEqual( | ||
1241 | soup.get_text(types=(NavigableString, Comment)), "fooIGNOREbar") | ||
1242 | self.assertEqual( | ||
1243 | soup.get_text(types=None), "fooIGNOREbar") | ||
1244 | |||
1245 | def test_all_strings_ignores_comments(self): | ||
1246 | soup = self.soup("foo<!--IGNORE-->bar") | ||
1247 | self.assertEqual(['foo', 'bar'], list(soup.strings)) | ||
1248 | |||
1249 | class TestCDataListAttributes(SoupTest): | ||
1250 | |||
1251 | """Testing cdata-list attributes like 'class'. | ||
1252 | """ | ||
1253 | def test_single_value_becomes_list(self): | ||
1254 | soup = self.soup("<a class='foo'>") | ||
1255 | self.assertEqual(["foo"],soup.a['class']) | ||
1256 | |||
1257 | def test_multiple_values_becomes_list(self): | ||
1258 | soup = self.soup("<a class='foo bar'>") | ||
1259 | self.assertEqual(["foo", "bar"], soup.a['class']) | ||
1260 | |||
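# A standalone sketch of cdata-list handling, assuming bs4/html.parser:
# multi-valued attributes such as class are split on any whitespace into a
# list at parse time and re-joined with single spaces on output.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<a class='foo\tbar'>x</a>", "html.parser")
soup.a["class"]  # -> ["foo", "bar"]
soup.a.encode()  # -> b'<a class="foo bar">x</a>'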
1261 | def test_multiple_values_separated_by_weird_whitespace(self): | ||
1262 | soup = self.soup("<a class='foo\tbar\nbaz'>") | ||
1263 | self.assertEqual(["foo", "bar", "baz"],soup.a['class']) | ||
1264 | |||
1265 | def test_attributes_joined_into_string_on_output(self): | ||
1266 | soup = self.soup("<a class='foo\tbar'>") | ||
1267 | self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode()) | ||
1268 | |||
1269 | def test_accept_charset(self): | ||
1270 | soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">') | ||
1271 | self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset']) | ||
1272 | |||
1273 | def test_cdata_attribute_applying_only_to_one_tag(self): | ||
1274 | data = '<a accept-charset="ISO-8859-1 UTF-8"></a>' | ||
1275 | soup = self.soup(data) | ||
1276 | # We saw in another test that accept-charset is a cdata-list | ||
1277 | # attribute for the <form> tag. But it's not a cdata-list | ||
1278 | # attribute for any other tag. | ||
1279 | self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset']) | ||
1280 | |||
1281 | def test_string_has_immutable_name_property(self): | ||
1282 | string = self.soup("s").string | ||
1283 | self.assertEqual(None, string.name) | ||
1284 | def t(): | ||
1285 | string.name = 'foo' | ||
1286 | self.assertRaises(AttributeError, t) | ||
1287 | |||
1288 | class TestPersistence(SoupTest): | ||
1289 | "Testing features like pickle and deepcopy." | ||
1290 | |||
1291 | def setUp(self): | ||
1292 | super(TestPersistence, self).setUp() | ||
1293 | self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" | ||
1294 | "http://www.w3.org/TR/REC-html40/transitional.dtd"> | ||
1295 | <html> | ||
1296 | <head> | ||
1297 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | ||
1298 | <title>Beautiful Soup: We called him Tortoise because he taught us.</title> | ||
1299 | <link rev="made" href="mailto:leonardr@segfault.org"> | ||
1300 | <meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping."> | ||
1301 | <meta name="generator" content="Markov Approximation 1.4 (module: leonardr)"> | ||
1302 | <meta name="author" content="Leonard Richardson"> | ||
1303 | </head> | ||
1304 | <body> | ||
1305 | <a href="foo">foo</a> | ||
1306 | <a href="foo"><b>bar</b></a> | ||
1307 | </body> | ||
1308 | </html>""" | ||
1309 | self.tree = self.soup(self.page) | ||
1310 | |||
1311 | def test_pickle_and_unpickle_identity(self): | ||
1312 | # Pickling a tree, then unpickling it, yields a tree identical | ||
1313 | # to the original. | ||
1314 | dumped = pickle.dumps(self.tree, 2) | ||
1315 | loaded = pickle.loads(dumped) | ||
1316 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
1317 | self.assertEqual(loaded.decode(), self.tree.decode()) | ||
1318 | |||
1319 | def test_deepcopy_identity(self): | ||
1320 | # Making a deepcopy of a tree yields an identical tree. | ||
1321 | copied = copy.deepcopy(self.tree) | ||
1322 | self.assertEqual(copied.decode(), self.tree.decode()) | ||
1323 | |||
1324 | def test_unicode_pickle(self): | ||
1325 | # A tree containing Unicode characters can be pickled. | ||
1326 | html = "<b>\N{SNOWMAN}</b>" | ||
1327 | soup = self.soup(html) | ||
1328 | dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL) | ||
1329 | loaded = pickle.loads(dumped) | ||
1330 | self.assertEqual(loaded.decode(), soup.decode()) | ||
1331 | |||
1332 | def test_copy_navigablestring_is_not_attached_to_tree(self): | ||
1333 | html = "<b>Foo<a></a></b><b>Bar</b>" | ||
1334 | soup = self.soup(html) | ||
1335 | s1 = soup.find(string="Foo") | ||
1336 | s2 = copy.copy(s1) | ||
1337 | self.assertEqual(s1, s2) | ||
1338 | self.assertEqual(None, s2.parent) | ||
1339 | self.assertEqual(None, s2.next_element) | ||
1340 | self.assertNotEqual(None, s1.next_sibling) | ||
1341 | self.assertEqual(None, s2.next_sibling) | ||
1342 | self.assertEqual(None, s2.previous_element) | ||
1343 | |||
1344 | def test_copy_navigablestring_subclass_has_same_type(self): | ||
1345 | html = "<b><!--Foo--></b>" | ||
1346 | soup = self.soup(html) | ||
1347 | s1 = soup.string | ||
1348 | s2 = copy.copy(s1) | ||
1349 | self.assertEqual(s1, s2) | ||
1350 | self.assertTrue(isinstance(s2, Comment)) | ||
1351 | |||
1352 | def test_copy_entire_soup(self): | ||
1353 | html = "<div><b>Foo<a></a></b><b>Bar</b></div>end" | ||
1354 | soup = self.soup(html) | ||
1355 | soup_copy = copy.copy(soup) | ||
1356 | self.assertEqual(soup, soup_copy) | ||
1357 | |||
1358 | def test_copy_tag_copies_contents(self): | ||
1359 | html = "<div><b>Foo<a></a></b><b>Bar</b></div>end" | ||
1360 | soup = self.soup(html) | ||
1361 | div = soup.div | ||
1362 | div_copy = copy.copy(div) | ||
1363 | |||
1364 | # The two tags look the same, and evaluate to equal. | ||
1365 | self.assertEqual(str(div), str(div_copy)) | ||
1366 | self.assertEqual(div, div_copy) | ||
1367 | |||
1368 | # But they're not the same object. | ||
1369 | self.assertFalse(div is div_copy) | ||
1370 | |||
1371 | # And they don't have the same relation to the parse tree. The | ||
1372 | # copy is not associated with a parse tree at all. | ||
1373 | self.assertEqual(None, div_copy.parent) | ||
1374 | self.assertEqual(None, div_copy.previous_element) | ||
1375 | self.assertEqual(None, div_copy.find(string='Bar').next_element) | ||
1376 | self.assertNotEqual(None, div.find(string='Bar').next_element) | ||
1377 | |||
1378 | class TestSubstitutions(SoupTest): | ||
1379 | |||
1380 | def test_default_formatter_is_minimal(self): | ||
1381 | markup = "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>" | ||
1382 | soup = self.soup(markup) | ||
1383 | decoded = soup.decode(formatter="minimal") | ||
1384 | # The < is converted back into &lt; but the e-with-acute is left alone. | ||
1385 | self.assertEqual( | ||
1386 | decoded, | ||
1387 | self.document_for( | ||
1388 | "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>")) | ||
1389 | |||
1390 | def test_formatter_html(self): | ||
1391 | markup = "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>" | ||
1392 | soup = self.soup(markup) | ||
1393 | decoded = soup.decode(formatter="html") | ||
1394 | self.assertEqual( | ||
1395 | decoded, | ||
1396 | self.document_for("<b><<Sacré bleu!>></b>")) | ||
1397 | |||
1398 | def test_formatter_minimal(self): | ||
1399 | markup = "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>" | ||
1400 | soup = self.soup(markup) | ||
1401 | decoded = soup.decode(formatter="minimal") | ||
1402 | # The < is converted back into &lt; but the e-with-acute is left alone. | ||
1403 | self.assertEqual( | ||
1404 | decoded, | ||
1405 | self.document_for( | ||
1406 | "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>")) | ||
1407 | |||
1408 | def test_formatter_null(self): | ||
1409 | markup = "<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>" | ||
1410 | soup = self.soup(markup) | ||
1411 | decoded = soup.decode(formatter=None) | ||
1412 | # Neither the angle brackets nor the e-with-acute are converted. | ||
1413 | # This is not valid HTML, but it's what the user wanted. | ||
1414 | self.assertEqual(decoded, | ||
1415 | self.document_for("<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>")) | ||
1416 | |||
1417 | def test_formatter_custom(self): | ||
1418 | markup = "<b><foo></b><b>bar</b>" | ||
1419 | soup = self.soup(markup) | ||
1420 | decoded = soup.decode(formatter = lambda x: x.upper()) | ||
1421 | # Instead of normal entity conversion code, the custom | ||
1422 | # callable is called on every string. | ||
1423 | self.assertEqual( | ||
1424 | decoded, | ||
1425 | self.document_for("<b><FOO></b><b>BAR</b>")) | ||
1426 | |||
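# A standalone sketch of a callable formatter, assuming bs4/html.parser:
# instead of the usual entity substitution, the callable is applied to every
# text node and attribute value while the output is generated.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<b>bar</b>", "html.parser")
soup.decode(formatter=lambda s: s.upper())  # -> "<b>BAR</b>"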
1427 | def test_formatter_is_run_on_attribute_values(self): | ||
1428 | markup = '<a href="http://a.com?a=b&c=é">e</a>' | ||
1429 | soup = self.soup(markup) | ||
1430 | a = soup.a | ||
1431 | |||
1432 | expect_minimal = '<a href="http://a.com?a=b&amp;c=é">e</a>' | ||
1433 | |||
1434 | self.assertEqual(expect_minimal, a.decode()) | ||
1435 | self.assertEqual(expect_minimal, a.decode(formatter="minimal")) | ||
1436 | |||
1437 | expect_html = '<a href="http://a.com?a=b&amp;c=&eacute;">e</a>' | ||
1438 | self.assertEqual(expect_html, a.decode(formatter="html")) | ||
1439 | |||
1440 | self.assertEqual(markup, a.decode(formatter=None)) | ||
1441 | expect_upper = '<a href="HTTP://A.COM?A=B&C=É">E</a>' | ||
1442 | self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper())) | ||
1443 | |||
1444 | def test_formatter_skips_script_tag_for_html_documents(self): | ||
1445 | doc = """ | ||
1446 | <script type="text/javascript"> | ||
1447 | console.log("< < hey > > "); | ||
1448 | </script> | ||
1449 | """ | ||
1450 | encoded = BeautifulSoup(doc, 'html.parser').encode() | ||
1451 | self.assertTrue(b"< < hey > >" in encoded) | ||
1452 | |||
1453 | def test_formatter_skips_style_tag_for_html_documents(self): | ||
1454 | doc = """ | ||
1455 | <style type="text/css"> | ||
1456 | console.log("< < hey > > "); | ||
1457 | </style> | ||
1458 | """ | ||
1459 | encoded = BeautifulSoup(doc, 'html.parser').encode() | ||
1460 | self.assertTrue(b"< < hey > >" in encoded) | ||
1461 | |||
1462 | def test_prettify_leaves_preformatted_text_alone(self): | ||
1463 | soup = self.soup("<div> foo <pre> \tbar\n \n </pre> baz ") | ||
1464 | # Everything outside the <pre> tag is reformatted, but everything | ||
1465 | # inside is left alone. | ||
1466 | self.assertEqual( | ||
1467 | '<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n</div>', | ||
1468 | soup.div.prettify()) | ||
1469 | |||
1470 | def test_prettify_accepts_formatter(self): | ||
1471 | soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser') | ||
1472 | pretty = soup.prettify(formatter = lambda x: x.upper()) | ||
1473 | self.assertTrue("FOO" in pretty) | ||
1474 | |||
1475 | def test_prettify_outputs_unicode_by_default(self): | ||
1476 | soup = self.soup("<a></a>") | ||
1477 | self.assertEqual(str, type(soup.prettify())) | ||
1478 | |||
1479 | def test_prettify_can_encode_data(self): | ||
1480 | soup = self.soup("<a></a>") | ||
1481 | self.assertEqual(bytes, type(soup.prettify("utf-8"))) | ||
1482 | |||
1483 | def test_html_entity_substitution_off_by_default(self): | ||
1484 | markup = "<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>" | ||
1485 | soup = self.soup(markup) | ||
1486 | encoded = soup.b.encode("utf-8") | ||
1487 | self.assertEqual(encoded, markup.encode('utf-8')) | ||
1488 | |||
1489 | def test_encoding_substitution(self): | ||
1490 | # Here's the <meta> tag saying that a document is | ||
1491 | # encoded in Shift-JIS. | ||
1492 | meta_tag = ('<meta content="text/html; charset=x-sjis" ' | ||
1493 | 'http-equiv="Content-type"/>') | ||
1494 | soup = self.soup(meta_tag) | ||
1495 | |||
1496 | # Parse the document, and the charset appears unchanged. | ||
1497 | self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis') | ||
1498 | |||
1499 | # Encode the document into some encoding, and the encoding is | ||
1500 | # substituted into the meta tag. | ||
1501 | utf_8 = soup.encode("utf-8") | ||
1502 | self.assertTrue(b"charset=utf-8" in utf_8) | ||
1503 | |||
1504 | euc_jp = soup.encode("euc_jp") | ||
1505 | self.assertTrue(b"charset=euc_jp" in euc_jp) | ||
1506 | |||
1507 | shift_jis = soup.encode("shift-jis") | ||
1508 | self.assertTrue(b"charset=shift-jis" in shift_jis) | ||
1509 | |||
1510 | utf_16_u = soup.encode("utf-16").decode("utf-16") | ||
1511 | self.assertTrue("charset=utf-16" in utf_16_u) | ||
1512 | |||
1513 | def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self): | ||
1514 | markup = ('<head><meta content="text/html; charset=x-sjis" ' | ||
1515 | 'http-equiv="Content-type"/></head><pre>foo</pre>') | ||
1516 | |||
1517 | # Beautiful Soup used to try to rewrite the meta tag even if the | ||
1518 | # meta tag got filtered out by the strainer. This test makes | ||
1519 | # sure that doesn't happen. | ||
1520 | strainer = SoupStrainer('pre') | ||
1521 | soup = self.soup(markup, parse_only=strainer) | ||
1522 | self.assertEqual(soup.contents[0].name, 'pre') | ||
1523 | |||
1524 | class TestEncoding(SoupTest): | ||
1525 | """Test the ability to encode objects into strings.""" | ||
1526 | |||
1527 | def test_unicode_string_can_be_encoded(self): | ||
1528 | html = "<b>\N{SNOWMAN}</b>" | ||
1529 | soup = self.soup(html) | ||
1530 | self.assertEqual(soup.b.string.encode("utf-8"), | ||
1531 | "\N{SNOWMAN}".encode("utf-8")) | ||
1532 | |||
1533 | def test_tag_containing_unicode_string_can_be_encoded(self): | ||
1534 | html = "<b>\N{SNOWMAN}</b>" | ||
1535 | soup = self.soup(html) | ||
1536 | self.assertEqual( | ||
1537 | soup.b.encode("utf-8"), html.encode("utf-8")) | ||
1538 | |||
1539 | def test_encoding_substitutes_unrecognized_characters_by_default(self): | ||
1540 | html = "<b>\N{SNOWMAN}</b>" | ||
1541 | soup = self.soup(html) | ||
1542 | self.assertEqual(soup.b.encode("ascii"), b"<b>☃</b>") | ||
1543 | |||
1544 | def test_encoding_can_be_made_strict(self): | ||
1545 | html = "<b>\N{SNOWMAN}</b>" | ||
1546 | soup = self.soup(html) | ||
1547 | self.assertRaises( | ||
1548 | UnicodeEncodeError, soup.encode, "ascii", errors="strict") | ||
1549 | |||
1550 | def test_decode_contents(self): | ||
1551 | html = "<b>\N{SNOWMAN}</b>" | ||
1552 | soup = self.soup(html) | ||
1553 | self.assertEqual("\N{SNOWMAN}", soup.b.decode_contents()) | ||
1554 | |||
1555 | def test_encode_contents(self): | ||
1556 | html = "<b>\N{SNOWMAN}</b>" | ||
1557 | soup = self.soup(html) | ||
1558 | self.assertEqual( | ||
1559 | "\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents( | ||
1560 | encoding="utf8")) | ||
1561 | |||
1562 | def test_deprecated_renderContents(self): | ||
1563 | html = "<b>\N{SNOWMAN}</b>" | ||
1564 | soup = self.soup(html) | ||
1565 | self.assertEqual( | ||
1566 | "\N{SNOWMAN}".encode("utf8"), soup.b.renderContents()) | ||
1567 | |||
1568 | def test_repr(self): | ||
1569 | html = "<b>\N{SNOWMAN}</b>" | ||
1570 | soup = self.soup(html) | ||
1571 | if PY3K: | ||
1572 | self.assertEqual(html, repr(soup)) | ||
1573 | else: | ||
1574 | self.assertEqual(b'<b>\\u2603</b>', repr(soup)) | ||
1575 | |||
1576 | class TestNavigableStringSubclasses(SoupTest): | ||
1577 | |||
1578 | def test_cdata(self): | ||
1579 | # None of the current builders turn CDATA sections into CData | ||
1580 | # objects, but you can create them manually. | ||
1581 | soup = self.soup("") | ||
1582 | cdata = CData("foo") | ||
1583 | soup.insert(1, cdata) | ||
1584 | self.assertEqual(str(soup), "<![CDATA[foo]]>") | ||
1585 | self.assertEqual(soup.find(text="foo"), "foo") | ||
1586 | self.assertEqual(soup.contents[0], "foo") | ||
1587 | |||
1588 | def test_cdata_is_never_formatted(self): | ||
1589 | """Text inside a CData object is passed into the formatter. | ||
1590 | |||
1591 | But the return value is ignored. | ||
1592 | """ | ||
1593 | |||
1594 | self.count = 0 | ||
1595 | def increment(*args): | ||
1596 | self.count += 1 | ||
1597 | return "BITTER FAILURE" | ||
1598 | |||
1599 | soup = self.soup("") | ||
1600 | cdata = CData("<><><>") | ||
1601 | soup.insert(1, cdata) | ||
1602 | self.assertEqual( | ||
1603 | b"<![CDATA[<><><>]]>", soup.encode(formatter=increment)) | ||
1604 | self.assertEqual(1, self.count) | ||
1605 | |||
1606 | def test_doctype_ends_in_newline(self): | ||
1607 | # Unlike other NavigableString subclasses, a DOCTYPE always ends | ||
1608 | # in a newline. | ||
1609 | doctype = Doctype("foo") | ||
1610 | soup = self.soup("") | ||
1611 | soup.insert(1, doctype) | ||
1612 | self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n") | ||
1613 | |||
1614 | def test_declaration(self): | ||
1615 | d = Declaration("foo") | ||
1616 | self.assertEqual("<?foo?>", d.output_ready()) | ||
1617 | |||
1618 | class TestSoupSelector(TreeTest): | ||
1619 | |||
1620 | HTML = """ | ||
1621 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" | ||
1622 | "http://www.w3.org/TR/html4/strict.dtd"> | ||
1623 | <html> | ||
1624 | <head> | ||
1625 | <title>The title</title> | ||
1626 | <link rel="stylesheet" href="blah.css" type="text/css" id="l1"> | ||
1627 | </head> | ||
1628 | <body> | ||
1629 | <custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag> | ||
1630 | <div id="main" class="fancy"> | ||
1631 | <div id="inner"> | ||
1632 | <h1 id="header1">An H1</h1> | ||
1633 | <p>Some text</p> | ||
1634 | <p class="onep" id="p1">Some more text</p> | ||
1635 | <h2 id="header2">An H2</h2> | ||
1636 | <p class="class1 class2 class3" id="pmulti">Another</p> | ||
1637 | <a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a> | ||
1638 | <h2 id="header3">Another H2</h2> | ||
1639 | <a id="me" href="http://simonwillison.net/" rel="me">me</a> | ||
1640 | <span class="s1"> | ||
1641 | <a href="#" id="s1a1">span1a1</a> | ||
1642 | <a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a> | ||
1643 | <span class="span2"> | ||
1644 | <a href="#" id="s2a1">span2a1</a> | ||
1645 | </span> | ||
1646 | <span class="span3"></span> | ||
1647 | <custom-dashed-tag class="dashed" id="dash2"/> | ||
1648 | <div data-tag="dashedvalue" id="data1"/> | ||
1649 | </span> | ||
1650 | </div> | ||
1651 | <x id="xid"> | ||
1652 | <z id="zida"/> | ||
1653 | <z id="zidab"/> | ||
1654 | <z id="zidac"/> | ||
1655 | </x> | ||
1656 | <y id="yid"> | ||
1657 | <z id="zidb"/> | ||
1658 | </y> | ||
1659 | <p lang="en" id="lang-en">English</p> | ||
1660 | <p lang="en-gb" id="lang-en-gb">English UK</p> | ||
1661 | <p lang="en-us" id="lang-en-us">English US</p> | ||
1662 | <p lang="fr" id="lang-fr">French</p> | ||
1663 | </div> | ||
1664 | |||
1665 | <div id="footer"> | ||
1666 | </div> | ||
1667 | """ | ||
1668 | |||
1669 | def setUp(self): | ||
1670 | self.soup = BeautifulSoup(self.HTML, 'html.parser') | ||
1671 | |||
1672 | def assertSelects(self, selector, expected_ids): | ||
1673 | el_ids = [el['id'] for el in self.soup.select(selector)] | ||
1674 | el_ids.sort() | ||
1675 | expected_ids.sort() | ||
1676 | self.assertEqual(expected_ids, el_ids, | ||
1677 | "Selector %s, expected [%s], got [%s]" % ( | ||
1678 | selector, ', '.join(expected_ids), ', '.join(el_ids) | ||
1679 | ) | ||
1680 | ) | ||
1681 | |||
1682 | assertSelect = assertSelects | ||
1683 | |||
1684 | def assertSelectMultiple(self, *tests): | ||
1685 | for selector, expected_ids in tests: | ||
1686 | self.assertSelect(selector, expected_ids) | ||
1687 | |||
1688 | def test_one_tag_one(self): | ||
1689 | els = self.soup.select('title') | ||
1690 | self.assertEqual(len(els), 1) | ||
1691 | self.assertEqual(els[0].name, 'title') | ||
1692 | self.assertEqual(els[0].contents, ['The title']) | ||
1693 | |||
1694 | def test_one_tag_many(self): | ||
1695 | els = self.soup.select('div') | ||
1696 | self.assertEqual(len(els), 4) | ||
1697 | for div in els: | ||
1698 | self.assertEqual(div.name, 'div') | ||
1699 | |||
1700 | el = self.soup.select_one('div') | ||
1701 | self.assertEqual('main', el['id']) | ||
1702 | |||
1703 | def test_select_one_returns_none_if_no_match(self): | ||
1704 | match = self.soup.select_one('nonexistenttag') | ||
1705 | self.assertEqual(None, match) | ||
1706 | |||
1707 | |||
1708 | def test_tag_in_tag_one(self): | ||
1709 | els = self.soup.select('div div') | ||
1710 | self.assertSelects('div div', ['inner', 'data1']) | ||
1711 | |||
1712 | def test_tag_in_tag_many(self): | ||
1713 | for selector in ('html div', 'html body div', 'body div'): | ||
1714 | self.assertSelects(selector, ['data1', 'main', 'inner', 'footer']) | ||
1715 | |||
1716 | def test_tag_no_match(self): | ||
1717 | self.assertEqual(len(self.soup.select('del')), 0) | ||
1718 | |||
1719 | def test_invalid_tag(self): | ||
1720 | self.assertRaises(ValueError, self.soup.select, 'tag%t') | ||
1721 | |||
1722 | def test_select_dashed_tag_ids(self): | ||
1723 | self.assertSelects('custom-dashed-tag', ['dash1', 'dash2']) | ||
1724 | |||
1725 | def test_select_dashed_by_id(self): | ||
1726 | dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]') | ||
1727 | self.assertEqual(dashed[0].name, 'custom-dashed-tag') | ||
1728 | self.assertEqual(dashed[0]['id'], 'dash2') | ||
1729 | |||
1730 | def test_dashed_tag_text(self): | ||
1731 | self.assertEqual(self.soup.select('body > custom-dashed-tag')[0].text, 'Hello there.') | ||
1732 | |||
1733 | def test_select_dashed_matches_find_all(self): | ||
1734 | self.assertEqual(self.soup.select('custom-dashed-tag'), self.soup.find_all('custom-dashed-tag')) | ||
1735 | |||
1736 | def test_header_tags(self): | ||
1737 | self.assertSelectMultiple( | ||
1738 | ('h1', ['header1']), | ||
1739 | ('h2', ['header2', 'header3']), | ||
1740 | ) | ||
1741 | |||
1742 | def test_class_one(self): | ||
1743 | for selector in ('.onep', 'p.onep', 'html p.onep'): | ||
1744 | els = self.soup.select(selector) | ||
1745 | self.assertEqual(len(els), 1) | ||
1746 | self.assertEqual(els[0].name, 'p') | ||
1747 | self.assertEqual(els[0]['class'], ['onep']) | ||
1748 | |||
1749 | def test_class_mismatched_tag(self): | ||
1750 | els = self.soup.select('div.onep') | ||
1751 | self.assertEqual(len(els), 0) | ||
1752 | |||
1753 | def test_one_id(self): | ||
1754 | for selector in ('div#inner', '#inner', 'div div#inner'): | ||
1755 | self.assertSelects(selector, ['inner']) | ||
1756 | |||
1757 | def test_bad_id(self): | ||
1758 | els = self.soup.select('#doesnotexist') | ||
1759 | self.assertEqual(len(els), 0) | ||
1760 | |||
1761 | def test_items_in_id(self): | ||
1762 | els = self.soup.select('div#inner p') | ||
1763 | self.assertEqual(len(els), 3) | ||
1764 | for el in els: | ||
1765 | self.assertEqual(el.name, 'p') | ||
1766 | self.assertEqual(els[1]['class'], ['onep']) | ||
1767 | self.assertFalse(els[0].has_attr('class')) | ||
1768 | |||
1769 | def test_a_bunch_of_emptys(self): | ||
1770 | for selector in ('div#main del', 'div#main div.oops', 'div div#main'): | ||
1771 | self.assertEqual(len(self.soup.select(selector)), 0) | ||
1772 | |||
1773 | def test_multi_class_support(self): | ||
1774 | for selector in ('.class1', 'p.class1', '.class2', 'p.class2', | ||
1775 | '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'): | ||
1776 | self.assertSelects(selector, ['pmulti']) | ||
1777 | |||
1778 | def test_multi_class_selection(self): | ||
1779 | for selector in ('.class1.class3', '.class3.class2', | ||
1780 | '.class1.class2.class3'): | ||
1781 | self.assertSelects(selector, ['pmulti']) | ||
1782 | |||
1783 | def test_child_selector(self): | ||
1784 | self.assertSelects('.s1 > a', ['s1a1', 's1a2']) | ||
1785 | self.assertSelects('.s1 > a span', ['s1a2s1']) | ||
1786 | |||
1787 | def test_child_selector_id(self): | ||
1788 | self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1']) | ||
1789 | |||
1790 | def test_attribute_equals(self): | ||
1791 | self.assertSelectMultiple( | ||
1792 | ('p[class="onep"]', ['p1']), | ||
1793 | ('p[id="p1"]', ['p1']), | ||
1794 | ('[class="onep"]', ['p1']), | ||
1795 | ('[id="p1"]', ['p1']), | ||
1796 | ('link[rel="stylesheet"]', ['l1']), | ||
1797 | ('link[type="text/css"]', ['l1']), | ||
1798 | ('link[href="blah.css"]', ['l1']), | ||
1799 | ('link[href="no-blah.css"]', []), | ||
1800 | ('[rel="stylesheet"]', ['l1']), | ||
1801 | ('[type="text/css"]', ['l1']), | ||
1802 | ('[href="blah.css"]', ['l1']), | ||
1803 | ('[href="no-blah.css"]', []), | ||
1804 | ('p[href="no-blah.css"]', []), | ||
1805 | ('[href="no-blah.css"]', []), | ||
1806 | ) | ||
1807 | |||
1808 | def test_attribute_tilde(self): | ||
1809 | self.assertSelectMultiple( | ||
1810 | ('p[class~="class1"]', ['pmulti']), | ||
1811 | ('p[class~="class2"]', ['pmulti']), | ||
1812 | ('p[class~="class3"]', ['pmulti']), | ||
1813 | ('[class~="class1"]', ['pmulti']), | ||
1814 | ('[class~="class2"]', ['pmulti']), | ||
1815 | ('[class~="class3"]', ['pmulti']), | ||
1816 | ('a[rel~="friend"]', ['bob']), | ||
1817 | ('a[rel~="met"]', ['bob']), | ||
1818 | ('[rel~="friend"]', ['bob']), | ||
1819 | ('[rel~="met"]', ['bob']), | ||
1820 | ) | ||
1821 | |||
1822 | def test_attribute_startswith(self): | ||
1823 | self.assertSelectMultiple( | ||
1824 | ('[rel^="style"]', ['l1']), | ||
1825 | ('link[rel^="style"]', ['l1']), | ||
1826 | ('notlink[rel^="notstyle"]', []), | ||
1827 | ('[rel^="notstyle"]', []), | ||
1828 | ('link[rel^="notstyle"]', []), | ||
1829 | ('link[href^="bla"]', ['l1']), | ||
1830 | ('a[href^="http://"]', ['bob', 'me']), | ||
1831 | ('[href^="http://"]', ['bob', 'me']), | ||
1832 | ('[id^="p"]', ['pmulti', 'p1']), | ||
1833 | ('[id^="m"]', ['me', 'main']), | ||
1834 | ('div[id^="m"]', ['main']), | ||
1835 | ('a[id^="m"]', ['me']), | ||
1836 | ('div[data-tag^="dashed"]', ['data1']) | ||
1837 | ) | ||
1838 | |||
1839 | def test_attribute_endswith(self): | ||
1840 | self.assertSelectMultiple( | ||
1841 | ('[href$=".css"]', ['l1']), | ||
1842 | ('link[href$=".css"]', ['l1']), | ||
1843 | ('link[id$="1"]', ['l1']), | ||
1844 | ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']), | ||
1845 | ('div[id$="1"]', ['data1']), | ||
1846 | ('[id$="noending"]', []), | ||
1847 | ) | ||
1848 | |||
1849 | def test_attribute_contains(self): | ||
1850 | self.assertSelectMultiple( | ||
1851 | # From test_attribute_startswith | ||
1852 | ('[rel*="style"]', ['l1']), | ||
1853 | ('link[rel*="style"]', ['l1']), | ||
1854 | ('notlink[rel*="notstyle"]', []), | ||
1855 | ('[rel*="notstyle"]', []), | ||
1856 | ('link[rel*="notstyle"]', []), | ||
1857 | ('link[href*="bla"]', ['l1']), | ||
1858 | ('[href*="http://"]', ['bob', 'me']), | ||
1859 | ('[id*="p"]', ['pmulti', 'p1']), | ||
1860 | ('div[id*="m"]', ['main']), | ||
1861 | ('a[id*="m"]', ['me']), | ||
1862 | # From test_attribute_endswith | ||
1863 | ('[href*=".css"]', ['l1']), | ||
1864 | ('link[href*=".css"]', ['l1']), | ||
1865 | ('link[id*="1"]', ['l1']), | ||
1866 | ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']), | ||
1867 | ('div[id*="1"]', ['data1']), | ||
1868 | ('[id*="noending"]', []), | ||
1869 | # New for this test | ||
1870 | ('[href*="."]', ['bob', 'me', 'l1']), | ||
1871 | ('a[href*="."]', ['bob', 'me']), | ||
1872 | ('link[href*="."]', ['l1']), | ||
1873 | ('div[id*="n"]', ['main', 'inner']), | ||
1874 | ('div[id*="nn"]', ['inner']), | ||
1875 | ('div[data-tag*="edval"]', ['data1']) | ||
1876 | ) | ||
1877 | |||
1878 | def test_attribute_exact_or_hypen(self): | ||
1879 | self.assertSelectMultiple( | ||
1880 | ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), | ||
1881 | ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), | ||
1882 | ('p[lang|="fr"]', ['lang-fr']), | ||
1883 | ('p[lang|="gb"]', []), | ||
1884 | ) | ||
1885 | |||
1886 | def test_attribute_exists(self): | ||
1887 | self.assertSelectMultiple( | ||
1888 | ('[rel]', ['l1', 'bob', 'me']), | ||
1889 | ('link[rel]', ['l1']), | ||
1890 | ('a[rel]', ['bob', 'me']), | ||
1891 | ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']), | ||
1892 | ('p[class]', ['p1', 'pmulti']), | ||
1893 | ('[blah]', []), | ||
1894 | ('p[blah]', []), | ||
1895 | ('div[data-tag]', ['data1']) | ||
1896 | ) | ||
1897 | |||
1898 | def test_unsupported_pseudoclass(self): | ||
1899 | self.assertRaises( | ||
1900 | NotImplementedError, self.soup.select, "a:no-such-pseudoclass") | ||
1901 | |||
1902 | self.assertRaises( | ||
1903 | NotImplementedError, self.soup.select, "a:nth-of-type(a)") | ||
1904 | |||
1905 | |||
1906 | def test_nth_of_type(self): | ||
1907 | # Try to select first paragraph | ||
1908 | els = self.soup.select('div#inner p:nth-of-type(1)') | ||
1909 | self.assertEqual(len(els), 1) | ||
1910 | self.assertEqual(els[0].string, 'Some text') | ||
1911 | |||
1912 | # Try to select third paragraph | ||
1913 | els = self.soup.select('div#inner p:nth-of-type(3)') | ||
1914 | self.assertEqual(len(els), 1) | ||
1915 | self.assertEqual(els[0].string, 'Another') | ||
1916 | |||
1917 | # Try to select (non-existent!) fourth paragraph | ||
1918 | els = self.soup.select('div#inner p:nth-of-type(4)') | ||
1919 | self.assertEqual(len(els), 0) | ||
1920 | |||
1921 | # Pass in an invalid value. | ||
1922 | self.assertRaises( | ||
1923 | ValueError, self.soup.select, 'div p:nth-of-type(0)') | ||
1924 | |||
1925 | def test_nth_of_type_direct_descendant(self): | ||
1926 | els = self.soup.select('div#inner > p:nth-of-type(1)') | ||
1927 | self.assertEqual(len(els), 1) | ||
1928 | self.assertEqual(els[0].string, 'Some text') | ||
1929 | |||
1930 | def test_id_child_selector_nth_of_type(self): | ||
1931 | self.assertSelects('#inner > p:nth-of-type(2)', ['p1']) | ||
1932 | |||
1933 | def test_select_on_element(self): | ||
1934 | # Other tests operate on the tree; this operates on an element | ||
1935 | # within the tree. | ||
1936 | inner = self.soup.find("div", id="main") | ||
1937 | selected = inner.select("div") | ||
1938 | # The <div id="inner"> tag was selected. The <div id="footer"> | ||
1939 | # tag was not. | ||
1940 | self.assertSelectsIDs(selected, ['inner', 'data1']) | ||
1941 | |||
1942 | def test_overspecified_child_id(self): | ||
1943 | self.assertSelects(".fancy #inner", ['inner']) | ||
1944 | self.assertSelects(".normal #inner", []) | ||
1945 | |||
1946 | def test_adjacent_sibling_selector(self): | ||
1947 | self.assertSelects('#p1 + h2', ['header2']) | ||
1948 | self.assertSelects('#p1 + h2 + p', ['pmulti']) | ||
1949 | self.assertSelects('#p1 + #header2 + .class1', ['pmulti']) | ||
1950 | self.assertEqual([], self.soup.select('#p1 + p')) | ||
1951 | |||
1952 | def test_general_sibling_selector(self): | ||
1953 | self.assertSelects('#p1 ~ h2', ['header2', 'header3']) | ||
1954 | self.assertSelects('#p1 ~ #header2', ['header2']) | ||
1955 | self.assertSelects('#p1 ~ h2 + a', ['me']) | ||
1956 | self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me']) | ||
1957 | self.assertEqual([], self.soup.select('#inner ~ h2')) | ||
1958 | |||
1959 | def test_dangling_combinator(self): | ||
1960 | self.assertRaises(ValueError, self.soup.select, 'h1 >') | ||
1961 | |||
1962 | def test_sibling_combinator_wont_select_same_tag_twice(self): | ||
1963 | self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr']) | ||
1964 | |||
1965 | # Test the selector grouping operator (the comma) | ||
1966 | def test_multiple_select(self): | ||
1967 | self.assertSelects('x, y', ['xid', 'yid']) | ||
1968 | |||
1969 | def test_multiple_select_with_no_space(self): | ||
1970 | self.assertSelects('x,y', ['xid', 'yid']) | ||
1971 | |||
1972 | def test_multiple_select_with_more_space(self): | ||
1973 | self.assertSelects('x, y', ['xid', 'yid']) | ||
1974 | |||
1975 | def test_multiple_select_duplicated(self): | ||
1976 | self.assertSelects('x, x', ['xid']) | ||
1977 | |||
1978 | def test_multiple_select_sibling(self): | ||
1979 | self.assertSelects('x, y ~ p[lang=fr]', ['xid', 'lang-fr']) | ||
1980 | |||
1981 | def test_multiple_select_tag_and_direct_descendant(self): | ||
1982 | self.assertSelects('x, y > z', ['xid', 'zidb']) | ||
1983 | |||
1984 | def test_multiple_select_direct_descendant_and_tags(self): | ||
1985 | self.assertSelects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac']) | ||
1986 | |||
1987 | def test_multiple_select_indirect_descendant(self): | ||
1988 | self.assertSelects('div x,y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac']) | ||
1989 | |||
1990 | def test_invalid_multiple_select(self): | ||
1991 | self.assertRaises(ValueError, self.soup.select, ',x, y') | ||
1992 | self.assertRaises(ValueError, self.soup.select, 'x,,y') | ||
1993 | |||
1994 | def test_multiple_select_attrs(self): | ||
1995 | self.assertSelects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb']) | ||
1996 | |||
1997 | def test_multiple_select_ids(self): | ||
1998 | self.assertSelects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab']) | ||
1999 | |||
2000 | def test_multiple_select_nested(self): | ||
2001 | self.assertSelects('body > div > x, y > z', ['xid', 'zidb']) | ||
2002 | |||
2003 | |||
2004 | |||
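
Taken together, the selector tests above sketch the public surface of select() and select_one(). A minimal illustration against a stock bs4 install (the markup below is invented for this example, not taken from the patch):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup(
        '<div id="main"><p class="onep" id="p1">Some more text</p></div>',
        'html.parser')

    # Tag, id, class and child combinators compose as in the tests above.
    print(soup.select('div#main > p.onep'))
    # [<p class="onep" id="p1">Some more text</p>]

    # select_one() returns None rather than raising when nothing matches.
    print(soup.select_one('nonexistenttag'))  # None
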
diff --git a/bitbake/lib/hashserv/__init__.py b/bitbake/lib/hashserv/__init__.py index 74367eb6b4..ac891e0174 100644 --- a/bitbake/lib/hashserv/__init__.py +++ b/bitbake/lib/hashserv/__init__.py | |||
@@ -13,6 +13,7 @@ from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS | |||
13 | 13 | ||
14 | User = namedtuple("User", ("username", "permissions")) | 14 | User = namedtuple("User", ("username", "permissions")) |
15 | 15 | ||
16 | |||
16 | def create_server( | 17 | def create_server( |
17 | addr, | 18 | addr, |
18 | dbname, | 19 | dbname, |
@@ -25,6 +26,7 @@ def create_server( | |||
25 | anon_perms=None, | 26 | anon_perms=None, |
26 | admin_username=None, | 27 | admin_username=None, |
27 | admin_password=None, | 28 | admin_password=None, |
29 | reuseport=False, | ||
28 | ): | 30 | ): |
29 | def sqlite_engine(): | 31 | def sqlite_engine(): |
30 | from .sqlite import DatabaseEngine | 32 | from .sqlite import DatabaseEngine |
@@ -60,9 +62,9 @@ def create_server( | |||
60 | s.start_unix_server(*a) | 62 | s.start_unix_server(*a) |
61 | elif typ == ADDR_TYPE_WS: | 63 | elif typ == ADDR_TYPE_WS: |
62 | url = urlparse(a[0]) | 64 | url = urlparse(a[0]) |
63 | s.start_websocket_server(url.hostname, url.port) | 65 | s.start_websocket_server(url.hostname, url.port, reuseport=reuseport) |
64 | else: | 66 | else: |
65 | s.start_tcp_server(*a) | 67 | s.start_tcp_server(*a, reuseport=reuseport) |
66 | 68 | ||
67 | return s | 69 | return s |
68 | 70 | ||
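
The new reuseport flag threads through to whichever listener the address type selects (TCP or websocket). A hypothetical launch of a sqlite-backed server with it enabled, assuming the usual serve_forever() entry point; the address and database path are illustrative:

    from hashserv import create_server

    # SO_REUSEPORT lets several server processes bind the same TCP port,
    # so the kernel can load-balance incoming connections between them.
    server = create_server(
        "localhost:8686",
        "/var/lib/hashserv/hashes.db",
        reuseport=True,
    )
    server.serve_forever()
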
diff --git a/bitbake/lib/hashserv/client.py b/bitbake/lib/hashserv/client.py index 0b254beddd..8cb18050a6 100644 --- a/bitbake/lib/hashserv/client.py +++ b/bitbake/lib/hashserv/client.py | |||
@@ -5,6 +5,7 @@ | |||
5 | 5 | ||
6 | import logging | 6 | import logging |
7 | import socket | 7 | import socket |
8 | import asyncio | ||
8 | import bb.asyncrpc | 9 | import bb.asyncrpc |
9 | import json | 10 | import json |
10 | from . import create_async_client | 11 | from . import create_async_client |
@@ -13,10 +14,71 @@ from . import create_async_client | |||
13 | logger = logging.getLogger("hashserv.client") | 14 | logger = logging.getLogger("hashserv.client") |
14 | 15 | ||
15 | 16 | ||
17 | class Batch(object): | ||
18 | def __init__(self): | ||
19 | self.done = False | ||
20 | self.cond = asyncio.Condition() | ||
21 | self.pending = [] | ||
22 | self.results = [] | ||
23 | self.sent_count = 0 | ||
24 | |||
25 | async def recv(self, socket): | ||
26 | while True: | ||
27 | async with self.cond: | ||
28 | await self.cond.wait_for(lambda: self.pending or self.done) | ||
29 | |||
30 | if not self.pending: | ||
31 | if self.done: | ||
32 | return | ||
33 | continue | ||
34 | |||
35 | r = await socket.recv() | ||
36 | self.results.append(r) | ||
37 | |||
38 | async with self.cond: | ||
39 | self.pending.pop(0) | ||
40 | |||
41 | async def send(self, socket, msgs): | ||
42 | try: | ||
43 | # In the event of a restart due to a reconnect, all in-flight | ||
44 | # messages need to be resent first to keep the result count in sync | ||
45 | for m in self.pending: | ||
46 | await socket.send(m) | ||
47 | |||
48 | for m in msgs: | ||
49 | # Add the message to the pending list before attempting to send | ||
50 | # it so that if the send fails it will be retried | ||
51 | async with self.cond: | ||
52 | self.pending.append(m) | ||
53 | self.cond.notify() | ||
54 | self.sent_count += 1 | ||
55 | |||
56 | await socket.send(m) | ||
57 | |||
58 | finally: | ||
59 | async with self.cond: | ||
60 | self.done = True | ||
61 | self.cond.notify() | ||
62 | |||
63 | async def process(self, socket, msgs): | ||
64 | await asyncio.gather( | ||
65 | self.recv(socket), | ||
66 | self.send(socket, msgs), | ||
67 | ) | ||
68 | |||
69 | if len(self.results) != self.sent_count: | ||
70 | raise ValueError( | ||
71 | f"Expected result count {len(self.results)}. Expected {self.sent_count}" | ||
72 | ) | ||
73 | |||
74 | return self.results | ||
75 | |||
76 | |||
16 | class AsyncClient(bb.asyncrpc.AsyncClient): | 77 | class AsyncClient(bb.asyncrpc.AsyncClient): |
17 | MODE_NORMAL = 0 | 78 | MODE_NORMAL = 0 |
18 | MODE_GET_STREAM = 1 | 79 | MODE_GET_STREAM = 1 |
19 | MODE_EXIST_STREAM = 2 | 80 | MODE_EXIST_STREAM = 2 |
81 | MODE_MARK_STREAM = 3 | ||
20 | 82 | ||
21 | def __init__(self, username=None, password=None): | 83 | def __init__(self, username=None, password=None): |
22 | super().__init__("OEHASHEQUIV", "1.1", logger) | 84 | super().__init__("OEHASHEQUIV", "1.1", logger) |
@@ -36,32 +98,52 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
36 | if become: | 98 | if become: |
37 | await self.become_user(become) | 99 | await self.become_user(become) |
38 | 100 | ||
39 | async def send_stream(self, mode, msg): | 101 | async def send_stream_batch(self, mode, msgs): |
102 | """ | ||
103 | Does a "batch" process of stream messages. This sends the query | ||
104 | messages as fast as possible, and simultaneously attempts to read the | ||
105 | messages back. This helps to mitigate the effects of latency to the | ||
106 | hash equivalence server be allowing multiple queries to be "in-flight" | ||
107 | at once | ||
108 | |||
109 | The implementation does more complicated tracking using a count of sent | ||
110 | messages so that `msgs` can be a generator (i.e. its length is | ||
111 | unknown) | ||
112 | |||
113 | """ | ||
114 | |||
115 | b = Batch() | ||
116 | |||
40 | async def proc(): | 117 | async def proc(): |
118 | nonlocal b | ||
119 | |||
41 | await self._set_mode(mode) | 120 | await self._set_mode(mode) |
42 | await self.socket.send(msg) | 121 | return await b.process(self.socket, msgs) |
43 | return await self.socket.recv() | ||
44 | 122 | ||
45 | return await self._send_wrapper(proc) | 123 | return await self._send_wrapper(proc) |
46 | 124 | ||
47 | async def invoke(self, *args, **kwargs): | 125 | async def invoke(self, *args, skip_mode=False, **kwargs): |
48 | # It's OK if connection errors cause a failure here, because the mode | 126 | # It's OK if connection errors cause a failure here, because the mode |
49 | # is also reset to normal on a new connection | 127 | # is also reset to normal on a new connection |
50 | await self._set_mode(self.MODE_NORMAL) | 128 | if not skip_mode: |
129 | await self._set_mode(self.MODE_NORMAL) | ||
51 | return await super().invoke(*args, **kwargs) | 130 | return await super().invoke(*args, **kwargs) |
52 | 131 | ||
53 | async def _set_mode(self, new_mode): | 132 | async def _set_mode(self, new_mode): |
54 | async def stream_to_normal(): | 133 | async def stream_to_normal(): |
134 | # Check if already in normal mode (e.g. due to a connection reset) | ||
135 | if self.mode == self.MODE_NORMAL: | ||
136 | return "ok" | ||
55 | await self.socket.send("END") | 137 | await self.socket.send("END") |
56 | return await self.socket.recv() | 138 | return await self.socket.recv() |
57 | 139 | ||
58 | async def normal_to_stream(command): | 140 | async def normal_to_stream(command): |
59 | r = await self.invoke({command: None}) | 141 | r = await self.invoke({command: None}, skip_mode=True) |
60 | if r != "ok": | 142 | if r != "ok": |
143 | self.check_invoke_error(r) | ||
61 | raise ConnectionError( | 144 | raise ConnectionError( |
62 | f"Unable to transition to stream mode: Bad response from server {r!r}" | 145 | f"Unable to transition to stream mode: Bad response from server {r!r}" |
63 | ) | 146 | ) |
64 | |||
65 | self.logger.debug("Mode is now %s", command) | 147 | self.logger.debug("Mode is now %s", command) |
66 | 148 | ||
67 | if new_mode == self.mode: | 149 | if new_mode == self.mode: |
@@ -83,16 +165,23 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
83 | await normal_to_stream("get-stream") | 165 | await normal_to_stream("get-stream") |
84 | elif new_mode == self.MODE_EXIST_STREAM: | 166 | elif new_mode == self.MODE_EXIST_STREAM: |
85 | await normal_to_stream("exists-stream") | 167 | await normal_to_stream("exists-stream") |
168 | elif new_mode == self.MODE_MARK_STREAM: | ||
169 | await normal_to_stream("gc-mark-stream") | ||
86 | elif new_mode != self.MODE_NORMAL: | 170 | elif new_mode != self.MODE_NORMAL: |
87 | raise Exception(f"Undefined mode transition {self.mode!r} -> {new_mode!r}") | 171 | raise Exception(f"Undefined mode transition {self.mode!r} -> {new_mode!r}") |
88 | 172 | ||
89 | self.mode = new_mode | 173 | self.mode = new_mode |
90 | 174 | ||
91 | async def get_unihash(self, method, taskhash): | 175 | async def get_unihash(self, method, taskhash): |
92 | r = await self.send_stream(self.MODE_GET_STREAM, "%s %s" % (method, taskhash)) | 176 | r = await self.get_unihash_batch([(method, taskhash)]) |
93 | if not r: | 177 | return r[0] |
94 | return None | 178 | |
95 | return r | 179 | async def get_unihash_batch(self, args): |
180 | result = await self.send_stream_batch( | ||
181 | self.MODE_GET_STREAM, | ||
182 | (f"{method} {taskhash}" for method, taskhash in args), | ||
183 | ) | ||
184 | return [r if r else None for r in result] | ||
96 | 185 | ||
97 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): | 186 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): |
98 | m = extra.copy() | 187 | m = extra.copy() |
@@ -115,8 +204,12 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
115 | ) | 204 | ) |
116 | 205 | ||
117 | async def unihash_exists(self, unihash): | 206 | async def unihash_exists(self, unihash): |
118 | r = await self.send_stream(self.MODE_EXIST_STREAM, unihash) | 207 | r = await self.unihash_exists_batch([unihash]) |
119 | return r == "true" | 208 | return r[0] |
209 | |||
210 | async def unihash_exists_batch(self, unihashes): | ||
211 | result = await self.send_stream_batch(self.MODE_EXIST_STREAM, unihashes) | ||
212 | return [r == "true" for r in result] | ||
120 | 213 | ||
121 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): | 214 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): |
122 | return await self.invoke( | 215 | return await self.invoke( |
@@ -216,6 +309,24 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
216 | """ | 309 | """ |
217 | return await self.invoke({"gc-mark": {"mark": mark, "where": where}}) | 310 | return await self.invoke({"gc-mark": {"mark": mark, "where": where}}) |
218 | 311 | ||
312 | async def gc_mark_stream(self, mark, rows): | ||
313 | """ | ||
314 | Similar to `gc-mark`, but accepts a list of "where" key-value pair | ||
315 | conditions. It utilizes stream mode to mark hashes, which helps reduce | ||
316 | the impact of latency when communicating with the hash equivalence | ||
317 | server. | ||
318 | """ | ||
319 | def row_to_dict(row): | ||
320 | pairs = row.split() | ||
321 | return dict(zip(pairs[::2], pairs[1::2])) | ||
322 | |||
323 | responses = await self.send_stream_batch( | ||
324 | self.MODE_MARK_STREAM, | ||
325 | (json.dumps({"mark": mark, "where": row_to_dict(row)}) for row in rows), | ||
326 | ) | ||
327 | |||
328 | return {"count": sum(int(json.loads(r)["count"]) for r in responses)} | ||
329 | |||
219 | async def gc_sweep(self, mark): | 330 | async def gc_sweep(self, mark): |
220 | """ | 331 | """ |
221 | Finishes garbage collection for "mark". All unihash entries that have | 332 | Finishes garbage collection for "mark". All unihash entries that have |
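
row_to_dict() expects each row as a flat, space-separated "key value key value ..." string; the alternating-slice zip below is exactly how it builds the "where" condition (the hash value is illustrative):

    row = "method TestMethod unihash 218e57509998197d570e2c98512d0105985dffc9"
    pairs = row.split()
    where = dict(zip(pairs[::2], pairs[1::2]))
    # {'method': 'TestMethod',
    #  'unihash': '218e57509998197d570e2c98512d0105985dffc9'}
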
@@ -237,10 +348,12 @@ class Client(bb.asyncrpc.Client): | |||
237 | "connect_tcp", | 348 | "connect_tcp", |
238 | "connect_websocket", | 349 | "connect_websocket", |
239 | "get_unihash", | 350 | "get_unihash", |
351 | "get_unihash_batch", | ||
240 | "report_unihash", | 352 | "report_unihash", |
241 | "report_unihash_equiv", | 353 | "report_unihash_equiv", |
242 | "get_taskhash", | 354 | "get_taskhash", |
243 | "unihash_exists", | 355 | "unihash_exists", |
356 | "unihash_exists_batch", | ||
244 | "get_outhash", | 357 | "get_outhash", |
245 | "get_stats", | 358 | "get_stats", |
246 | "reset_stats", | 359 | "reset_stats", |
@@ -259,88 +372,9 @@ class Client(bb.asyncrpc.Client): | |||
259 | "get_db_query_columns", | 372 | "get_db_query_columns", |
260 | "gc_status", | 373 | "gc_status", |
261 | "gc_mark", | 374 | "gc_mark", |
375 | "gc_mark_stream", | ||
262 | "gc_sweep", | 376 | "gc_sweep", |
263 | ) | 377 | ) |
264 | 378 | ||
265 | def _get_async_client(self): | 379 | def _get_async_client(self): |
266 | return AsyncClient(self.username, self.password) | 380 | return AsyncClient(self.username, self.password) |
267 | |||
268 | |||
269 | class ClientPool(bb.asyncrpc.ClientPool): | ||
270 | def __init__( | ||
271 | self, | ||
272 | address, | ||
273 | max_clients, | ||
274 | *, | ||
275 | username=None, | ||
276 | password=None, | ||
277 | become=None, | ||
278 | ): | ||
279 | super().__init__(max_clients) | ||
280 | self.address = address | ||
281 | self.username = username | ||
282 | self.password = password | ||
283 | self.become = become | ||
284 | |||
285 | async def _new_client(self): | ||
286 | client = await create_async_client( | ||
287 | self.address, | ||
288 | username=self.username, | ||
289 | password=self.password, | ||
290 | ) | ||
291 | if self.become: | ||
292 | await client.become_user(self.become) | ||
293 | return client | ||
294 | |||
295 | def _run_key_tasks(self, queries, call): | ||
296 | results = {key: None for key in queries.keys()} | ||
297 | |||
298 | def make_task(key, args): | ||
299 | async def task(client): | ||
300 | nonlocal results | ||
301 | unihash = await call(client, args) | ||
302 | results[key] = unihash | ||
303 | |||
304 | return task | ||
305 | |||
306 | def gen_tasks(): | ||
307 | for key, args in queries.items(): | ||
308 | yield make_task(key, args) | ||
309 | |||
310 | self.run_tasks(gen_tasks()) | ||
311 | return results | ||
312 | |||
313 | def get_unihashes(self, queries): | ||
314 | """ | ||
315 | Query multiple unihashes in parallel. | ||
316 | |||
317 | The queries argument is a dictionary with arbitrary key. The values | ||
318 | must be a tuple of (method, taskhash). | ||
319 | |||
320 | Returns a dictionary with a corresponding key for each input key, and | ||
321 | the value is the queried unihash (which might be none if the query | ||
322 | failed) | ||
323 | """ | ||
324 | |||
325 | async def call(client, args): | ||
326 | method, taskhash = args | ||
327 | return await client.get_unihash(method, taskhash) | ||
328 | |||
329 | return self._run_key_tasks(queries, call) | ||
330 | |||
331 | def unihashes_exist(self, queries): | ||
332 | """ | ||
333 | Query multiple unihash existence checks in parallel. | ||
334 | |||
335 | The queries argument is a dictionary with arbitrary key. The values | ||
336 | must be a unihash. | ||
337 | |||
338 | Returns a dictionary with a corresponding key for each input key, and | ||
339 | the value is True or False if the unihash is known by the server (or | ||
340 | None if there was a failure) | ||
341 | """ | ||
342 | |||
343 | async def call(client, unihash): | ||
344 | return await client.unihash_exists(unihash) | ||
345 | |||
346 | return self._run_key_tasks(queries, call) | ||
diff --git a/bitbake/lib/hashserv/server.py b/bitbake/lib/hashserv/server.py index 68f64f983b..58f95c7bcd 100644 --- a/bitbake/lib/hashserv/server.py +++ b/bitbake/lib/hashserv/server.py | |||
@@ -10,6 +10,7 @@ import math | |||
10 | import time | 10 | import time |
11 | import os | 11 | import os |
12 | import base64 | 12 | import base64 |
13 | import json | ||
13 | import hashlib | 14 | import hashlib |
14 | from . import create_async_client | 15 | from . import create_async_client |
15 | import bb.asyncrpc | 16 | import bb.asyncrpc |
@@ -256,6 +257,7 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): | |||
256 | "backfill-wait": self.handle_backfill_wait, | 257 | "backfill-wait": self.handle_backfill_wait, |
257 | "remove": self.handle_remove, | 258 | "remove": self.handle_remove, |
258 | "gc-mark": self.handle_gc_mark, | 259 | "gc-mark": self.handle_gc_mark, |
260 | "gc-mark-stream": self.handle_gc_mark_stream, | ||
259 | "gc-sweep": self.handle_gc_sweep, | 261 | "gc-sweep": self.handle_gc_sweep, |
260 | "gc-status": self.handle_gc_status, | 262 | "gc-status": self.handle_gc_status, |
261 | "clean-unused": self.handle_clean_unused, | 263 | "clean-unused": self.handle_clean_unused, |
@@ -584,6 +586,33 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): | |||
584 | return {"count": await self.db.gc_mark(mark, condition)} | 586 | return {"count": await self.db.gc_mark(mark, condition)} |
585 | 587 | ||
586 | @permissions(DB_ADMIN_PERM) | 588 | @permissions(DB_ADMIN_PERM) |
589 | async def handle_gc_mark_stream(self, request): | ||
590 | async def handler(line): | ||
591 | try: | ||
592 | decoded_line = json.loads(line) | ||
593 | except json.JSONDecodeError as exc: | ||
594 | raise bb.asyncrpc.InvokeError( | ||
595 | "Could not decode JSONL input '%s'" % line | ||
596 | ) from exc | ||
597 | |||
598 | try: | ||
599 | mark = decoded_line["mark"] | ||
600 | condition = decoded_line["where"] | ||
601 | if not isinstance(mark, str): | ||
602 | raise TypeError("Bad mark type %s" % type(mark)) | ||
603 | |||
604 | if not isinstance(condition, dict): | ||
605 | raise TypeError("Bad condition type %s" % type(condition)) | ||
606 | except KeyError as exc: | ||
607 | raise bb.asyncrpc.InvokeError( | ||
608 | "Input line is missing key '%s' " % exc | ||
609 | ) from exc | ||
610 | |||
611 | return json.dumps({"count": await self.db.gc_mark(mark, condition)}) | ||
612 | |||
613 | return await self._stream_handler(handler) | ||
614 | |||
615 | @permissions(DB_ADMIN_PERM) | ||
587 | async def handle_gc_sweep(self, request): | 616 | async def handle_gc_sweep(self, request): |
588 | mark = request["mark"] | 617 | mark = request["mark"] |
589 | 618 | ||
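
Each line the new handler consumes is a self-contained JSON document carrying a string "mark" and a dict "where" condition, and each reply line reports the rows marked. A sketch of one request line as the validation above expects (the mark and hash values are illustrative):

    import json

    line = json.dumps({
        "mark": "ABC",
        "where": {"unihash": "f37918cc02eb5a520b1aff86faacbc0a38124646"},
    })
    decoded = json.loads(line)
    assert isinstance(decoded["mark"], str)    # else the TypeError above
    assert isinstance(decoded["where"], dict)  # else the TypeError above
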
diff --git a/bitbake/lib/hashserv/sqlite.py b/bitbake/lib/hashserv/sqlite.py index da2e844a03..976504d7f4 100644 --- a/bitbake/lib/hashserv/sqlite.py +++ b/bitbake/lib/hashserv/sqlite.py | |||
@@ -4,6 +4,7 @@ | |||
4 | # | 4 | # |
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | from datetime import datetime, timezone | ||
7 | import sqlite3 | 8 | import sqlite3 |
8 | import logging | 9 | import logging |
9 | from contextlib import closing | 10 | from contextlib import closing |
@@ -53,6 +54,22 @@ CONFIG_TABLE_DEFINITION = ( | |||
53 | CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION) | 54 | CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION) |
54 | 55 | ||
55 | 56 | ||
57 | def adapt_datetime_iso(val): | ||
58 | """Adapt datetime.datetime to UTC ISO 8601 date.""" | ||
59 | return val.astimezone(timezone.utc).isoformat() | ||
60 | |||
61 | |||
62 | sqlite3.register_adapter(datetime, adapt_datetime_iso) | ||
63 | |||
64 | |||
65 | def convert_datetime(val): | ||
66 | """Convert ISO 8601 datetime to datetime.datetime object.""" | ||
67 | return datetime.fromisoformat(val.decode()) | ||
68 | |||
69 | |||
70 | sqlite3.register_converter("DATETIME", convert_datetime) | ||
71 | |||
72 | |||
56 | def _make_table(cursor, name, definition): | 73 | def _make_table(cursor, name, definition): |
57 | cursor.execute( | 74 | cursor.execute( |
58 | """ | 75 | """ |
diff --git a/bitbake/lib/hashserv/tests.py b/bitbake/lib/hashserv/tests.py index 0809453cf8..da3f8e0884 100644 --- a/bitbake/lib/hashserv/tests.py +++ b/bitbake/lib/hashserv/tests.py | |||
@@ -8,7 +8,6 @@ | |||
8 | from . import create_server, create_client | 8 | from . import create_server, create_client |
9 | from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS | 9 | from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS |
10 | from bb.asyncrpc import InvokeError | 10 | from bb.asyncrpc import InvokeError |
11 | from .client import ClientPool | ||
12 | import hashlib | 11 | import hashlib |
13 | import logging | 12 | import logging |
14 | import multiprocessing | 13 | import multiprocessing |
@@ -94,9 +93,6 @@ class HashEquivalenceTestSetup(object): | |||
94 | return self.start_client(self.auth_server_address, user["username"], user["token"]) | 93 | return self.start_client(self.auth_server_address, user["username"], user["token"]) |
95 | 94 | ||
96 | def setUp(self): | 95 | def setUp(self): |
97 | if sys.version_info < (3, 5, 0): | ||
98 | self.skipTest('Python 3.5 or later required') | ||
99 | |||
100 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') | 96 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') |
101 | self.addCleanup(self.temp_dir.cleanup) | 97 | self.addCleanup(self.temp_dir.cleanup) |
102 | 98 | ||
@@ -555,8 +551,7 @@ class HashEquivalenceCommonTests(object): | |||
555 | # shares a taskhash with Task 2 | 551 | # shares a taskhash with Task 2 |
556 | self.assertClientGetHash(self.client, taskhash2, unihash2) | 552 | self.assertClientGetHash(self.client, taskhash2, unihash2) |
557 | 553 | ||
558 | 554 | def test_get_unihash_batch(self): | |
559 | def test_client_pool_get_unihashes(self): | ||
560 | TEST_INPUT = ( | 555 | TEST_INPUT = ( |
561 | # taskhash outhash unihash | 556 | # taskhash outhash unihash |
562 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | 557 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), |
@@ -573,28 +568,27 @@ class HashEquivalenceCommonTests(object): | |||
573 | "6b6be7a84ab179b4240c4302518dc3f6", | 568 | "6b6be7a84ab179b4240c4302518dc3f6", |
574 | ) | 569 | ) |
575 | 570 | ||
576 | with ClientPool(self.server_address, 10) as client_pool: | 571 | for taskhash, outhash, unihash in TEST_INPUT: |
577 | for taskhash, outhash, unihash in TEST_INPUT: | 572 | self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
578 | self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | 573 | |
579 | 574 | ||
580 | query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)} | 575 | result = self.client.get_unihash_batch( |
581 | for idx, taskhash in enumerate(EXTRA_QUERIES): | 576 | [(self.METHOD, data[0]) for data in TEST_INPUT] + |
582 | query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash) | 577 | [(self.METHOD, e) for e in EXTRA_QUERIES] |
583 | 578 | ) | |
584 | result = client_pool.get_unihashes(query) | 579 | |
585 | 580 | self.assertListEqual(result, [ | |
586 | self.assertDictEqual(result, { | 581 | "218e57509998197d570e2c98512d0105985dffc9", |
587 | 0: "218e57509998197d570e2c98512d0105985dffc9", | 582 | "218e57509998197d570e2c98512d0105985dffc9", |
588 | 1: "218e57509998197d570e2c98512d0105985dffc9", | 583 | "218e57509998197d570e2c98512d0105985dffc9", |
589 | 2: "218e57509998197d570e2c98512d0105985dffc9", | 584 | "3b5d3d83f07f259e9086fcb422c855286e18a57d", |
590 | 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d", | 585 | "f46d3fbb439bd9b921095da657a4de906510d2cd", |
591 | 4: "f46d3fbb439bd9b921095da657a4de906510d2cd", | 586 | "f46d3fbb439bd9b921095da657a4de906510d2cd", |
592 | 5: "f46d3fbb439bd9b921095da657a4de906510d2cd", | 587 | "05d2a63c81e32f0a36542ca677e8ad852365c538", |
593 | 6: "05d2a63c81e32f0a36542ca677e8ad852365c538", | 588 | None, |
594 | 7: None, | 589 | ]) |
595 | }) | ||
596 | 590 | ||
597 | def test_client_pool_unihash_exists(self): | 591 | def test_unihash_exists_batch(self): |
598 | TEST_INPUT = ( | 592 | TEST_INPUT = ( |
599 | # taskhash outhash unihash | 593 | # taskhash outhash unihash |
600 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | 594 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), |
@@ -614,28 +608,24 @@ class HashEquivalenceCommonTests(object): | |||
614 | result_unihashes = set() | 608 | result_unihashes = set() |
615 | 609 | ||
616 | 610 | ||
617 | with ClientPool(self.server_address, 10) as client_pool: | 611 | for taskhash, outhash, unihash in TEST_INPUT: |
618 | for taskhash, outhash, unihash in TEST_INPUT: | 612 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
619 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | 613 | result_unihashes.add(result["unihash"]) |
620 | result_unihashes.add(result["unihash"]) | ||
621 | 614 | ||
622 | query = {} | 615 | query = [] |
623 | expected = {} | 616 | expected = [] |
624 | 617 | ||
625 | for _, _, unihash in TEST_INPUT: | 618 | for _, _, unihash in TEST_INPUT: |
626 | idx = len(query) | 619 | query.append(unihash) |
627 | query[idx] = unihash | 620 | expected.append(unihash in result_unihashes) |
628 | expected[idx] = unihash in result_unihashes | ||
629 | 621 | ||
630 | 622 | ||
631 | for unihash in EXTRA_QUERIES: | 623 | for unihash in EXTRA_QUERIES: |
632 | idx = len(query) | 624 | query.append(unihash) |
633 | query[idx] = unihash | 625 | expected.append(False) |
634 | expected[idx] = False | ||
635 | |||
636 | result = client_pool.unihashes_exist(query) | ||
637 | self.assertDictEqual(result, expected) | ||
638 | 626 | ||
627 | result = self.client.unihash_exists_batch(query) | ||
628 | self.assertListEqual(result, expected) | ||
639 | 629 | ||
640 | def test_auth_read_perms(self): | 630 | def test_auth_read_perms(self): |
641 | admin_client = self.start_auth_server() | 631 | admin_client = self.start_auth_server() |
@@ -979,6 +969,48 @@ class HashEquivalenceCommonTests(object): | |||
979 | # First hash is still present | 969 | # First hash is still present |
980 | self.assertClientGetHash(self.client, taskhash, unihash) | 970 | self.assertClientGetHash(self.client, taskhash, unihash) |
981 | 971 | ||
972 | def test_gc_stream(self): | ||
973 | taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4' | ||
974 | outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8' | ||
975 | unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646' | ||
976 | |||
977 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | ||
978 | self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') | ||
979 | |||
980 | taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4' | ||
981 | outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4' | ||
982 | unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b' | ||
983 | |||
984 | result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2) | ||
985 | self.assertClientGetHash(self.client, taskhash2, unihash2) | ||
986 | |||
987 | taskhash3 = 'a1117c1f5a7c9ab2f5a39cc6fe5e6152169d09c0' | ||
988 | outhash3 = '7289c414905303700a1117c1f5a7c9ab2f5a39cc6fe5e6152169d09c04f9a53c' | ||
989 | unihash3 = '905303700a1117c1f5a7c9ab2f5a39cc6fe5e615' | ||
990 | |||
991 | result = self.client.report_unihash(taskhash3, self.METHOD, outhash3, unihash3) | ||
992 | self.assertClientGetHash(self.client, taskhash3, unihash3) | ||
993 | |||
994 | # Mark the first two unihashes to be kept | ||
995 | ret = self.client.gc_mark_stream("ABC", (f"unihash {h}" for h in [unihash, unihash2])) | ||
996 | self.assertEqual(ret, {"count": 2}) | ||
997 | |||
998 | ret = self.client.gc_status() | ||
999 | self.assertEqual(ret, {"mark": "ABC", "keep": 2, "remove": 1}) | ||
1000 | |||
1001 | # Third hash is still there; mark doesn't delete hashes | ||
1002 | self.assertClientGetHash(self.client, taskhash3, unihash3) | ||
1003 | |||
1004 | ret = self.client.gc_sweep("ABC") | ||
1005 | self.assertEqual(ret, {"count": 1}) | ||
1006 | |||
1007 | # Third hash is gone; the sweep removed it | ||
1008 | self.assertClientGetHash(self.client, taskhash3, None) | ||
1009 | # First hash is still present | ||
1010 | self.assertClientGetHash(self.client, taskhash, unihash) | ||
1011 | # Second hash is still present | ||
1012 | self.assertClientGetHash(self.client, taskhash2, unihash2) | ||
1013 | |||
982 | def test_gc_switch_mark(self): | 1014 | def test_gc_switch_mark(self): |
983 | taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4' | 1015 | taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4' |
984 | outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8' | 1016 | outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8' |
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py index 381b50cf0b..529f85b081 100644 --- a/bitbake/lib/ply/yacc.py +++ b/bitbake/lib/ply/yacc.py | |||
@@ -1122,7 +1122,6 @@ class LRParser: | |||
1122 | # manipulate the rules that make up a grammar. | 1122 | # manipulate the rules that make up a grammar. |
1123 | # ----------------------------------------------------------------------------- | 1123 | # ----------------------------------------------------------------------------- |
1124 | 1124 | ||
1125 | import re | ||
1126 | 1125 | ||
1127 | # regex matching identifiers | 1126 | # regex matching identifiers |
1128 | _is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') | 1127 | _is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') |
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py index 0e0aa34d0e..ffc5a40a28 100644 --- a/bitbake/lib/prserv/__init__.py +++ b/bitbake/lib/prserv/__init__.py | |||
@@ -4,17 +4,92 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | 6 | ||
7 | __version__ = "1.0.0" | ||
8 | 7 | ||
9 | import os, time | 8 | __version__ = "2.0.0" |
10 | import sys, logging | ||
11 | 9 | ||
12 | def init_logger(logfile, loglevel): | 10 | import logging |
13 | numeric_level = getattr(logging, loglevel.upper(), None) | 11 | logger = logging.getLogger("BitBake.PRserv") |
14 | if not isinstance(numeric_level, int): | ||
15 | raise ValueError("Invalid log level: %s" % loglevel) | ||
16 | FORMAT = "%(asctime)-15s %(message)s" | ||
17 | logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT) | ||
18 | 12 | ||
19 | class NotFoundError(Exception): | 13 | from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS |
20 | pass | 14 | |
15 | def create_server(addr, dbpath, upstream=None, read_only=False): | ||
16 | from . import serv | ||
17 | |||
18 | s = serv.PRServer(dbpath, upstream=upstream, read_only=read_only) | ||
19 | host, port = addr.split(":") | ||
20 | s.start_tcp_server(host, int(port)) | ||
21 | |||
22 | return s | ||
23 | |||
24 | def increase_revision(ver): | ||
25 | """Take a revision string such as "1" or "1.2.3" or even a number and increase its last number | ||
26 | This fails if the last number is not an integer""" | ||
27 | |||
28 | fields=str(ver).split('.') | ||
29 | last = fields[-1] | ||
30 | |||
31 | try: | ||
32 | val = int(last) | ||
33 | except Exception as e: | ||
34 | logger.critical("Unable to increase revision value %s: %s" % (ver, e)) | ||
35 | raise e | ||
36 | |||
37 | return ".".join(fields[0:-1] + [ str(val + 1) ]) | ||
38 | |||
39 | def _revision_greater_or_equal(rev1, rev2): | ||
40 | """Compares x.y.z revision numbers, using integer comparison | ||
41 | Returns True if rev1 is greater or equal to rev2""" | ||
42 | |||
43 | fields1 = rev1.split(".") | ||
44 | fields2 = rev2.split(".") | ||
45 | l1 = len(fields1) | ||
46 | l2 = len(fields2) | ||
47 | |||
48 | for i in range(l1): | ||
49 | val1 = int(fields1[i]) | ||
50 | if i < l2: | ||
51 | val2 = int(fields2[i]) | ||
52 | if val2 < val1: | ||
53 | return True | ||
54 | elif val2 > val1: | ||
55 | return False | ||
56 | else: | ||
57 | return True | ||
58 | return True | ||
59 | |||
60 | def revision_smaller(rev1, rev2): | ||
61 | """Compares x.y.z revision numbers, using integer comparison | ||
62 | Returns True if rev1 is strictly smaller than rev2""" | ||
63 | return not(_revision_greater_or_equal(rev1, rev2)) | ||
64 | |||
65 | def revision_greater(rev1, rev2): | ||
66 | """Compares x.y.z revision numbers, using integer comparison | ||
67 | Returns True if rev1 is strictly greater than rev2""" | ||
68 | return _revision_greater_or_equal(rev1, rev2) and (rev1 != rev2) | ||
69 | |||
70 | def create_client(addr): | ||
71 | from . import client | ||
72 | |||
73 | c = client.PRClient() | ||
74 | |||
75 | try: | ||
76 | (typ, a) = parse_address(addr) | ||
77 | c.connect_tcp(*a) | ||
78 | return c | ||
79 | except Exception as e: | ||
80 | c.close() | ||
81 | raise e | ||
82 | |||
83 | async def create_async_client(addr): | ||
84 | from . import client | ||
85 | |||
86 | c = client.PRAsyncClient() | ||
87 | |||
88 | try: | ||
89 | (typ, a) = parse_address(addr) | ||
90 | await c.connect_tcp(*a) | ||
91 | return c | ||
92 | |||
93 | except Exception as e: | ||
94 | await c.close() | ||
95 | raise e | ||
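
The revision helpers compare dotted versions field-by-field as integers, so "1.10" sorts above "1.9" where a plain string comparison would not. A few checks exercising the functions defined above (assuming bitbake/lib is on sys.path):

    from prserv import increase_revision, revision_greater, revision_smaller

    assert increase_revision("1") == "2"
    assert increase_revision("1.2.3") == "1.2.4"
    assert revision_greater("1.10", "1.9")  # integer, not lexicographic
    assert revision_smaller("1.9", "1.10")
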
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py index 8471ee3046..9f5794c433 100644 --- a/bitbake/lib/prserv/client.py +++ b/bitbake/lib/prserv/client.py | |||
@@ -6,6 +6,7 @@ | |||
6 | 6 | ||
7 | import logging | 7 | import logging |
8 | import bb.asyncrpc | 8 | import bb.asyncrpc |
9 | from . import create_async_client | ||
9 | 10 | ||
10 | logger = logging.getLogger("BitBake.PRserv") | 11 | logger = logging.getLogger("BitBake.PRserv") |
11 | 12 | ||
@@ -13,16 +14,16 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
13 | def __init__(self): | 14 | def __init__(self): |
14 | super().__init__("PRSERVICE", "1.0", logger) | 15 | super().__init__("PRSERVICE", "1.0", logger) |
15 | 16 | ||
16 | async def getPR(self, version, pkgarch, checksum): | 17 | async def getPR(self, version, pkgarch, checksum, history=False): |
17 | response = await self.invoke( | 18 | response = await self.invoke( |
18 | {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | 19 | {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}} |
19 | ) | 20 | ) |
20 | if response: | 21 | if response: |
21 | return response["value"] | 22 | return response["value"] |
22 | 23 | ||
23 | async def test_pr(self, version, pkgarch, checksum): | 24 | async def test_pr(self, version, pkgarch, checksum, history=False): |
24 | response = await self.invoke( | 25 | response = await self.invoke( |
25 | {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | 26 | {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}} |
26 | ) | 27 | ) |
27 | if response: | 28 | if response: |
28 | return response["value"] | 29 | return response["value"] |
@@ -48,9 +49,9 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
48 | if response: | 49 | if response: |
49 | return response["value"] | 50 | return response["value"] |
50 | 51 | ||
51 | async def export(self, version, pkgarch, checksum, colinfo): | 52 | async def export(self, version, pkgarch, checksum, colinfo, history=False): |
52 | response = await self.invoke( | 53 | response = await self.invoke( |
53 | {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}} | 54 | {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo, "history": history}} |
54 | ) | 55 | ) |
55 | if response: | 56 | if response: |
56 | return (response["metainfo"], response["datainfo"]) | 57 | return (response["metainfo"], response["datainfo"]) |
@@ -65,7 +66,7 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
65 | class PRClient(bb.asyncrpc.Client): | 66 | class PRClient(bb.asyncrpc.Client): |
66 | def __init__(self): | 67 | def __init__(self): |
67 | super().__init__() | 68 | super().__init__() |
68 | self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly") | 69 | self._add_methods("getPR", "test_pr", "test_package", "max_package_pr", "importone", "export", "is_readonly") |
69 | 70 | ||
70 | def _get_async_client(self): | 71 | def _get_async_client(self): |
71 | return PRAsyncClient() | 72 | return PRAsyncClient() |
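The client API above gains an optional history flag on getPR(), test_pr() and export(), selecting between the two numbering modes described in prserv/db.py below. A small synchronous sketch, assuming an illustrative host/port and that PRClient inherits connect_tcp() and close() from bb.asyncrpc.Client:

    from prserv.client import PRClient

    checksum = "0" * 64                       # placeholder output hash
    client = PRClient()
    client.connect_tcp("localhost", 8585)     # assumed address, for illustration only
    latest = client.getPR("dummy-1.0-r0", "core2-64", checksum)           # "no history" (default)
    original = client.getPR("dummy-1.0-r0", "core2-64", checksum, True)   # "history" mode
    client.close()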
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py index eb41508198..2da493ddf5 100644 --- a/bitbake/lib/prserv/db.py +++ b/bitbake/lib/prserv/db.py | |||
@@ -8,19 +8,13 @@ import logging | |||
8 | import os.path | 8 | import os.path |
9 | import errno | 9 | import errno |
10 | import prserv | 10 | import prserv |
11 | import time | 11 | import sqlite3 |
12 | 12 | ||
13 | try: | 13 | from contextlib import closing |
14 | import sqlite3 | 14 | from . import increase_revision, revision_greater, revision_smaller |
15 | except ImportError: | ||
16 | from pysqlite2 import dbapi2 as sqlite3 | ||
17 | 15 | ||
18 | logger = logging.getLogger("BitBake.PRserv") | 16 | logger = logging.getLogger("BitBake.PRserv") |
19 | 17 | ||
20 | sqlversion = sqlite3.sqlite_version_info | ||
21 | if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | ||
22 | raise Exception("sqlite3 version 3.3.0 or later is required.") | ||
23 | |||
24 | # | 18 | # |
25 | # "No History" mode - for a given query tuple (version, pkgarch, checksum), | 19 | # "No History" mode - for a given query tuple (version, pkgarch, checksum), |
26 | # the returned value will be the largest among all the values of the same | 20 | # the returned value will be the largest among all the values of the same |
@@ -29,287 +23,232 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | |||
29 | # "History" mode - Return a new higher value for previously unseen query | 23 | # "History" mode - Return a new higher value for previously unseen query |
30 | # tuple (version, pkgarch, checksum), otherwise return historical value. | 24 | # tuple (version, pkgarch, checksum), otherwise return historical value. |
31 | # Value can decrement if returning to a previous build. | 25 | # Value can decrement if returning to a previous build. |
32 | # | ||
33 | 26 | ||
34 | class PRTable(object): | 27 | class PRTable(object): |
35 | def __init__(self, conn, table, nohist, read_only): | 28 | def __init__(self, conn, table, read_only): |
36 | self.conn = conn | 29 | self.conn = conn |
37 | self.nohist = nohist | ||
38 | self.read_only = read_only | 30 | self.read_only = read_only |
39 | self.dirty = False | 31 | self.table = table |
40 | if nohist: | 32 | |
41 | self.table = "%s_nohist" % table | 33 | # Creating the table even if the server is read-only. |
42 | else: | 34 | # This avoids a race condition if a shared database |
43 | self.table = "%s_hist" % table | 35 | # is accessed by a read-only server first. |
44 | 36 | ||
45 | if self.read_only: | 37 | with closing(self.conn.cursor()) as cursor: |
46 | table_exists = self._execute( | 38 | cursor.execute("CREATE TABLE IF NOT EXISTS %s \ |
47 | "SELECT count(*) FROM sqlite_master \ | ||
48 | WHERE type='table' AND name='%s'" % (self.table)) | ||
49 | if not table_exists: | ||
50 | raise prserv.NotFoundError | ||
51 | else: | ||
52 | self._execute("CREATE TABLE IF NOT EXISTS %s \ | ||
53 | (version TEXT NOT NULL, \ | 39 | (version TEXT NOT NULL, \ |
54 | pkgarch TEXT NOT NULL, \ | 40 | pkgarch TEXT NOT NULL, \ |
55 | checksum TEXT NOT NULL, \ | 41 | checksum TEXT NOT NULL, \ |
56 | value INTEGER, \ | 42 | value TEXT, \ |
57 | PRIMARY KEY (version, pkgarch, checksum));" % self.table) | 43 | PRIMARY KEY (version, pkgarch, checksum, value));" % self.table) |
58 | |||
59 | def _execute(self, *query): | ||
60 | """Execute a query, waiting to acquire a lock if necessary""" | ||
61 | start = time.time() | ||
62 | end = start + 20 | ||
63 | while True: | ||
64 | try: | ||
65 | return self.conn.execute(*query) | ||
66 | except sqlite3.OperationalError as exc: | ||
67 | if "is locked" in str(exc) and end > time.time(): | ||
68 | continue | ||
69 | raise exc | ||
70 | |||
71 | def sync(self): | ||
72 | if not self.read_only: | ||
73 | self.conn.commit() | 44 | self.conn.commit() |
74 | self._execute("BEGIN EXCLUSIVE TRANSACTION") | ||
75 | 45 | ||
76 | def sync_if_dirty(self): | 46 | def _extremum_value(self, rows, is_max): |
77 | if self.dirty: | 47 | value = None |
78 | self.sync() | 48 | |
79 | self.dirty = False | 49 | for row in rows: |
50 | current_value = row[0] | ||
51 | if value is None: | ||
52 | value = current_value | ||
53 | else: | ||
54 | if is_max: | ||
55 | is_new_extremum = revision_greater(current_value, value) | ||
56 | else: | ||
57 | is_new_extremum = revision_smaller(current_value, value) | ||
58 | if is_new_extremum: | ||
59 | value = current_value | ||
60 | return value | ||
61 | |||
62 | def _max_value(self, rows): | ||
63 | return self._extremum_value(rows, True) | ||
64 | |||
65 | def _min_value(self, rows): | ||
66 | return self._extremum_value(rows, False) | ||
80 | 67 | ||
81 | def test_package(self, version, pkgarch): | 68 | def test_package(self, version, pkgarch): |
82 | """Returns whether the specified package version is found in the database for the specified architecture""" | 69 | """Returns whether the specified package version is found in the database for the specified architecture""" |
83 | 70 | ||
84 | # Just returns the value if found or None otherwise | 71 | # Just returns the value if found or None otherwise |
85 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, | 72 | with closing(self.conn.cursor()) as cursor: |
86 | (version, pkgarch)) | 73 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, |
87 | row=data.fetchone() | 74 | (version, pkgarch)) |
88 | if row is not None: | 75 | row=data.fetchone() |
89 | return True | 76 | if row is not None: |
90 | else: | 77 | return True |
91 | return False | 78 | else: |
79 | return False | ||
80 | |||
81 | def test_checksum_value(self, version, pkgarch, checksum, value): | ||
82 | """Returns whether the specified value is found in the database for the specified package, architecture and checksum""" | ||
83 | |||
84 | with closing(self.conn.cursor()) as cursor: | ||
85 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and checksum=? and value=?;" % self.table, | ||
86 | (version, pkgarch, checksum, value)) | ||
87 | row=data.fetchone() | ||
88 | if row is not None: | ||
89 | return True | ||
90 | else: | ||
91 | return False | ||
92 | 92 | ||
93 | def test_value(self, version, pkgarch, value): | 93 | def test_value(self, version, pkgarch, value): |
94 | """Returns whether the specified value is found in the database for the specified package and architecture""" | 94 | """Returns whether the specified value is found in the database for the specified package and architecture""" |
95 | 95 | ||
96 | # Just returns the value if found or None otherwise | 96 | # Just returns the value if found or None otherwise |
97 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, | 97 | with closing(self.conn.cursor()) as cursor: |
98 | (version, pkgarch, value)) | 98 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, |
99 | row=data.fetchone() | 99 | (version, pkgarch, value)) |
100 | if row is not None: | 100 | row=data.fetchone() |
101 | return True | 101 | if row is not None: |
102 | else: | 102 | return True |
103 | return False | 103 | else: |
104 | return False | ||
104 | 105 | ||
105 | def find_value(self, version, pkgarch, checksum): | 106 | |
107 | def find_package_max_value(self, version, pkgarch): | ||
108 | """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value""" | ||
109 | |||
110 | with closing(self.conn.cursor()) as cursor: | ||
111 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
112 | (version, pkgarch)) | ||
113 | rows = data.fetchall() | ||
114 | value = self._max_value(rows) | ||
115 | return value | ||
116 | |||
117 | def find_value(self, version, pkgarch, checksum, history=False): | ||
106 | """Returns the value for the specified checksum if found or None otherwise.""" | 118 | """Returns the value for the specified checksum if found or None otherwise.""" |
107 | 119 | ||
108 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 120 | if history: |
109 | (version, pkgarch, checksum)) | 121 | return self.find_min_value(version, pkgarch, checksum) |
110 | row=data.fetchone() | ||
111 | if row is not None: | ||
112 | return row[0] | ||
113 | else: | 122 | else: |
114 | return None | 123 | return self.find_max_value(version, pkgarch, checksum) |
115 | 124 | ||
116 | def find_max_value(self, version, pkgarch): | ||
117 | """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value""" | ||
118 | 125 | ||
119 | data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table), | 126 | def _find_extremum_value(self, version, pkgarch, checksum, is_max): |
120 | (version, pkgarch)) | 127 | """Returns the maximum (if is_max is True) or minimum (if is_max is False) value |
121 | row = data.fetchone() | 128 | for (version, pkgarch, checksum), or None if not found. Doesn't create a new value""" |
122 | if row is not None: | ||
123 | return row[0] | ||
124 | else: | ||
125 | return None | ||
126 | |||
127 | def _get_value_hist(self, version, pkgarch, checksum): | ||
128 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
129 | (version, pkgarch, checksum)) | ||
130 | row=data.fetchone() | ||
131 | if row is not None: | ||
132 | return row[0] | ||
133 | else: | ||
134 | #no value found, try to insert | ||
135 | if self.read_only: | ||
136 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
137 | (version, pkgarch)) | ||
138 | row = data.fetchone() | ||
139 | if row is not None: | ||
140 | return row[0] | ||
141 | else: | ||
142 | return 0 | ||
143 | 129 | ||
144 | try: | 130 | with closing(self.conn.cursor()) as cursor: |
145 | self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" | 131 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND checksum=?;" % (self.table), |
146 | % (self.table, self.table), | 132 | (version, pkgarch, checksum)) |
147 | (version, pkgarch, checksum, version, pkgarch)) | 133 | rows = data.fetchall() |
148 | except sqlite3.IntegrityError as exc: | 134 | return self._extremum_value(rows, is_max) |
149 | logger.error(str(exc)) | ||
150 | 135 | ||
151 | self.dirty = True | 136 | def find_max_value(self, version, pkgarch, checksum): |
137 | return self._find_extremum_value(version, pkgarch, checksum, True) | ||
152 | 138 | ||
153 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 139 | def find_min_value(self, version, pkgarch, checksum): |
154 | (version, pkgarch, checksum)) | 140 | return self._find_extremum_value(version, pkgarch, checksum, False) |
155 | row=data.fetchone() | 141 | |
156 | if row is not None: | 142 | def find_new_subvalue(self, version, pkgarch, base): |
157 | return row[0] | 143 | """Take and increase the greatest "<base>.y" value for (version, pkgarch), or return "<base>.0" if not found. |
158 | else: | 144 | This doesn't store a new value.""" |
159 | raise prserv.NotFoundError | 145 | |
160 | 146 | with closing(self.conn.cursor()) as cursor: | |
161 | def _get_value_no_hist(self, version, pkgarch, checksum): | 147 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND value LIKE '%s.%%';" % (self.table, base), |
162 | data=self._execute("SELECT value FROM %s \ | 148 | (version, pkgarch)) |
163 | WHERE version=? AND pkgarch=? AND checksum=? AND \ | 149 | rows = data.fetchall() |
164 | value >= (select max(value) from %s where version=? AND pkgarch=?);" | 150 | value = self._max_value(rows) |
165 | % (self.table, self.table), | 151 | |
166 | (version, pkgarch, checksum, version, pkgarch)) | 152 | if value is not None: |
167 | row=data.fetchone() | 153 | return increase_revision(value) |
168 | if row is not None: | ||
169 | return row[0] | ||
170 | else: | ||
171 | #no value found, try to insert | ||
172 | if self.read_only: | ||
173 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
174 | (version, pkgarch)) | ||
175 | return data.fetchone()[0] | ||
176 | |||
177 | try: | ||
178 | self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" | ||
179 | % (self.table, self.table), | ||
180 | (version, pkgarch, checksum, version, pkgarch)) | ||
181 | except sqlite3.IntegrityError as exc: | ||
182 | logger.error(str(exc)) | ||
183 | self.conn.rollback() | ||
184 | |||
185 | self.dirty = True | ||
186 | |||
187 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
188 | (version, pkgarch, checksum)) | ||
189 | row=data.fetchone() | ||
190 | if row is not None: | ||
191 | return row[0] | ||
192 | else: | 154 | else: |
193 | raise prserv.NotFoundError | 155 | return base + ".0" |
194 | 156 | ||
195 | def get_value(self, version, pkgarch, checksum): | 157 | def store_value(self, version, pkgarch, checksum, value): |
196 | if self.nohist: | 158 | """Store value in the database""" |
197 | return self._get_value_no_hist(version, pkgarch, checksum) | 159 | |
198 | else: | 160 | if not self.read_only and not self.test_checksum_value(version, pkgarch, checksum, value): |
199 | return self._get_value_hist(version, pkgarch, checksum) | 161 | with closing(self.conn.cursor()) as cursor: |
200 | 162 | cursor.execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | |
201 | def _import_hist(self, version, pkgarch, checksum, value): | ||
202 | if self.read_only: | ||
203 | return None | ||
204 | |||
205 | val = None | ||
206 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
207 | (version, pkgarch, checksum)) | ||
208 | row = data.fetchone() | ||
209 | if row is not None: | ||
210 | val=row[0] | ||
211 | else: | ||
212 | #no value found, try to insert | ||
213 | try: | ||
214 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | ||
215 | (version, pkgarch, checksum, value)) | 163 | (version, pkgarch, checksum, value)) |
216 | except sqlite3.IntegrityError as exc: | 164 | self.conn.commit() |
217 | logger.error(str(exc)) | ||
218 | 165 | ||
219 | self.dirty = True | 166 | def _get_value(self, version, pkgarch, checksum, history): |
220 | 167 | ||
221 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 168 | max_value = self.find_package_max_value(version, pkgarch) |
222 | (version, pkgarch, checksum)) | ||
223 | row = data.fetchone() | ||
224 | if row is not None: | ||
225 | val = row[0] | ||
226 | return val | ||
227 | 169 | ||
228 | def _import_no_hist(self, version, pkgarch, checksum, value): | 170 | if max_value is None: |
229 | if self.read_only: | 171 | # version, pkgarch completely unknown. Return initial value. |
230 | return None | 172 | return "0" |
231 | 173 | ||
232 | try: | 174 | value = self.find_value(version, pkgarch, checksum, history) |
233 | #try to insert | 175 | |
234 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | 176 | if value is None: |
235 | (version, pkgarch, checksum, value)) | 177 | # version, pkgarch found but not checksum. Create a new value from the maximum one |
236 | except sqlite3.IntegrityError as exc: | 178 | return increase_revision(max_value) |
237 | #already have the record, try to update | 179 | |
238 | try: | 180 | if history: |
239 | self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" | 181 | return value |
240 | % (self.table), | 182 | |
241 | (value, version, pkgarch, checksum, value)) | 183 | # "no history" mode - If the value is not the maximum value for the package, need to increase it. |
242 | except sqlite3.IntegrityError as exc: | 184 | if max_value > value: |
243 | logger.error(str(exc)) | 185 | return increase_revision(max_value) |
244 | |||
245 | self.dirty = True | ||
246 | |||
247 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table, | ||
248 | (version, pkgarch, checksum, value)) | ||
249 | row=data.fetchone() | ||
250 | if row is not None: | ||
251 | return row[0] | ||
252 | else: | 186 | else: |
253 | return None | 187 | return value |
188 | |||
189 | def get_value(self, version, pkgarch, checksum, history): | ||
190 | value = self._get_value(version, pkgarch, checksum, history) | ||
191 | if not self.read_only: | ||
192 | self.store_value(version, pkgarch, checksum, value) | ||
193 | return value | ||
254 | 194 | ||
255 | def importone(self, version, pkgarch, checksum, value): | 195 | def importone(self, version, pkgarch, checksum, value): |
256 | if self.nohist: | 196 | self.store_value(version, pkgarch, checksum, value) |
257 | return self._import_no_hist(version, pkgarch, checksum, value) | 197 | return value |
258 | else: | ||
259 | return self._import_hist(version, pkgarch, checksum, value) | ||
260 | 198 | ||
261 | def export(self, version, pkgarch, checksum, colinfo): | 199 | def export(self, version, pkgarch, checksum, colinfo, history=False): |
262 | metainfo = {} | 200 | metainfo = {} |
263 | #column info | 201 | with closing(self.conn.cursor()) as cursor: |
264 | if colinfo: | 202 | #column info |
265 | metainfo["tbl_name"] = self.table | 203 | if colinfo: |
266 | metainfo["core_ver"] = prserv.__version__ | 204 | metainfo["tbl_name"] = self.table |
267 | metainfo["col_info"] = [] | 205 | metainfo["core_ver"] = prserv.__version__ |
268 | data = self._execute("PRAGMA table_info(%s);" % self.table) | 206 | metainfo["col_info"] = [] |
207 | data = cursor.execute("PRAGMA table_info(%s);" % self.table) | ||
208 | for row in data: | ||
209 | col = {} | ||
210 | col["name"] = row["name"] | ||
211 | col["type"] = row["type"] | ||
212 | col["notnull"] = row["notnull"] | ||
213 | col["dflt_value"] = row["dflt_value"] | ||
214 | col["pk"] = row["pk"] | ||
215 | metainfo["col_info"].append(col) | ||
216 | |||
217 | #data info | ||
218 | datainfo = [] | ||
219 | |||
220 | if history: | ||
221 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table | ||
222 | else: | ||
223 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ | ||
224 | (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \ | ||
225 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) | ||
226 | sqlarg = [] | ||
227 | where = "" | ||
228 | if version: | ||
229 | where += "AND T1.version=? " | ||
230 | sqlarg.append(str(version)) | ||
231 | if pkgarch: | ||
232 | where += "AND T1.pkgarch=? " | ||
233 | sqlarg.append(str(pkgarch)) | ||
234 | if checksum: | ||
235 | where += "AND T1.checksum=? " | ||
236 | sqlarg.append(str(checksum)) | ||
237 | |||
238 | sqlstmt += where + ";" | ||
239 | |||
240 | if len(sqlarg): | ||
241 | data = cursor.execute(sqlstmt, tuple(sqlarg)) | ||
242 | else: | ||
243 | data = cursor.execute(sqlstmt) | ||
269 | for row in data: | 244 | for row in data: |
270 | col = {} | 245 | if row["version"]: |
271 | col["name"] = row["name"] | 246 | col = {} |
272 | col["type"] = row["type"] | 247 | col["version"] = row["version"] |
273 | col["notnull"] = row["notnull"] | 248 | col["pkgarch"] = row["pkgarch"] |
274 | col["dflt_value"] = row["dflt_value"] | 249 | col["checksum"] = row["checksum"] |
275 | col["pk"] = row["pk"] | 250 | col["value"] = row["value"] |
276 | metainfo["col_info"].append(col) | 251 | datainfo.append(col) |
277 | |||
278 | #data info | ||
279 | datainfo = [] | ||
280 | |||
281 | if self.nohist: | ||
282 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ | ||
283 | (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \ | ||
284 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) | ||
285 | else: | ||
286 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table | ||
287 | sqlarg = [] | ||
288 | where = "" | ||
289 | if version: | ||
290 | where += "AND T1.version=? " | ||
291 | sqlarg.append(str(version)) | ||
292 | if pkgarch: | ||
293 | where += "AND T1.pkgarch=? " | ||
294 | sqlarg.append(str(pkgarch)) | ||
295 | if checksum: | ||
296 | where += "AND T1.checksum=? " | ||
297 | sqlarg.append(str(checksum)) | ||
298 | |||
299 | sqlstmt += where + ";" | ||
300 | |||
301 | if len(sqlarg): | ||
302 | data = self._execute(sqlstmt, tuple(sqlarg)) | ||
303 | else: | ||
304 | data = self._execute(sqlstmt) | ||
305 | for row in data: | ||
306 | if row["version"]: | ||
307 | col = {} | ||
308 | col["version"] = row["version"] | ||
309 | col["pkgarch"] = row["pkgarch"] | ||
310 | col["checksum"] = row["checksum"] | ||
311 | col["value"] = row["value"] | ||
312 | datainfo.append(col) | ||
313 | return (metainfo, datainfo) | 252 | return (metainfo, datainfo) |
314 | 253 | ||
315 | def dump_db(self, fd): | 254 | def dump_db(self, fd): |
@@ -322,9 +261,8 @@ class PRTable(object): | |||
322 | 261 | ||
323 | class PRData(object): | 262 | class PRData(object): |
324 | """Object representing the PR database""" | 263 | """Object representing the PR database""" |
325 | def __init__(self, filename, nohist=True, read_only=False): | 264 | def __init__(self, filename, read_only=False): |
326 | self.filename=os.path.abspath(filename) | 265 | self.filename=os.path.abspath(filename) |
327 | self.nohist=nohist | ||
328 | self.read_only = read_only | 266 | self.read_only = read_only |
329 | #build directory hierarchy | 267 | #build directory hierarchy |
330 | try: | 268 | try: |
@@ -334,14 +272,15 @@ class PRData(object): | |||
334 | raise e | 272 | raise e |
335 | uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") | 273 | uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") |
336 | logger.debug("Opening PRServ database '%s'" % (uri)) | 274 | logger.debug("Opening PRServ database '%s'" % (uri)) |
337 | self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread = False) | 275 | self.connection=sqlite3.connect(uri, uri=True) |
338 | self.connection.row_factory=sqlite3.Row | 276 | self.connection.row_factory=sqlite3.Row |
339 | if not self.read_only: | 277 | self.connection.execute("PRAGMA synchronous = OFF;") |
340 | self.connection.execute("pragma synchronous = off;") | 278 | self.connection.execute("PRAGMA journal_mode = WAL;") |
341 | self.connection.execute("PRAGMA journal_mode = MEMORY;") | 279 | self.connection.commit() |
342 | self._tables={} | 280 | self._tables={} |
343 | 281 | ||
344 | def disconnect(self): | 282 | def disconnect(self): |
283 | self.connection.commit() | ||
345 | self.connection.close() | 284 | self.connection.close() |
346 | 285 | ||
347 | def __getitem__(self, tblname): | 286 | def __getitem__(self, tblname): |
@@ -351,7 +290,7 @@ class PRData(object): | |||
351 | if tblname in self._tables: | 290 | if tblname in self._tables: |
352 | return self._tables[tblname] | 291 | return self._tables[tblname] |
353 | else: | 292 | else: |
354 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only) | 293 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.read_only) |
355 | return tableobj | 294 | return tableobj |
356 | 295 | ||
357 | def __delitem__(self, tblname): | 296 | def __delitem__(self, tblname): |
@@ -359,3 +298,4 @@ class PRData(object): | |||
359 | del self._tables[tblname] | 298 | del self._tables[tblname] |
360 | logger.info("drop table %s" % (tblname)) | 299 | logger.info("drop table %s" % (tblname)) |
361 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) | 300 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) |
301 | self.connection.commit() | ||
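PR values are now stored as dotted revision strings and compared component-wise with the increase_revision()/revision_greater()/revision_smaller() helpers imported from prserv/__init__.py. A sketch of the semantics, mirroring the assertions in prserv/tests.py later in this series:

    from prserv import increase_revision, revision_greater, revision_smaller

    assert increase_revision("4") == "5"        # the last component is bumped numerically
    assert increase_revision("1.9") == "1.10"   # numeric, not lexicographic
    assert revision_greater("1.20", "1.3")      # component-wise comparison
    assert revision_smaller("1.3", "1.20")

This is also what separates the two modes: after store_value() has recorded both "0" and "2" for the same (version, pkgarch, checksum), find_value() returns "0" with history=True (the first recorded PR) and "2" with history=False (the largest one).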
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py index dc4be5b620..e175886308 100644 --- a/bitbake/lib/prserv/serv.py +++ b/bitbake/lib/prserv/serv.py | |||
@@ -12,6 +12,7 @@ import sqlite3 | |||
12 | import prserv | 12 | import prserv |
13 | import prserv.db | 13 | import prserv.db |
14 | import errno | 14 | import errno |
15 | from . import create_async_client, revision_smaller, increase_revision | ||
15 | import bb.asyncrpc | 16 | import bb.asyncrpc |
16 | 17 | ||
17 | logger = logging.getLogger("BitBake.PRserv") | 18 | logger = logging.getLogger("BitBake.PRserv") |
@@ -41,18 +42,16 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
41 | try: | 42 | try: |
42 | return await super().dispatch_message(msg) | 43 | return await super().dispatch_message(msg) |
43 | except: | 44 | except: |
44 | self.server.table.sync() | ||
45 | raise | 45 | raise |
46 | else: | ||
47 | self.server.table.sync_if_dirty() | ||
48 | 46 | ||
49 | async def handle_test_pr(self, request): | 47 | async def handle_test_pr(self, request): |
50 | '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' | 48 | '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' |
51 | version = request["version"] | 49 | version = request["version"] |
52 | pkgarch = request["pkgarch"] | 50 | pkgarch = request["pkgarch"] |
53 | checksum = request["checksum"] | 51 | checksum = request["checksum"] |
52 | history = request["history"] | ||
54 | 53 | ||
55 | value = self.server.table.find_value(version, pkgarch, checksum) | 54 | value = self.server.table.find_value(version, pkgarch, checksum, history) |
56 | return {"value": value} | 55 | return {"value": value} |
57 | 56 | ||
58 | async def handle_test_package(self, request): | 57 | async def handle_test_package(self, request): |
@@ -68,22 +67,110 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
68 | version = request["version"] | 67 | version = request["version"] |
69 | pkgarch = request["pkgarch"] | 68 | pkgarch = request["pkgarch"] |
70 | 69 | ||
71 | value = self.server.table.find_max_value(version, pkgarch) | 70 | value = self.server.table.find_package_max_value(version, pkgarch) |
72 | return {"value": value} | 71 | return {"value": value} |
73 | 72 | ||
74 | async def handle_get_pr(self, request): | 73 | async def handle_get_pr(self, request): |
75 | version = request["version"] | 74 | version = request["version"] |
76 | pkgarch = request["pkgarch"] | 75 | pkgarch = request["pkgarch"] |
77 | checksum = request["checksum"] | 76 | checksum = request["checksum"] |
77 | history = request["history"] | ||
78 | 78 | ||
79 | response = None | 79 | if self.upstream_client is None: |
80 | try: | 80 | value = self.server.table.get_value(version, pkgarch, checksum, history) |
81 | value = self.server.table.get_value(version, pkgarch, checksum) | 81 | return {"value": value} |
82 | response = {"value": value} | ||
83 | except prserv.NotFoundError: | ||
84 | self.logger.error("failure storing value in database for (%s, %s)",version, checksum) | ||
85 | 82 | ||
86 | return response | 83 | # We have an upstream server. |
84 | # Check whether the local server already knows the requested configuration. | ||
85 | # If the configuration is a new one, the generated value we will add will | ||
86 | # depend on what's on the upstream server. That's why we're calling find_value() | ||
87 | # instead of get_value() directly. | ||
88 | |||
89 | value = self.server.table.find_value(version, pkgarch, checksum, history) | ||
90 | upstream_max = await self.upstream_client.max_package_pr(version, pkgarch) | ||
91 | |||
92 | if value is not None: | ||
93 | |||
94 | # The configuration is already known locally. | ||
95 | |||
96 | if history: | ||
97 | value = self.server.table.get_value(version, pkgarch, checksum, history) | ||
98 | else: | ||
99 | existing_value = value | ||
100 | # In "no history", we need to make sure the value doesn't decrease | ||
101 | # and is at least as large as the maximum upstream value | ||
102 | # and the maximum local value | ||
103 | |||
104 | local_max = self.server.table.find_package_max_value(version, pkgarch) | ||
105 | if revision_smaller(value, local_max): | ||
106 | value = increase_revision(local_max) | ||
107 | |||
108 | if revision_smaller(value, upstream_max): | ||
109 | # Ask upstream whether it knows the checksum | ||
110 | upstream_value = await self.upstream_client.test_pr(version, pkgarch, checksum) | ||
111 | if upstream_value is None: | ||
112 | # Upstream doesn't have our checksum, let's create a new one | ||
113 | value = upstream_max + ".0" | ||
114 | else: | ||
115 | # Fine to take the same value as upstream | ||
116 | value = upstream_max | ||
117 | |||
118 | if not value == existing_value and not self.server.read_only: | ||
119 | self.server.table.store_value(version, pkgarch, checksum, value) | ||
120 | |||
121 | return {"value": value} | ||
122 | |||
123 | # The configuration is a new one for the local server | ||
124 | # Let's ask the upstream server whether it knows it | ||
125 | |||
126 | known_upstream = await self.upstream_client.test_package(version, pkgarch) | ||
127 | |||
128 | if not known_upstream: | ||
129 | |||
130 | # The package is not known upstream, must be a local-only package | ||
131 | # Let's compute the PR number using the local-only method | ||
132 | |||
133 | value = self.server.table.get_value(version, pkgarch, checksum, history) | ||
134 | return {"value": value} | ||
135 | |||
136 | # The package is known upstream, let's ask the upstream server | ||
137 | # whether it knows our new output hash | ||
138 | |||
139 | value = await self.upstream_client.test_pr(version, pkgarch, checksum) | ||
140 | |||
141 | if value is not None: | ||
142 | |||
143 | # Upstream knows this output hash, let's store it and use it too. | ||
144 | |||
145 | if not self.server.read_only: | ||
146 | self.server.table.store_value(version, pkgarch, checksum, value) | ||
147 | # If the local server is read-only, it won't be able to store the new | ||
148 | # value in the database and will have to keep asking the upstream server | ||
149 | return {"value": value} | ||
150 | |||
151 | # The output hash doesn't exist upstream, get the most recent number from upstream (x) | ||
152 | # Then, we want to have a new PR value for the local server: x.y | ||
153 | |||
154 | upstream_max = await self.upstream_client.max_package_pr(version, pkgarch) | ||
155 | # Here we know that the package is known upstream, so upstream_max can't be None | ||
156 | subvalue = self.server.table.find_new_subvalue(version, pkgarch, upstream_max) | ||
157 | |||
158 | if not self.server.read_only: | ||
159 | self.server.table.store_value(version, pkgarch, checksum, subvalue) | ||
160 | |||
161 | return {"value": subvalue} | ||
162 | |||
163 | async def process_requests(self): | ||
164 | if self.server.upstream is not None: | ||
165 | self.upstream_client = await create_async_client(self.server.upstream) | ||
166 | else: | ||
167 | self.upstream_client = None | ||
168 | |||
169 | try: | ||
170 | await super().process_requests() | ||
171 | finally: | ||
172 | if self.upstream_client is not None: | ||
173 | await self.upstream_client.close() | ||
87 | 174 | ||
88 | async def handle_import_one(self, request): | 175 | async def handle_import_one(self, request): |
89 | response = None | 176 | response = None |
@@ -104,9 +191,10 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
104 | pkgarch = request["pkgarch"] | 191 | pkgarch = request["pkgarch"] |
105 | checksum = request["checksum"] | 192 | checksum = request["checksum"] |
106 | colinfo = request["colinfo"] | 193 | colinfo = request["colinfo"] |
194 | history = request["history"] | ||
107 | 195 | ||
108 | try: | 196 | try: |
109 | (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo) | 197 | (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo, history) |
110 | except sqlite3.Error as exc: | 198 | except sqlite3.Error as exc: |
111 | self.logger.error(str(exc)) | 199 | self.logger.error(str(exc)) |
112 | metainfo = datainfo = None | 200 | metainfo = datainfo = None |
@@ -117,11 +205,12 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
117 | return {"readonly": self.server.read_only} | 205 | return {"readonly": self.server.read_only} |
118 | 206 | ||
119 | class PRServer(bb.asyncrpc.AsyncServer): | 207 | class PRServer(bb.asyncrpc.AsyncServer): |
120 | def __init__(self, dbfile, read_only=False): | 208 | def __init__(self, dbfile, read_only=False, upstream=None): |
121 | super().__init__(logger) | 209 | super().__init__(logger) |
122 | self.dbfile = dbfile | 210 | self.dbfile = dbfile |
123 | self.table = None | 211 | self.table = None |
124 | self.read_only = read_only | 212 | self.read_only = read_only |
213 | self.upstream = upstream | ||
125 | 214 | ||
126 | def accept_client(self, socket): | 215 | def accept_client(self, socket): |
127 | return PRServerClient(socket, self) | 216 | return PRServerClient(socket, self) |
@@ -134,27 +223,25 @@ class PRServer(bb.asyncrpc.AsyncServer): | |||
134 | self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % | 223 | self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % |
135 | (self.dbfile, self.address, str(os.getpid()))) | 224 | (self.dbfile, self.address, str(os.getpid()))) |
136 | 225 | ||
226 | if self.upstream is not None: | ||
227 | self.logger.info("Using upstream PRServer: %s" % (self.upstream)) | ||
228 | |||
137 | return tasks | 229 | return tasks |
138 | 230 | ||
139 | async def stop(self): | 231 | async def stop(self): |
140 | self.table.sync_if_dirty() | ||
141 | self.db.disconnect() | 232 | self.db.disconnect() |
142 | await super().stop() | 233 | await super().stop() |
143 | 234 | ||
144 | def signal_handler(self): | ||
145 | super().signal_handler() | ||
146 | if self.table: | ||
147 | self.table.sync() | ||
148 | |||
149 | class PRServSingleton(object): | 235 | class PRServSingleton(object): |
150 | def __init__(self, dbfile, logfile, host, port): | 236 | def __init__(self, dbfile, logfile, host, port, upstream): |
151 | self.dbfile = dbfile | 237 | self.dbfile = dbfile |
152 | self.logfile = logfile | 238 | self.logfile = logfile |
153 | self.host = host | 239 | self.host = host |
154 | self.port = port | 240 | self.port = port |
241 | self.upstream = upstream | ||
155 | 242 | ||
156 | def start(self): | 243 | def start(self): |
157 | self.prserv = PRServer(self.dbfile) | 244 | self.prserv = PRServer(self.dbfile, upstream=self.upstream) |
158 | self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) | 245 | self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) |
159 | self.process = self.prserv.serve_as_process(log_level=logging.WARNING) | 246 | self.process = self.prserv.serve_as_process(log_level=logging.WARNING) |
160 | 247 | ||
@@ -233,7 +320,7 @@ def run_as_daemon(func, pidfile, logfile): | |||
233 | os.remove(pidfile) | 320 | os.remove(pidfile) |
234 | os._exit(0) | 321 | os._exit(0) |
235 | 322 | ||
236 | def start_daemon(dbfile, host, port, logfile, read_only=False): | 323 | def start_daemon(dbfile, host, port, logfile, read_only=False, upstream=None): |
237 | ip = socket.gethostbyname(host) | 324 | ip = socket.gethostbyname(host) |
238 | pidfile = PIDPREFIX % (ip, port) | 325 | pidfile = PIDPREFIX % (ip, port) |
239 | try: | 326 | try: |
@@ -249,7 +336,7 @@ def start_daemon(dbfile, host, port, logfile, read_only=False): | |||
249 | 336 | ||
250 | dbfile = os.path.abspath(dbfile) | 337 | dbfile = os.path.abspath(dbfile) |
251 | def daemon_main(): | 338 | def daemon_main(): |
252 | server = PRServer(dbfile, read_only=read_only) | 339 | server = PRServer(dbfile, read_only=read_only, upstream=upstream) |
253 | server.start_tcp_server(ip, port) | 340 | server.start_tcp_server(ip, port) |
254 | server.serve_forever() | 341 | server.serve_forever() |
255 | 342 | ||
@@ -336,6 +423,9 @@ def auto_start(d): | |||
336 | 423 | ||
337 | host = host_params[0].strip().lower() | 424 | host = host_params[0].strip().lower() |
338 | port = int(host_params[1]) | 425 | port = int(host_params[1]) |
426 | |||
427 | upstream = d.getVar("PRSERV_UPSTREAM") or None | ||
428 | |||
339 | if is_local_special(host, port): | 429 | if is_local_special(host, port): |
340 | import bb.utils | 430 | import bb.utils |
341 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) | 431 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) |
@@ -350,7 +440,7 @@ def auto_start(d): | |||
350 | auto_shutdown() | 440 | auto_shutdown() |
351 | if not singleton: | 441 | if not singleton: |
352 | bb.utils.mkdirhier(cachedir) | 442 | bb.utils.mkdirhier(cachedir) |
353 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port) | 443 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port, upstream) |
354 | singleton.start() | 444 | singleton.start() |
355 | if singleton: | 445 | if singleton: |
356 | host = singleton.host | 446 | host = singleton.host |
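The upstream handling in handle_get_pr() above follows a fixed decision order. A condensed sketch of that flow, using the same table and client methods as the diff; the store_value() calls, error handling and read-only checks are omitted for brevity:

    from prserv import increase_revision, revision_smaller

    async def resolve_pr(table, upstream, version, arch, csum, history):
        value = table.find_value(version, arch, csum, history)
        if value is not None:
            if history:
                return value                  # "history" mode: the recorded value wins
            # "no history": never fall below the local or upstream maxima
            local_max = table.find_package_max_value(version, arch)
            if revision_smaller(value, local_max):
                value = increase_revision(local_max)
            upstream_max = await upstream.max_package_pr(version, arch)
            if revision_smaller(value, upstream_max):
                known = await upstream.test_pr(version, arch, csum)
                # reuse upstream's maximum if it knows the checksum, else fork a subvalue
                value = upstream_max if known is not None else upstream_max + ".0"
            return value
        if not await upstream.test_package(version, arch):
            return table.get_value(version, arch, csum, history)  # local-only package
        value = await upstream.test_pr(version, arch, csum)
        if value is not None:
            return value                      # upstream already numbered this checksum
        base = await upstream.max_package_pr(version, arch)
        return table.find_new_subvalue(version, arch, base)       # e.g. "3" -> "3.0"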
diff --git a/bitbake/lib/prserv/tests.py b/bitbake/lib/prserv/tests.py new file mode 100644 index 0000000000..df0c003003 --- /dev/null +++ b/bitbake/lib/prserv/tests.py | |||
@@ -0,0 +1,388 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright (C) 2024 BitBake Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | from . import create_server, create_client, increase_revision, revision_greater, revision_smaller, _revision_greater_or_equal | ||
9 | import prserv.db as db | ||
10 | from bb.asyncrpc import InvokeError | ||
11 | import logging | ||
12 | import os | ||
13 | import sys | ||
14 | import tempfile | ||
15 | import unittest | ||
16 | import socket | ||
17 | import subprocess | ||
18 | from pathlib import Path | ||
19 | |||
20 | THIS_DIR = Path(__file__).parent | ||
21 | BIN_DIR = THIS_DIR.parent.parent / "bin" | ||
22 | |||
23 | version = "dummy-1.0-r0" | ||
24 | pkgarch = "core2-64" | ||
25 | other_arch = "aarch64" | ||
26 | |||
27 | checksumX = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4f0" | ||
28 | checksum0 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a0" | ||
29 | checksum1 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a1" | ||
30 | checksum2 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a2" | ||
31 | checksum3 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a3" | ||
32 | checksum4 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a4" | ||
33 | checksum5 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a5" | ||
34 | checksum6 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a6" | ||
35 | checksum7 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a7" | ||
36 | checksum8 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a8" | ||
37 | checksum9 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a9" | ||
38 | checksum10 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4aa" | ||
39 | |||
40 | def server_prefunc(server, name): | ||
41 | logging.basicConfig(level=logging.DEBUG, filename='prserv-%s.log' % name, filemode='w', | ||
42 | format='%(levelname)s %(filename)s:%(lineno)d %(message)s') | ||
43 | server.logger.debug("Running server %s" % name) | ||
44 | sys.stdout = open('prserv-stdout-%s.log' % name, 'w') | ||
45 | sys.stderr = sys.stdout | ||
46 | |||
47 | class PRTestSetup(object): | ||
48 | |||
49 | def start_server(self, name, dbfile, upstream=None, read_only=False, prefunc=server_prefunc): | ||
50 | |||
51 | def cleanup_server(server): | ||
52 | if server.process.exitcode is not None: | ||
53 | return | ||
54 | server.process.terminate() | ||
55 | server.process.join() | ||
56 | |||
57 | server = create_server(socket.gethostbyname("localhost") + ":0", | ||
58 | dbfile, | ||
59 | upstream=upstream, | ||
60 | read_only=read_only) | ||
61 | |||
62 | server.serve_as_process(prefunc=prefunc, args=(name,)) | ||
63 | self.addCleanup(cleanup_server, server) | ||
64 | |||
65 | return server | ||
66 | |||
67 | def start_client(self, server_address): | ||
68 | def cleanup_client(client): | ||
69 | client.close() | ||
70 | |||
71 | client = create_client(server_address) | ||
72 | self.addCleanup(cleanup_client, client) | ||
73 | |||
74 | return client | ||
75 | |||
76 | class FunctionTests(unittest.TestCase): | ||
77 | |||
78 | def setUp(self): | ||
79 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv') | ||
80 | self.addCleanup(self.temp_dir.cleanup) | ||
81 | |||
82 | def test_increase_revision(self): | ||
83 | self.assertEqual(increase_revision("1"), "2") | ||
84 | self.assertEqual(increase_revision("1.0"), "1.1") | ||
85 | self.assertEqual(increase_revision("1.1.1"), "1.1.2") | ||
86 | self.assertEqual(increase_revision("1.1.1.3"), "1.1.1.4") | ||
87 | self.assertEqual(increase_revision("9"), "10") | ||
88 | self.assertEqual(increase_revision("1.9"), "1.10") | ||
89 | self.assertRaises(ValueError, increase_revision, "1.a") | ||
90 | self.assertRaises(ValueError, increase_revision, "1.") | ||
91 | self.assertRaises(ValueError, increase_revision, "") | ||
92 | |||
93 | def test_revision_greater_or_equal(self): | ||
94 | self.assertTrue(_revision_greater_or_equal("2", "2")) | ||
95 | self.assertTrue(_revision_greater_or_equal("2", "1")) | ||
96 | self.assertTrue(_revision_greater_or_equal("10", "2")) | ||
97 | self.assertTrue(_revision_greater_or_equal("1.10", "1.2")) | ||
98 | self.assertFalse(_revision_greater_or_equal("1.2", "1.10")) | ||
99 | self.assertTrue(_revision_greater_or_equal("1.10", "1")) | ||
100 | self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10")) | ||
101 | self.assertFalse(_revision_greater_or_equal("1.10.1", "1.10.2")) | ||
102 | self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10.1")) | ||
103 | self.assertTrue(_revision_greater_or_equal("1.10.1", "1")) | ||
104 | self.assertTrue(revision_greater("1.20", "1.3")) | ||
105 | self.assertTrue(revision_smaller("1.3", "1.20")) | ||
106 | |||
107 | # DB tests | ||
108 | |||
109 | def test_db(self): | ||
110 | dbfile = os.path.join(self.temp_dir.name, "testtable.sqlite3") | ||
111 | |||
112 | self.db = db.PRData(dbfile) | ||
113 | self.table = self.db["PRMAIN"] | ||
114 | |||
115 | self.table.store_value(version, pkgarch, checksum0, "0") | ||
116 | self.table.store_value(version, pkgarch, checksum1, "1") | ||
117 | # "No history" mode supports multiple PRs for the same checksum | ||
118 | self.table.store_value(version, pkgarch, checksum0, "2") | ||
119 | self.table.store_value(version, pkgarch, checksum2, "1.0") | ||
120 | |||
121 | self.assertTrue(self.table.test_package(version, pkgarch)) | ||
122 | self.assertFalse(self.table.test_package(version, other_arch)) | ||
123 | |||
124 | self.assertTrue(self.table.test_value(version, pkgarch, "0")) | ||
125 | self.assertTrue(self.table.test_value(version, pkgarch, "1")) | ||
126 | self.assertTrue(self.table.test_value(version, pkgarch, "2")) | ||
127 | |||
128 | self.assertEqual(self.table.find_package_max_value(version, pkgarch), "2") | ||
129 | |||
130 | self.assertEqual(self.table.find_min_value(version, pkgarch, checksum0), "0") | ||
131 | self.assertEqual(self.table.find_max_value(version, pkgarch, checksum0), "2") | ||
132 | |||
133 | # Test history modes | ||
134 | self.assertEqual(self.table.find_value(version, pkgarch, checksum0, True), "0") | ||
135 | self.assertEqual(self.table.find_value(version, pkgarch, checksum0, False), "2") | ||
136 | |||
137 | self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "3"), "3.0") | ||
138 | self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "1"), "1.1") | ||
139 | |||
140 | # Revision comparison tests | ||
141 | self.table.store_value(version, pkgarch, checksum1, "1.3") | ||
142 | self.table.store_value(version, pkgarch, checksum1, "1.20") | ||
143 | self.assertEqual(self.table.find_min_value(version, pkgarch, checksum1), "1") | ||
144 | self.assertEqual(self.table.find_max_value(version, pkgarch, checksum1), "1.20") | ||
145 | |||
146 | class PRBasicTests(PRTestSetup, unittest.TestCase): | ||
147 | |||
148 | def setUp(self): | ||
149 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv') | ||
150 | self.addCleanup(self.temp_dir.cleanup) | ||
151 | |||
152 | dbfile = os.path.join(self.temp_dir.name, "prtest-basic.sqlite3") | ||
153 | |||
154 | self.server1 = self.start_server("basic", dbfile) | ||
155 | self.client1 = self.start_client(self.server1.address) | ||
156 | |||
157 | def test_basic(self): | ||
158 | |||
159 | # Checks on a non-existing configuration | ||
160 | |||
161 | result = self.client1.test_pr(version, pkgarch, checksum0) | ||
162 | self.assertIsNone(result, "test_pr should return 'None' for a non existing PR") | ||
163 | |||
164 | result = self.client1.test_package(version, pkgarch) | ||
165 | self.assertFalse(result, "test_package should return 'False' for a non existing PR") | ||
166 | |||
167 | result = self.client1.max_package_pr(version, pkgarch) | ||
168 | self.assertIsNone(result, "max_package_pr should return 'None' for a non existing PR") | ||
169 | |||
170 | # Add a first configuration | ||
171 | |||
172 | result = self.client1.getPR(version, pkgarch, checksum0) | ||
173 | self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'") | ||
174 | |||
175 | result = self.client1.test_pr(version, pkgarch, checksum0) | ||
176 | self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR") | ||
177 | |||
178 | result = self.client1.test_package(version, pkgarch) | ||
179 | self.assertTrue(result, "test_package should return 'True' for an existing PR") | ||
180 | |||
181 | result = self.client1.max_package_pr(version, pkgarch) | ||
182 | self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series") | ||
183 | |||
184 | # Check that the same request gets the same value | ||
185 | |||
186 | result = self.client1.getPR(version, pkgarch, checksum0) | ||
187 | self.assertEqual(result, "0", "getPR: asking for the same PR a second time in a row should return the same value.") | ||
188 | |||
189 | # Add new configurations | ||
190 | |||
191 | result = self.client1.getPR(version, pkgarch, checksum1) | ||
192 | self.assertEqual(result, "1", "getPR: second PR of a package should be '1'") | ||
193 | |||
194 | result = self.client1.test_pr(version, pkgarch, checksum1) | ||
195 | self.assertEqual(result, "1", "test_pr should return '1' here, matching the result of getPR") | ||
196 | |||
197 | result = self.client1.max_package_pr(version, pkgarch) | ||
198 | self.assertEqual(result, "1", "max_package_pr should return '1' in the current test series") | ||
199 | |||
200 | result = self.client1.getPR(version, pkgarch, checksum2) | ||
201 | self.assertEqual(result, "2", "getPR: third PR of a package should be '2'") | ||
202 | |||
203 | result = self.client1.test_pr(version, pkgarch, checksum2) | ||
204 | self.assertEqual(result, "2", "test_pr should return '2' here, matching the result of getPR") | ||
205 | |||
206 | result = self.client1.max_package_pr(version, pkgarch) | ||
207 | self.assertEqual(result, "2", "max_package_pr should return '2' in the current test series") | ||
208 | |||
209 | result = self.client1.getPR(version, pkgarch, checksum3) | ||
210 | self.assertEqual(result, "3", "getPR: fourth PR of a package should be '3'") | ||
211 | |||
212 | result = self.client1.test_pr(version, pkgarch, checksum3) | ||
213 | self.assertEqual(result, "3", "test_pr should return '3' here, matching the result of getPR") | ||
214 | |||
215 | result = self.client1.max_package_pr(version, pkgarch) | ||
216 | self.assertEqual(result, "3", "max_package_pr should return '3' in the current test series") | ||
217 | |||
218 | # Ask again for the first configuration | ||
219 | |||
220 | result = self.client1.getPR(version, pkgarch, checksum0) | ||
221 | self.assertEqual(result, "4", "getPR: should return '4' in this configuration") | ||
222 | |||
223 | # Ask again with explicit "no history" mode | ||
224 | |||
225 | result = self.client1.getPR(version, pkgarch, checksum0, False) | ||
226 | self.assertEqual(result, "4", "getPR: should return '4' in this configuration") | ||
227 | |||
228 | # Ask again with explicit "history" mode. This should return the first recorded PR for checksum0 | ||
229 | |||
230 | result = self.client1.getPR(version, pkgarch, checksum0, True) | ||
231 | self.assertEqual(result, "0", "getPR: should return '0' in this configuration") | ||
232 | |||
233 | # Check again that another pkgarg resets the counters | ||
234 | |||
235 | result = self.client1.test_pr(version, other_arch, checksum0) | ||
236 | self.assertIsNone(result, "test_pr should return 'None' for a non existing PR") | ||
237 | |||
238 | result = self.client1.test_package(version, other_arch) | ||
239 | self.assertFalse(result, "test_package should return 'False' for a non existing PR") | ||
240 | |||
241 | result = self.client1.max_package_pr(version, other_arch) | ||
242 | self.assertIsNone(result, "max_package_pr should return 'None' for a non existing PR") | ||
243 | |||
244 | # Now add the configuration | ||
245 | |||
246 | result = self.client1.getPR(version, other_arch, checksum0) | ||
247 | self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'") | ||
248 | |||
249 | result = self.client1.test_pr(version, other_arch, checksum0) | ||
250 | self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR") | ||
251 | |||
252 | result = self.client1.test_package(version, other_arch) | ||
253 | self.assertTrue(result, "test_package should return 'True' for an existing PR") | ||
254 | |||
255 | result = self.client1.max_package_pr(version, other_arch) | ||
256 | self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series") | ||
257 | |||
258 | result = self.client1.is_readonly() | ||
259 | self.assertFalse(result, "Server should not be described as 'read-only'") | ||
260 | |||
261 | class PRUpstreamTests(PRTestSetup, unittest.TestCase): | ||
262 | |||
263 | def setUp(self): | ||
264 | |||
265 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv') | ||
266 | self.addCleanup(self.temp_dir.cleanup) | ||
267 | |||
268 | dbfile2 = os.path.join(self.temp_dir.name, "prtest-upstream2.sqlite3") | ||
269 | self.server2 = self.start_server("upstream2", dbfile2) | ||
270 | self.client2 = self.start_client(self.server2.address) | ||
271 | |||
272 | dbfile1 = os.path.join(self.temp_dir.name, "prtest-upstream1.sqlite3") | ||
273 | self.server1 = self.start_server("upstream1", dbfile1, upstream=self.server2.address) | ||
274 | self.client1 = self.start_client(self.server1.address) | ||
275 | |||
276 | dbfile0 = os.path.join(self.temp_dir.name, "prtest-local.sqlite3") | ||
277 | self.server0 = self.start_server("local", dbfile0, upstream=self.server1.address) | ||
278 | self.client0 = self.start_client(self.server0.address) | ||
279 | self.shared_db = dbfile0 | ||
280 | |||
281 | def test_upstream_and_readonly(self): | ||
282 | |||
283 | # For identical checksums, all servers should return the same PR | ||
284 | |||
285 | result = self.client2.getPR(version, pkgarch, checksum0) | ||
286 | self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'") | ||
287 | |||
288 | result = self.client1.getPR(version, pkgarch, checksum0) | ||
289 | self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)") | ||
290 | |||
291 | result = self.client0.getPR(version, pkgarch, checksum0) | ||
292 | self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)") | ||
293 | |||
294 | # Now introduce new checksums on server1 for the same version | ||
295 | |||
296 | result = self.client1.getPR(version, pkgarch, checksum1) | ||
297 | self.assertEqual(result, "0.0", "getPR: first PR of a package which has a different checksum upstream should be '0.0'") | ||
298 | |||
299 | result = self.client1.getPR(version, pkgarch, checksum2) | ||
300 | self.assertEqual(result, "0.1", "getPR: second PR of a package that has a different checksum upstream should be '0.1'") | ||
301 | |||
302 | # Now ask server1 again for the same checksums and version | ||
303 | |||
304 | result = self.client1.getPR(version, pkgarch, checksum1) | ||
305 | self.assertEqual(result, "0.2", "getPR: can't decrease for known PR") | ||
306 | |||
307 | result = self.client1.getPR(version, pkgarch, checksum2) | ||
308 | self.assertEqual(result, "0.3") | ||
309 | |||
310 | result = self.client1.max_package_pr(version, pkgarch) | ||
311 | self.assertEqual(result, "0.3") | ||
312 | |||
313 | result = self.client0.getPR(version, pkgarch, checksum3) | ||
314 | self.assertEqual(result, "0.3.0", "getPR: first PR of a checksum that doesn't exist upstream should be '0.3.0'") | ||
315 | |||
316 | result = self.client0.getPR(version, pkgarch, checksum4) | ||
317 | self.assertEqual(result, "0.3.1", "getPR: second PR of a checksum that doesn't exist upstream should be '0.3.1'") | ||
318 | |||
319 | result = self.client0.getPR(version, pkgarch, checksum3) | ||
320 | self.assertEqual(result, "0.3.2") | ||
321 | |||
322 | # More upstream updates | ||
323 | # Here, we assume no communication between server2 and server0. server2 only impacts server0 | ||
324 | # after impacting server1 | ||
325 | |||
326 | self.assertEqual(self.client2.getPR(version, pkgarch, checksum5), "1") | ||
327 | self.assertEqual(self.client1.getPR(version, pkgarch, checksum6), "1.0") | ||
328 | self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "1.1") | ||
329 | self.assertEqual(self.client0.getPR(version, pkgarch, checksum8), "1.1.0") | ||
330 | self.assertEqual(self.client0.getPR(version, pkgarch, checksum9), "1.1.1") | ||
331 | |||
332 | # "history" mode tests | ||
333 | |||
334 | self.assertEqual(self.client2.getPR(version, pkgarch, checksum0, True), "0") | ||
335 | self.assertEqual(self.client1.getPR(version, pkgarch, checksum2, True), "0.1") | ||
336 | self.assertEqual(self.client0.getPR(version, pkgarch, checksum3, True), "0.3.0") | ||
337 | |||
338 | # More "no history" mode tests | ||
339 | |||
340 | self.assertEqual(self.client2.getPR(version, pkgarch, checksum0), "2") | ||
341 | self.assertEqual(self.client1.getPR(version, pkgarch, checksum0), "2") # Same as upstream | ||
342 | self.assertEqual(self.client0.getPR(version, pkgarch, checksum0), "2") # Same as upstream | ||
343 | self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "3") # This could be surprising, but since the previous revision was "2", increasing it yields "3". | ||
344 | # We don't know how many upstream servers we have | ||
345 | # Start read-only server with server1 as upstream | ||
346 | self.server_ro = self.start_server("local-ro", self.shared_db, upstream=self.server1.address, read_only=True) | ||
347 | self.client_ro = self.start_client(self.server_ro.address) | ||
348 | |||
349 | self.assertTrue(self.client_ro.is_readonly(), "Database should be described as 'read-only'") | ||
350 | |||
351 | # Checks on non-existing configurations | ||
352 | self.assertIsNone(self.client_ro.test_pr(version, pkgarch, checksumX)) | ||
353 | self.assertFalse(self.client_ro.test_package("unknown", pkgarch)) | ||
354 | |||
355 | # Look up existing configurations | ||
356 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0), "3") # "no history" mode | ||
357 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0, True), "0") # "history" mode | ||
358 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3), "3") | ||
359 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3, True), "0.3.0") | ||
360 | self.assertEqual(self.client_ro.max_package_pr(version, pkgarch), "2") # expected, as "3" was never saved | ||
361 | |||
362 | # Try to insert a new value. Here, this one is known upstream. | ||
363 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum7), "3") | ||
364 | # Try to insert a completely new value. As the max upstream value is already "3", it should be "3.0" | ||
365 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum10), "3.0") | ||
366 | # Same with another value which only exists in the upstream upstream server | ||
367 | # This time, as the upstream server doesn't know it, it will ask its upstream server. So that's a known one. | ||
368 | self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum9), "3") | ||
369 | |||
370 | class ScriptTests(unittest.TestCase): | ||
371 | |||
372 | def setUp(self): | ||
373 | |||
374 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv') | ||
375 | self.addCleanup(self.temp_dir.cleanup) | ||
376 | self.dbfile = os.path.join(self.temp_dir.name, "prtest.sqlite3") | ||
377 | |||
378 | def test_1_start_bitbake_prserv(self): | ||
379 | try: | ||
380 | subprocess.check_call([BIN_DIR / "bitbake-prserv", "--start", "-f", self.dbfile]) | ||
381 | except subprocess.CalledProcessError as e: | ||
382 | self.fail("Failed to start bitbake-prserv: %s" % e.returncode) | ||
383 | |||
384 | def test_2_stop_bitbake_prserv(self): | ||
385 | try: | ||
386 | subprocess.check_call([BIN_DIR / "bitbake-prserv", "--stop"]) | ||
387 | except subprocess.CalledProcessError as e: | ||
388 | self.fail("Failed to stop bitbake-prserv: %s" % e.returncode) | ||
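The assertions above exercise prserv's dotted PR numbering across a chain of servers. As a rough mental model of a single server (a toy sketch only, not the real prserv code; all names are hypothetical), "history" mode returns the revision a checksum was first given, while "no history" mode returns the current maximum, bumping it when a previously seen checksum comes back after others. The dotted components such as "0.3.0" come from downstream servers extending their upstream's value; that part is omitted here.

# Toy model of the two lookup modes asserted above (hypothetical code).
class ToyPRStore:
    def __init__(self):
        self.first_seen = {}   # checksum -> PR recorded at first sight
        self.latest = None     # checksum currently holding the max PR
        self.maxpr = -1

    def getPR(self, checksum, history=False):
        if checksum not in self.first_seen:
            self.maxpr += 1
            self.first_seen[checksum] = str(self.maxpr)
            self.latest = checksum
        elif not history and self.latest != checksum:
            # A re-requested older checksum is bumped past the max,
            # matching e.g. getPR(checksum0) returning "2" above.
            self.maxpr += 1
            self.latest = checksum
        return self.first_seen[checksum] if history else str(self.maxpr)

store = ToyPRStore()
store.getPR("c0")                               # "0"
store.getPR("c5")                               # "1"
assert store.getPR("c0") == "2"                 # "no history" bumps
assert store.getPR("c0", history=True) == "0"   # "history" keeps the original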
diff --git a/bitbake/lib/toaster/orm/fixtures/check_fixtures.py b/bitbake/lib/toaster/orm/fixtures/check_fixtures.py new file mode 100755 index 0000000000..ae3722e0f6 --- /dev/null +++ b/bitbake/lib/toaster/orm/fixtures/check_fixtures.py | |||
@@ -0,0 +1,38 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright (C) 2025 Linux Foundation | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import json | ||
8 | import urllib.request | ||
9 | |||
10 | import gen_fixtures as fixtures | ||
11 | |||
12 | RELEASE_URL = "https://dashboard.yoctoproject.org/releases.json" | ||
13 | |||
14 | with urllib.request.urlopen(RELEASE_URL) as response: | ||
15 | if response.getcode() == 200: | ||
16 | data = response.read().decode("utf-8") | ||
17 | releases = json.loads(data) | ||
18 | else: | ||
19 | print("Couldn't access %s: %s" % (RELEASE_URL, reponse.getcode())) | ||
20 | exit(1) | ||
21 | |||
22 | |||
23 | # grab the recent release branches and add master, so we can ignore old branches | ||
24 | active_releases = [ | ||
25 | e["release_codename"].lower() for e in releases if e["series"] == "current" | ||
26 | ] | ||
27 | active_releases.append("master") | ||
28 | active_releases.append("head") | ||
29 | |||
30 | fixtures_releases = [x[0].lower() for x in fixtures.current_releases] | ||
31 | |||
32 | if set(active_releases) != set(fixtures_releases): | ||
33 | print("WARNING: Active releases don't match toaster configured releases, the difference is: %s" % set(active_releases).difference(set(fixtures_releases))) | ||
34 | print("Active releases: %s" % sorted(active_releases)) | ||
35 | print("Toaster configured releases: %s" % sorted(fixtures_releases)) | ||
36 | else: | ||
37 | print("Success, configuration matches") | ||
38 | |||
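A caveat worth noting about the fetch in check_fixtures.py: urllib.request.urlopen() raises urllib.error.HTTPError for non-2xx status codes rather than returning a response, so the else branch above is effectively unreachable. A sketch that handles the failure path explicitly could look like this (hypothetical, not part of the patch):

import json
import sys
import urllib.error
import urllib.request

RELEASE_URL = "https://dashboard.yoctoproject.org/releases.json"

try:
    with urllib.request.urlopen(RELEASE_URL, timeout=30) as response:
        releases = json.loads(response.read().decode("utf-8"))
except (urllib.error.URLError, json.JSONDecodeError) as exc:
    # HTTPError is a subclass of URLError, so HTTP failures land here too.
    print("Couldn't access %s: %s" % (RELEASE_URL, exc))
    sys.exit(1)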
diff --git a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py index 71afe3914e..6201f679b9 100755 --- a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py +++ b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py | |||
@@ -9,7 +9,7 @@ | |||
9 | # | 9 | # |
10 | # Edit the 'current_releases' table for each new release cycle | 10 | # Edit the 'current_releases' table for each new release cycle |
11 | # | 11 | # |
12 | # Usage: ./get_fixtures all | 12 | # Usage: ./gen_fixtures --all |
13 | # | 13 | # |
14 | 14 | ||
15 | import os | 15 | import os |
@@ -35,19 +35,23 @@ verbose = False | |||
35 | # [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch] | 35 | # [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch] |
36 | current_releases = [ | 36 | current_releases = [ |
37 | # Release slot #1 | 37 | # Release slot #1 |
38 | ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'], | 38 | ['Scarthgap','5.0','April 2024','5.0.0 (April 2024)','Long Term Support (until April 2028)','','2.8'], |
39 | # Release slot #2 'local' | 39 | # Release slot #2 'local' |
40 | ['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'], | 40 | ['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'], |
41 | # Release slot #3 'master' | 41 | # Release slot #3 'master' |
42 | ['Master','master','','Yocto Project master','master','','master'], | 42 | ['Master','master','','Yocto Project master','master','','master'], |
43 | # Release slot #4 | 43 | # Release slot #4 |
44 | ['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'], | 44 | ['Whinlatter','5.3','October 2025','5.3.0 (October 2025)','Support for 7 months (until May 2026)','','2.14'], |
45 | # ['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'], | 45 | ['Walnascar','5.2','April 2025','5.2.0 (April 2025)','Support for 7 months (until October 2025)','','2.12'], |
46 | # ['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'], | 46 | #['Styhead','5.1','November 2024','5.1.0 (November 2024)','Support for 7 months (until May 2025)','','2.10'], |
47 | # ['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'], | 47 | #['Nanbield','4.3','November 2023','4.3.0 (November 2023)','Support for 7 months (until May 2024)','','2.6'], |
48 | # ['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'], | 48 | #['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'], |
49 | # Optional Release slot #5 | 49 | #['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'], |
50 | ['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'], | 50 | ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'], |
51 | #['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'], | ||
52 | #['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'], | ||
53 | #['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'], | ||
54 | #['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'], | ||
51 | ] | 55 | ] |
52 | 56 | ||
53 | default_poky_layers = [ | 57 | default_poky_layers = [ |
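To make the current_releases row layout concrete, each entry follows the column comment near the top of the file. A small illustrative unpacking of the Scarthgap row (the namedtuple and its field names are hypothetical, for illustration only):

from collections import namedtuple

# Columns per the comment in gen_fixtures.py:
# [Codename, Yocto Project Version, Release Date, Current Version,
#  Support Level, Poky Version, BitBake branch]
Release = namedtuple(
    "Release",
    "codename yp_version release_date current_version support_level "
    "poky_version bitbake_branch",
)

scarthgap = Release('Scarthgap', '5.0', 'April 2024', '5.0.0 (April 2024)',
                    'Long Term Support (until April 2028)', '', '2.8')
print(scarthgap.codename.lower(), scarthgap.bitbake_branch)  # scarthgap 2.8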
diff --git a/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/bitbake/lib/toaster/orm/fixtures/oe-core.xml index 950f2a98af..264231d139 100644 --- a/bitbake/lib/toaster/orm/fixtures/oe-core.xml +++ b/bitbake/lib/toaster/orm/fixtures/oe-core.xml | |||
@@ -8,9 +8,9 @@ | |||
8 | 8 | ||
9 | <!-- Bitbake versions which correspond to the metadata release --> | 9 | <!-- Bitbake versions which correspond to the metadata release --> |
10 | <object model="orm.bitbakeversion" pk="1"> | 10 | <object model="orm.bitbakeversion" pk="1"> |
11 | <field type="CharField" name="name">kirkstone</field> | 11 | <field type="CharField" name="name">scarthgap</field> |
12 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | 12 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> |
13 | <field type="CharField" name="branch">2.0</field> | 13 | <field type="CharField" name="branch">2.8</field> |
14 | </object> | 14 | </object> |
15 | <object model="orm.bitbakeversion" pk="2"> | 15 | <object model="orm.bitbakeversion" pk="2"> |
16 | <field type="CharField" name="name">HEAD</field> | 16 | <field type="CharField" name="name">HEAD</field> |
@@ -23,23 +23,33 @@ | |||
23 | <field type="CharField" name="branch">master</field> | 23 | <field type="CharField" name="branch">master</field> |
24 | </object> | 24 | </object> |
25 | <object model="orm.bitbakeversion" pk="4"> | 25 | <object model="orm.bitbakeversion" pk="4"> |
26 | <field type="CharField" name="name">mickledore</field> | 26 | <field type="CharField" name="name">whinlatter</field> |
27 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | 27 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> |
28 | <field type="CharField" name="branch">2.4</field> | 28 | <field type="CharField" name="branch">2.14</field> |
29 | </object> | 29 | </object> |
30 | <object model="orm.bitbakeversion" pk="5"> | 30 | <object model="orm.bitbakeversion" pk="5"> |
31 | <field type="CharField" name="name">dunfell</field> | 31 | <field type="CharField" name="name">walnascar</field> |
32 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | ||
33 | <field type="CharField" name="branch">2.12</field> | ||
34 | </object> | ||
35 | <object model="orm.bitbakeversion" pk="6"> | ||
36 | <field type="CharField" name="name">styhead</field> | ||
32 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | 37 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> |
33 | <field type="CharField" name="branch">1.46</field> | 38 | <field type="CharField" name="branch">2.10</field> |
39 | </object> | ||
40 | <object model="orm.bitbakeversion" pk="7"> | ||
41 | <field type="CharField" name="name">kirkstone</field> | ||
42 | <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field> | ||
43 | <field type="CharField" name="branch">2.0</field> | ||
34 | </object> | 44 | </object> |
35 | 45 | ||
36 | <!-- Releases available --> | 46 | <!-- Releases available --> |
37 | <object model="orm.release" pk="1"> | 47 | <object model="orm.release" pk="1"> |
38 | <field type="CharField" name="name">kirkstone</field> | 48 | <field type="CharField" name="name">scarthgap</field> |
39 | <field type="CharField" name="description">Openembedded Kirkstone</field> | 49 | <field type="CharField" name="description">Openembedded Scarthgap</field> |
40 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> | 50 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> |
41 | <field type="CharField" name="branch_name">kirkstone</field> | 51 | <field type="CharField" name="branch_name">scarthgap</field> |
42 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=kirkstone\">OpenEmbedded Kirkstone</a> branch.</field> | 52 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=scarthgap\">OpenEmbedded Scarthgap</a> branch.</field> |
43 | </object> | 53 | </object> |
44 | <object model="orm.release" pk="2"> | 54 | <object model="orm.release" pk="2"> |
45 | <field type="CharField" name="name">local</field> | 55 | <field type="CharField" name="name">local</field> |
@@ -56,18 +66,32 @@ | |||
56 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> | 66 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> |
57 | </object> | 67 | </object> |
58 | <object model="orm.release" pk="4"> | 68 | <object model="orm.release" pk="4"> |
59 | <field type="CharField" name="name">mickledore</field> | 69 | <field type="CharField" name="name">whinlatter</field> |
60 | <field type="CharField" name="description">Openembedded Mickledore</field> | 70 | <field type="CharField" name="description">Openembedded Whinlatter</field> |
61 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> | 71 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> |
62 | <field type="CharField" name="branch_name">mickledore</field> | 72 | <field type="CharField" name="branch_name">whinlatter</field> |
63 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=mickledore\">OpenEmbedded Mickledore</a> branch.</field> | 73 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=whinlatter\">OpenEmbedded Whinlatter</a> branch.</field> |
64 | </object> | 74 | </object> |
65 | <object model="orm.release" pk="5"> | 75 | <object model="orm.release" pk="5"> |
66 | <field type="CharField" name="name">dunfell</field> | 76 | <field type="CharField" name="name">walnascar</field> |
67 | <field type="CharField" name="description">Openembedded Dunfell</field> | 77 | <field type="CharField" name="description">Openembedded Walnascar</field> |
68 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> | 78 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> |
69 | <field type="CharField" name="branch_name">dunfell</field> | 79 | <field type="CharField" name="branch_name">walnascar</field> |
70 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\">OpenEmbedded Dunfell</a> branch.</field> | 80 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=walnascar\">OpenEmbedded Walnascar</a> branch.</field> |
81 | </object> | ||
82 | <object model="orm.release" pk="6"> | ||
83 | <field type="CharField" name="name">styhead</field> | ||
84 | <field type="CharField" name="description">Openembedded Styhead</field> | ||
85 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">6</field> | ||
86 | <field type="CharField" name="branch_name">styhead</field> | ||
87 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=styhead\">OpenEmbedded Styhead</a> branch.</field> | ||
88 | </object> | ||
89 | <object model="orm.release" pk="7"> | ||
90 | <field type="CharField" name="name">kirkstone</field> | ||
91 | <field type="CharField" name="description">Openembedded Kirkstone</field> | ||
92 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">7</field> | ||
93 | <field type="CharField" name="branch_name">kirkstone</field> | ||
94 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=kirkstone\">OpenEmbedded Kirkstone</a> branch.</field> | ||
71 | </object> | 95 | </object> |
72 | 96 | ||
73 | <!-- Default layers for each release --> | 97 | <!-- Default layers for each release --> |
@@ -91,6 +115,14 @@ | |||
91 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | 115 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> |
92 | <field type="CharField" name="layer_name">openembedded-core</field> | 116 | <field type="CharField" name="layer_name">openembedded-core</field> |
93 | </object> | 117 | </object> |
118 | <object model="orm.releasedefaultlayer" pk="6"> | ||
119 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
120 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
121 | </object> | ||
122 | <object model="orm.releasedefaultlayer" pk="7"> | ||
123 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
124 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
125 | </object> | ||
94 | 126 | ||
95 | 127 | ||
96 | <!-- Layer for the Local release --> | 128 | <!-- Layer for the Local release --> |
diff --git a/bitbake/lib/toaster/orm/fixtures/poky.xml b/bitbake/lib/toaster/orm/fixtures/poky.xml index 121e52fd45..6cf4f0687a 100644 --- a/bitbake/lib/toaster/orm/fixtures/poky.xml +++ b/bitbake/lib/toaster/orm/fixtures/poky.xml | |||
@@ -8,9 +8,9 @@ | |||
8 | 8 | ||
9 | <!-- Bitbake versions which correspond to the metadata release --> | 9 | <!-- Bitbake versions which correspond to the metadata release --> |
10 | <object model="orm.bitbakeversion" pk="1"> | 10 | <object model="orm.bitbakeversion" pk="1"> |
11 | <field type="CharField" name="name">kirkstone</field> | 11 | <field type="CharField" name="name">scarthgap</field> |
12 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | 12 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> |
13 | <field type="CharField" name="branch">kirkstone</field> | 13 | <field type="CharField" name="branch">scarthgap</field> |
14 | <field type="CharField" name="dirpath">bitbake</field> | 14 | <field type="CharField" name="dirpath">bitbake</field> |
15 | </object> | 15 | </object> |
16 | <object model="orm.bitbakeversion" pk="2"> | 16 | <object model="orm.bitbakeversion" pk="2"> |
@@ -26,26 +26,38 @@ | |||
26 | <field type="CharField" name="dirpath">bitbake</field> | 26 | <field type="CharField" name="dirpath">bitbake</field> |
27 | </object> | 27 | </object> |
28 | <object model="orm.bitbakeversion" pk="4"> | 28 | <object model="orm.bitbakeversion" pk="4"> |
29 | <field type="CharField" name="name">mickledore</field> | 29 | <field type="CharField" name="name">whinlatter</field> |
30 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | 30 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> |
31 | <field type="CharField" name="branch">mickledore</field> | 31 | <field type="CharField" name="branch">whinlatter</field> |
32 | <field type="CharField" name="dirpath">bitbake</field> | 32 | <field type="CharField" name="dirpath">bitbake</field> |
33 | </object> | 33 | </object> |
34 | <object model="orm.bitbakeversion" pk="5"> | 34 | <object model="orm.bitbakeversion" pk="5"> |
35 | <field type="CharField" name="name">dunfell</field> | 35 | <field type="CharField" name="name">walnascar</field> |
36 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | ||
37 | <field type="CharField" name="branch">walnascar</field> | ||
38 | <field type="CharField" name="dirpath">bitbake</field> | ||
39 | </object> | ||
40 | <object model="orm.bitbakeversion" pk="6"> | ||
41 | <field type="CharField" name="name">styhead</field> | ||
36 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | 42 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> |
37 | <field type="CharField" name="branch">dunfell</field> | 43 | <field type="CharField" name="branch">styhead</field> |
44 | <field type="CharField" name="dirpath">bitbake</field> | ||
45 | </object> | ||
46 | <object model="orm.bitbakeversion" pk="7"> | ||
47 | <field type="CharField" name="name">kirkstone</field> | ||
48 | <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field> | ||
49 | <field type="CharField" name="branch">kirkstone</field> | ||
38 | <field type="CharField" name="dirpath">bitbake</field> | 50 | <field type="CharField" name="dirpath">bitbake</field> |
39 | </object> | 51 | </object> |
40 | 52 | ||
41 | 53 | ||
42 | <!-- Releases available --> | 54 | <!-- Releases available --> |
43 | <object model="orm.release" pk="1"> | 55 | <object model="orm.release" pk="1"> |
44 | <field type="CharField" name="name">kirkstone</field> | 56 | <field type="CharField" name="name">scarthgap</field> |
45 | <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field> | 57 | <field type="CharField" name="description">Yocto Project 5.0 "Scarthgap"</field> |
46 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> | 58 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> |
47 | <field type="CharField" name="branch_name">kirkstone</field> | 59 | <field type="CharField" name="branch_name">scarthgap</field> |
48 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone">Yocto Project Kirkstone branch</a>.</field> | 60 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=scarthgap">Yocto Project Scarthgap branch</a>.</field> |
49 | </object> | 61 | </object> |
50 | <object model="orm.release" pk="2"> | 62 | <object model="orm.release" pk="2"> |
51 | <field type="CharField" name="name">local</field> | 63 | <field type="CharField" name="name">local</field> |
@@ -62,18 +74,32 @@ | |||
62 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> | 74 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> |
63 | </object> | 75 | </object> |
64 | <object model="orm.release" pk="4"> | 76 | <object model="orm.release" pk="4"> |
65 | <field type="CharField" name="name">mickledore</field> | 77 | <field type="CharField" name="name">whinlatter</field> |
66 | <field type="CharField" name="description">Yocto Project 4.2 "Mickledore"</field> | 78 | <field type="CharField" name="description">Yocto Project 5.3 "Whinlatter"</field> |
67 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> | 79 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> |
68 | <field type="CharField" name="branch_name">mickledore</field> | 80 | <field type="CharField" name="branch_name">whinlatter</field> |
69 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=mickledore">Yocto Project Mickledore branch</a>.</field> | 81 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=whinlatter">Yocto Project Whinlatter branch</a>.</field> |
70 | </object> | 82 | </object> |
71 | <object model="orm.release" pk="5"> | 83 | <object model="orm.release" pk="5"> |
72 | <field type="CharField" name="name">dunfell</field> | 84 | <field type="CharField" name="name">walnascar</field> |
73 | <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field> | 85 | <field type="CharField" name="description">Yocto Project 5.2 "Walnascar"</field> |
74 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> | 86 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field> |
75 | <field type="CharField" name="branch_name">dunfell</field> | 87 | <field type="CharField" name="branch_name">walnascar</field> |
76 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell">Yocto Project Dunfell branch</a>.</field> | 88 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=walnascar">Yocto Project Walnascar branch</a>.</field> |
89 | </object> | ||
90 | <object model="orm.release" pk="6"> | ||
91 | <field type="CharField" name="name">styhead</field> | ||
92 | <field type="CharField" name="description">Yocto Project 5.1 "Styhead"</field> | ||
93 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">6</field> | ||
94 | <field type="CharField" name="branch_name">styhead</field> | ||
95 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=styhead">Yocto Project Styhead branch</a>.</field> | ||
96 | </object> | ||
97 | <object model="orm.release" pk="7"> | ||
98 | <field type="CharField" name="name">kirkstone</field> | ||
99 | <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field> | ||
100 | <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">7</field> | ||
101 | <field type="CharField" name="branch_name">kirkstone</field> | ||
102 | <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone">Yocto Project Kirkstone branch</a>.</field> | ||
77 | </object> | 103 | </object> |
78 | 104 | ||
79 | <!-- Default project layers for each release --> | 105 | <!-- Default project layers for each release --> |
@@ -137,6 +163,30 @@ | |||
137 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | 163 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> |
138 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> | 164 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> |
139 | </object> | 165 | </object> |
166 | <object model="orm.releasedefaultlayer" pk="16"> | ||
167 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
168 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
169 | </object> | ||
170 | <object model="orm.releasedefaultlayer" pk="17"> | ||
171 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
172 | <field type="CharField" name="layer_name">meta-poky</field> | ||
173 | </object> | ||
174 | <object model="orm.releasedefaultlayer" pk="18"> | ||
175 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
176 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> | ||
177 | </object> | ||
178 | <object model="orm.releasedefaultlayer" pk="19"> | ||
179 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
180 | <field type="CharField" name="layer_name">openembedded-core</field> | ||
181 | </object> | ||
182 | <object model="orm.releasedefaultlayer" pk="20"> | ||
183 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
184 | <field type="CharField" name="layer_name">meta-poky</field> | ||
185 | </object> | ||
186 | <object model="orm.releasedefaultlayer" pk="21"> | ||
187 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
188 | <field type="CharField" name="layer_name">meta-yocto-bsp</field> | ||
189 | </object> | ||
140 | 190 | ||
141 | <!-- Default layers provided by poky | 191 | <!-- Default layers provided by poky |
142 | openembedded-core | 192 | openembedded-core |
@@ -155,7 +205,7 @@ | |||
155 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 205 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
156 | <field type="IntegerField" name="layer_source">0</field> | 206 | <field type="IntegerField" name="layer_source">0</field> |
157 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 207 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
158 | <field type="CharField" name="branch">kirkstone</field> | 208 | <field type="CharField" name="branch">scarthgap</field> |
159 | <field type="CharField" name="dirpath">meta</field> | 209 | <field type="CharField" name="dirpath">meta</field> |
160 | </object> | 210 | </object> |
161 | <object model="orm.layer_version" pk="2"> | 211 | <object model="orm.layer_version" pk="2"> |
@@ -177,14 +227,28 @@ | |||
177 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 227 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
178 | <field type="IntegerField" name="layer_source">0</field> | 228 | <field type="IntegerField" name="layer_source">0</field> |
179 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 229 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
180 | <field type="CharField" name="branch">mickledore</field> | 230 | <field type="CharField" name="branch">whinlatter</field> |
181 | <field type="CharField" name="dirpath">meta</field> | 231 | <field type="CharField" name="dirpath">meta</field> |
182 | </object> | 232 | </object> |
183 | <object model="orm.layer_version" pk="5"> | 233 | <object model="orm.layer_version" pk="5"> |
184 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | 234 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> |
185 | <field type="IntegerField" name="layer_source">0</field> | 235 | <field type="IntegerField" name="layer_source">0</field> |
186 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | 236 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> |
187 | <field type="CharField" name="branch">dunfell</field> | 237 | <field type="CharField" name="branch">walnascar</field> |
238 | <field type="CharField" name="dirpath">meta</field> | ||
239 | </object> | ||
240 | <object model="orm.layer_version" pk="6"> | ||
241 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | ||
242 | <field type="IntegerField" name="layer_source">0</field> | ||
243 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
244 | <field type="CharField" name="branch">styhead</field> | ||
245 | <field type="CharField" name="dirpath">meta</field> | ||
246 | </object> | ||
247 | <object model="orm.layer_version" pk="7"> | ||
248 | <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> | ||
249 | <field type="IntegerField" name="layer_source">0</field> | ||
250 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
251 | <field type="CharField" name="branch">kirkstone</field> | ||
188 | <field type="CharField" name="dirpath">meta</field> | 252 | <field type="CharField" name="dirpath">meta</field> |
189 | </object> | 253 | </object> |
190 | 254 | ||
@@ -196,14 +260,14 @@ | |||
196 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 260 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
197 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 261 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
198 | </object> | 262 | </object> |
199 | <object model="orm.layer_version" pk="6"> | 263 | <object model="orm.layer_version" pk="8"> |
200 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 264 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
201 | <field type="IntegerField" name="layer_source">0</field> | 265 | <field type="IntegerField" name="layer_source">0</field> |
202 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 266 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
203 | <field type="CharField" name="branch">kirkstone</field> | 267 | <field type="CharField" name="branch">scarthgap</field> |
204 | <field type="CharField" name="dirpath">meta-poky</field> | 268 | <field type="CharField" name="dirpath">meta-poky</field> |
205 | </object> | 269 | </object> |
206 | <object model="orm.layer_version" pk="7"> | 270 | <object model="orm.layer_version" pk="9"> |
207 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 271 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
208 | <field type="IntegerField" name="layer_source">0</field> | 272 | <field type="IntegerField" name="layer_source">0</field> |
209 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> | 273 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> |
@@ -211,25 +275,39 @@ | |||
211 | <field type="CharField" name="commit">HEAD</field> | 275 | <field type="CharField" name="commit">HEAD</field> |
212 | <field type="CharField" name="dirpath">meta-poky</field> | 276 | <field type="CharField" name="dirpath">meta-poky</field> |
213 | </object> | 277 | </object> |
214 | <object model="orm.layer_version" pk="8"> | 278 | <object model="orm.layer_version" pk="10"> |
215 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 279 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
216 | <field type="IntegerField" name="layer_source">0</field> | 280 | <field type="IntegerField" name="layer_source">0</field> |
217 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> | 281 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> |
218 | <field type="CharField" name="branch">master</field> | 282 | <field type="CharField" name="branch">master</field> |
219 | <field type="CharField" name="dirpath">meta-poky</field> | 283 | <field type="CharField" name="dirpath">meta-poky</field> |
220 | </object> | 284 | </object> |
221 | <object model="orm.layer_version" pk="9"> | 285 | <object model="orm.layer_version" pk="11"> |
222 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 286 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
223 | <field type="IntegerField" name="layer_source">0</field> | 287 | <field type="IntegerField" name="layer_source">0</field> |
224 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 288 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
225 | <field type="CharField" name="branch">mickledore</field> | 289 | <field type="CharField" name="branch">whinlatter</field> |
226 | <field type="CharField" name="dirpath">meta-poky</field> | 290 | <field type="CharField" name="dirpath">meta-poky</field> |
227 | </object> | 291 | </object> |
228 | <object model="orm.layer_version" pk="10"> | 292 | <object model="orm.layer_version" pk="12"> |
229 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | 293 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> |
230 | <field type="IntegerField" name="layer_source">0</field> | 294 | <field type="IntegerField" name="layer_source">0</field> |
231 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | 295 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> |
232 | <field type="CharField" name="branch">dunfell</field> | 296 | <field type="CharField" name="branch">walnascar</field> |
297 | <field type="CharField" name="dirpath">meta-poky</field> | ||
298 | </object> | ||
299 | <object model="orm.layer_version" pk="13"> | ||
300 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | ||
301 | <field type="IntegerField" name="layer_source">0</field> | ||
302 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
303 | <field type="CharField" name="branch">styhead</field> | ||
304 | <field type="CharField" name="dirpath">meta-poky</field> | ||
305 | </object> | ||
306 | <object model="orm.layer_version" pk="14"> | ||
307 | <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> | ||
308 | <field type="IntegerField" name="layer_source">0</field> | ||
309 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
310 | <field type="CharField" name="branch">kirkstone</field> | ||
233 | <field type="CharField" name="dirpath">meta-poky</field> | 311 | <field type="CharField" name="dirpath">meta-poky</field> |
234 | </object> | 312 | </object> |
235 | 313 | ||
@@ -241,14 +319,14 @@ | |||
241 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 319 | <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
242 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> | 320 | <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> |
243 | </object> | 321 | </object> |
244 | <object model="orm.layer_version" pk="11"> | 322 | <object model="orm.layer_version" pk="15"> |
245 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 323 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
246 | <field type="IntegerField" name="layer_source">0</field> | 324 | <field type="IntegerField" name="layer_source">0</field> |
247 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> | 325 | <field rel="ManyToOneRel" to="orm.release" name="release">1</field> |
248 | <field type="CharField" name="branch">kirkstone</field> | 326 | <field type="CharField" name="branch">scarthgap</field> |
249 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 327 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
250 | </object> | 328 | </object> |
251 | <object model="orm.layer_version" pk="12"> | 329 | <object model="orm.layer_version" pk="16"> |
252 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 330 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
253 | <field type="IntegerField" name="layer_source">0</field> | 331 | <field type="IntegerField" name="layer_source">0</field> |
254 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> | 332 | <field rel="ManyToOneRel" to="orm.release" name="release">2</field> |
@@ -256,25 +334,39 @@ | |||
256 | <field type="CharField" name="commit">HEAD</field> | 334 | <field type="CharField" name="commit">HEAD</field> |
257 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 335 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
258 | </object> | 336 | </object> |
259 | <object model="orm.layer_version" pk="13"> | 337 | <object model="orm.layer_version" pk="17"> |
260 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 338 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
261 | <field type="IntegerField" name="layer_source">0</field> | 339 | <field type="IntegerField" name="layer_source">0</field> |
262 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> | 340 | <field rel="ManyToOneRel" to="orm.release" name="release">3</field> |
263 | <field type="CharField" name="branch">master</field> | 341 | <field type="CharField" name="branch">master</field> |
264 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 342 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
265 | </object> | 343 | </object> |
266 | <object model="orm.layer_version" pk="14"> | 344 | <object model="orm.layer_version" pk="18"> |
267 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 345 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
268 | <field type="IntegerField" name="layer_source">0</field> | 346 | <field type="IntegerField" name="layer_source">0</field> |
269 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> | 347 | <field rel="ManyToOneRel" to="orm.release" name="release">4</field> |
270 | <field type="CharField" name="branch">mickledore</field> | 348 | <field type="CharField" name="branch">whinlatter</field> |
271 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 349 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
272 | </object> | 350 | </object> |
273 | <object model="orm.layer_version" pk="15"> | 351 | <object model="orm.layer_version" pk="19"> |
274 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | 352 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> |
275 | <field type="IntegerField" name="layer_source">0</field> | 353 | <field type="IntegerField" name="layer_source">0</field> |
276 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> | 354 | <field rel="ManyToOneRel" to="orm.release" name="release">5</field> |
277 | <field type="CharField" name="branch">dunfell</field> | 355 | <field type="CharField" name="branch">walnascar</field> |
356 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | ||
357 | </object> | ||
358 | <object model="orm.layer_version" pk="20"> | ||
359 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | ||
360 | <field type="IntegerField" name="layer_source">0</field> | ||
361 | <field rel="ManyToOneRel" to="orm.release" name="release">6</field> | ||
362 | <field type="CharField" name="branch">styhead</field> | ||
363 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | ||
364 | </object> | ||
365 | <object model="orm.layer_version" pk="21"> | ||
366 | <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> | ||
367 | <field type="IntegerField" name="layer_source">0</field> | ||
368 | <field rel="ManyToOneRel" to="orm.release" name="release">7</field> | ||
369 | <field type="CharField" name="branch">kirkstone</field> | ||
278 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> | 370 | <field type="CharField" name="dirpath">meta-yocto-bsp</field> |
279 | </object> | 371 | </object> |
280 | </django-objects> | 372 | </django-objects> |
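For orientation, these fixture files (oe-core.xml, poky.xml) are loaded through Django's standard fixture machinery; loading one manually looks roughly like this (a sketch, assuming DJANGO_SETTINGS_MODULE points at a configured Toaster settings module):

import django
from django.core.management import call_command

django.setup()
call_command("loaddata", "poky")  # or "oe-core"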
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py index 19c9686206..e2f488ed89 100644 --- a/bitbake/lib/toaster/orm/models.py +++ b/bitbake/lib/toaster/orm/models.py | |||
@@ -79,7 +79,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']: | |||
79 | # end of HACK | 79 | # end of HACK |
80 | 80 | ||
81 | class GitURLValidator(validators.URLValidator): | 81 | class GitURLValidator(validators.URLValidator): |
82 | import re | ||
83 | regex = re.compile( | 82 | regex = re.compile( |
84 | r'^(?:ssh|git|http|ftp)s?://' # http:// or https:// | 83 | r'^(?:ssh|git|http|ftp)s?://' # http:// or https:// |
85 | r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... | 84 | r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... |
@@ -1500,7 +1499,7 @@ class Layer_Version(models.Model): | |||
1500 | # code lifted, with adaptations, from the layerindex-web application | 1499 | # code lifted, with adaptations, from the layerindex-web application |
1501 | # https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/ | 1500 | # https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/ |
1502 | def _handle_url_path(self, base_url, path): | 1501 | def _handle_url_path(self, base_url, path): |
1503 | import re, posixpath | 1502 | import posixpath |
1504 | if base_url: | 1503 | if base_url: |
1505 | if self.dirpath: | 1504 | if self.dirpath: |
1506 | if path: | 1505 | if path: |
diff --git a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py index 393be75496..6953541ab5 100644 --- a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py +++ b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py | |||
@@ -27,7 +27,7 @@ from selenium.webdriver.common.by import By | |||
27 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities | 27 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities |
28 | from selenium.common.exceptions import NoSuchElementException, \ | 28 | from selenium.common.exceptions import NoSuchElementException, \ |
29 | StaleElementReferenceException, TimeoutException, \ | 29 | StaleElementReferenceException, TimeoutException, \ |
30 | SessionNotCreatedException | 30 | SessionNotCreatedException, WebDriverException |
31 | 31 | ||
32 | def create_selenium_driver(cls,browser='chrome'): | 32 | def create_selenium_driver(cls,browser='chrome'): |
33 | # set default browser string based on env (if available) | 33 | # set default browser string based on env (if available) |
@@ -90,7 +90,7 @@ class Wait(WebDriverWait): | |||
90 | Subclass of WebDriverWait with predetermined timeout and poll | 90 | Subclass of WebDriverWait with predetermined timeout and poll |
91 | frequency. Also deals with a wider variety of exceptions. | 91 | frequency. Also deals with a wider variety of exceptions. |
92 | """ | 92 | """ |
93 | _TIMEOUT = 10 | 93 | _TIMEOUT = 20 |
94 | _POLL_FREQUENCY = 0.5 | 94 | _POLL_FREQUENCY = 0.5 |
95 | 95 | ||
96 | def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY): | 96 | def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY): |
@@ -114,6 +114,9 @@ class Wait(WebDriverWait): | |||
114 | pass | 114 | pass |
115 | except StaleElementReferenceException: | 115 | except StaleElementReferenceException: |
116 | pass | 116 | pass |
117 | except WebDriverException: | ||
118 | # selenium.common.exceptions.WebDriverException: Message: unknown error: unhandled inspector error: {"code":-32000,"message":"Node with given id does not belong to the document"} | ||
119 | pass | ||
117 | 120 | ||
118 | time.sleep(self._poll) | 121 | time.sleep(self._poll) |
119 | if time.time() > end_time: | 122 | if time.time() > end_time: |
@@ -183,7 +186,7 @@ class SeleniumTestCaseBase(unittest.TestCase): | |||
183 | self.driver.get(abs_url) | 186 | self.driver.get(abs_url) |
184 | 187 | ||
185 | try: # Ensure page is loaded before proceeding | 188 | try: # Ensure page is loaded before proceeding |
186 | self.wait_until_visible("#global-nav", poll=3) | 189 | self.wait_until_visible("#global-nav") |
187 | except NoSuchElementException: | 190 | except NoSuchElementException: |
188 | self.driver.implicitly_wait(3) | 191 | self.driver.implicitly_wait(3) |
189 | except TimeoutException: | 192 | except TimeoutException: |
@@ -208,36 +211,43 @@ class SeleniumTestCaseBase(unittest.TestCase): | |||
208 | """ Return the element which currently has focus on the page """ | 211 | """ Return the element which currently has focus on the page """ |
209 | return self.driver.switch_to.active_element | 212 | return self.driver.switch_to.active_element |
210 | 213 | ||
211 | def wait_until_present(self, selector, poll=0.5): | 214 | def wait_until_present(self, selector, timeout=Wait._TIMEOUT): |
212 | """ Wait until element matching CSS selector is on the page """ | 215 | """ Wait until element matching CSS selector is on the page """ |
213 | is_present = lambda driver: self.find(selector) | 216 | is_present = lambda driver: self.find(selector) |
214 | msg = 'An element matching "%s" should be on the page' % selector | 217 | msg = 'An element matching "%s" should be on the page' % selector |
215 | element = Wait(self.driver, poll=poll).until(is_present, msg) | 218 | element = Wait(self.driver, timeout=timeout).until(is_present, msg) |
216 | if poll > 2: | ||
217 | time.sleep(poll) # element need more delay to be present | ||
218 | return element | 219 | return element |
219 | 220 | ||
220 | def wait_until_visible(self, selector, poll=1): | 221 | def wait_until_visible(self, selector, timeout=Wait._TIMEOUT): |
221 | """ Wait until element matching CSS selector is visible on the page """ | 222 | """ Wait until element matching CSS selector is visible on the page """ |
222 | is_visible = lambda driver: self.find(selector).is_displayed() | 223 | is_visible = lambda driver: self.find(selector).is_displayed() |
223 | msg = 'An element matching "%s" should be visible' % selector | 224 | msg = 'An element matching "%s" should be visible' % selector |
224 | Wait(self.driver, poll=poll).until(is_visible, msg) | 225 | Wait(self.driver, timeout=timeout).until(is_visible, msg) |
225 | time.sleep(poll) # wait for visibility to settle | 226 | return self.find(selector) |
227 | |||
228 | def wait_until_not_visible(self, selector, timeout=Wait._TIMEOUT): | ||
229 | """ Wait until element matching CSS selector is not visible on the page """ | ||
230 | is_visible = lambda driver: self.find(selector).is_displayed() | ||
231 | msg = 'An element matching "%s" should not be visible' % selector | ||
232 | Wait(self.driver, timeout=timeout).until_not(is_visible, msg) | ||
226 | return self.find(selector) | 233 | return self.find(selector) |
227 | 234 | ||
228 | def wait_until_clickable(self, selector, poll=1): | 235 | def wait_until_clickable(self, selector, timeout=Wait._TIMEOUT): |
229 | """ Wait until element matching CSS selector is visible on the page """ | 236 | """ Wait until element matching CSS selector is visible on the page """ |
230 | WebDriverWait( | 237 | WebDriverWait(self.driver, timeout=timeout).until(lambda driver: self.driver.execute_script("return jQuery.active == 0")) |
231 | self.driver, | 238 | is_clickable = lambda driver: (self.find(selector).is_displayed() and self.find(selector).is_enabled()) |
232 | Wait._TIMEOUT, | 239 | msg = 'An element matching "%s" should be clickable' % selector |
233 | poll_frequency=poll | 240 | Wait(self.driver, timeout=timeout).until(is_clickable, msg) |
234 | ).until( | ||
235 | EC.element_to_be_clickable((By.ID, selector.removeprefix('#') | ||
236 | ) | ||
237 | ) | ||
238 | ) | ||
239 | return self.find(selector) | 241 | return self.find(selector) |
240 | 242 | ||
243 | def wait_until_element_clickable(self, finder, timeout=Wait._TIMEOUT): | ||
244 | """ Wait until element is clickable """ | ||
245 | WebDriverWait(self.driver, timeout=timeout).until(lambda driver: self.driver.execute_script("return jQuery.active == 0")) | ||
246 | is_clickable = lambda driver: (finder(driver).is_displayed() and finder(driver).is_enabled()) | ||
247 | msg = 'A matching element never became clickable' | ||
248 | Wait(self.driver, timeout=timeout).until(is_clickable, msg) | ||
249 | return finder(self.driver) | ||
250 | |||
241 | def wait_until_focused(self, selector): | 251 | def wait_until_focused(self, selector): |
242 | """ Wait until element matching CSS selector has focus """ | 252 | """ Wait until element matching CSS selector has focus """ |
243 | is_focused = \ | 253 | is_focused = \ |
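The wait helpers above now take an overall timeout in seconds rather than a poll interval, with polling fixed at Wait._POLL_FREQUENCY. A hedged usage sketch follows (the test class, URL name, and selector are hypothetical, and the import paths assume the toaster test layout):

from django.urls import reverse

from tests.browser.selenium_helpers import SeleniumTestCase

class TestBuildsTable(SeleniumTestCase):
    def test_table_loads(self):
        self.get(reverse('all-builds'))
        # Waits up to 30s for the first row, polling every 0.5s;
        # raises on timeout instead of sleeping a fixed interval.
        row = self.wait_until_visible('#allbuildstable tbody tr', timeout=30)
        self.assertTrue(row.is_displayed())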
diff --git a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py index b9356a0344..9ab81fb11b 100644 --- a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py +++ b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py | |||
@@ -200,6 +200,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
200 | 200 | ||
201 | # should see a rebuild button for non-command-line builds | 201 | # should see a rebuild button for non-command-line builds |
202 | self.wait_until_visible('#allbuildstable tbody tr') | 202 | self.wait_until_visible('#allbuildstable tbody tr') |
203 | self.wait_until_visible('.rebuild-btn') | ||
203 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id | 204 | selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id |
204 | run_again_button = self.find_all(selector) | 205 | run_again_button = self.find_all(selector) |
205 | self.assertEqual(len(run_again_button), 1, | 206 | self.assertEqual(len(run_again_button), 1, |
@@ -224,7 +225,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
224 | 225 | ||
225 | url = reverse('all-builds') | 226 | url = reverse('all-builds') |
226 | self.get(url) | 227 | self.get(url) |
227 | self.wait_until_visible('#allbuildstable', poll=3) | 228 | self.wait_until_visible('#allbuildstable') |
228 | 229 | ||
229 | # get the project name cells from the table | 230 | # get the project name cells from the table |
230 | cells = self.find_all('#allbuildstable td[class="project"]') | 231 | cells = self.find_all('#allbuildstable td[class="project"]') |
@@ -257,7 +258,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
257 | 258 | ||
258 | url = reverse('all-builds') | 259 | url = reverse('all-builds') |
259 | self.get(url) | 260 | self.get(url) |
260 | self.wait_until_visible('#allbuildstable', poll=3) | 261 | self.wait_until_visible('#allbuildstable') |
261 | 262 | ||
262 | # test recent builds area for successful build | 263 | # test recent builds area for successful build |
263 | element = self._get_build_time_element(build1) | 264 | element = self._get_build_time_element(build1) |
@@ -452,7 +453,7 @@ class TestAllBuildsPage(SeleniumTestCase): | |||
452 | def test_show_rows(row_to_show, show_row_link): | 453 | def test_show_rows(row_to_show, show_row_link): |
453 | # Check that we can show rows == row_to_show | 454 | # Check that we can show rows == row_to_show |
454 | show_row_link.select_by_value(str(row_to_show)) | 455 | show_row_link.select_by_value(str(row_to_show)) |
455 | self.wait_until_visible('#allbuildstable tbody tr', poll=3) | 456 | self.wait_until_visible('#allbuildstable tbody tr') |
456 | # check at least some rows are visible | 457 | # check at least some rows are visible |
457 | self.assertTrue( | 458 | self.assertTrue( |
458 | len(self.find_all('#allbuildstable tbody tr')) > 0 | 459 | len(self.find_all('#allbuildstable tbody tr')) > 0 |
diff --git a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py index 9ed1901cc9..05e12892be 100644 --- a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py +++ b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py | |||
@@ -81,7 +81,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
81 | 81 | ||
82 | def _get_row_for_project(self, project_name): | 82 | def _get_row_for_project(self, project_name): |
83 | """ Get the HTML row for a project, or None if not found """ | 83 | """ Get the HTML row for a project, or None if not found """ |
84 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 84 | self.wait_until_visible('#projectstable tbody tr') |
85 | rows = self.find_all('#projectstable tbody tr') | 85 | rows = self.find_all('#projectstable tbody tr') |
86 | 86 | ||
87 | # find the row with a project name matching the one supplied | 87 | # find the row with a project name matching the one supplied |
@@ -236,7 +236,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
236 | self.get(url) | 236 | self.get(url) |
237 | 237 | ||
238 | # Check search box is present and works | 238 | # Check search box is present and works |
239 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 239 | self.wait_until_visible('#projectstable tbody tr') |
240 | search_box = self.find('#search-input-projectstable') | 240 | search_box = self.find('#search-input-projectstable') |
241 | self.assertTrue(search_box.is_displayed()) | 241 | self.assertTrue(search_box.is_displayed()) |
242 | 242 | ||
@@ -244,7 +244,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
244 | search_box.send_keys('test project 10') | 244 | search_box.send_keys('test project 10') |
245 | search_btn = self.find('#search-submit-projectstable') | 245 | search_btn = self.find('#search-submit-projectstable') |
246 | search_btn.click() | 246 | search_btn.click() |
247 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 247 | self.wait_until_visible('#projectstable tbody tr') |
248 | rows = self.find_all('#projectstable tbody tr') | 248 | rows = self.find_all('#projectstable tbody tr') |
249 | self.assertTrue(len(rows) == 1) | 249 | self.assertTrue(len(rows) == 1) |
250 | 250 | ||
@@ -290,7 +290,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
290 | ) | 290 | ) |
291 | url = reverse('all-projects') | 291 | url = reverse('all-projects') |
292 | self.get(url) | 292 | self.get(url) |
293 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 293 | self.wait_until_visible('#projectstable tbody tr') |
294 | 294 | ||
295 | # Check edit column | 295 | # Check edit column |
296 | edit_column = self.find('#edit-columns-button') | 296 | edit_column = self.find('#edit-columns-button') |
@@ -313,7 +313,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
313 | def test_show_rows(row_to_show, show_row_link): | 313 | def test_show_rows(row_to_show, show_row_link): |
314 | # Check that we can show rows == row_to_show | 314 | # Check that we can show rows == row_to_show |
315 | show_row_link.select_by_value(str(row_to_show)) | 315 | show_row_link.select_by_value(str(row_to_show)) |
316 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 316 | self.wait_until_visible('#projectstable tbody tr') |
317 | # check at least some rows are visible | 317 | # check at least some rows are visible |
318 | self.assertTrue( | 318 | self.assertTrue( |
319 | len(self.find_all('#projectstable tbody tr')) > 0 | 319 | len(self.find_all('#projectstable tbody tr')) > 0 |
@@ -321,7 +321,7 @@ class TestAllProjectsPage(SeleniumTestCase): | |||
321 | 321 | ||
322 | url = reverse('all-projects') | 322 | url = reverse('all-projects') |
323 | self.get(url) | 323 | self.get(url) |
324 | self.wait_until_visible('#projectstable tbody tr', poll=3) | 324 | self.wait_until_visible('#projectstable tbody tr') |
325 | 325 | ||
326 | show_rows = self.driver.find_elements( | 326 | show_rows = self.driver.find_elements( |
327 | By.XPATH, | 327 | By.XPATH, |
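The change repeated throughout this file drops the explicit poll=3 argument so that wait_until_visible() relies on its default polling interval, keeping call sites uniform. A minimal sketch of how such a helper is typically built on Selenium's WebDriverWait; the timeout and poll defaults below are assumptions, not the actual Toaster implementation:

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    def wait_until_visible(driver, selector, timeout=60, poll=2):
        # Re-check the DOM every `poll` seconds until the element is
        # displayed; raises TimeoutException after `timeout` seconds.
        wait = WebDriverWait(driver, timeout, poll_frequency=poll)
        return wait.until(
            EC.visibility_of_element_located((By.CSS_SELECTOR, selector)))

With the poll default centralised in one helper like this, the per-call poll=3 arguments can simply be deleted.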
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py index d838ce363a..82367108e2 100644 --- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py +++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py | |||
@@ -162,7 +162,7 @@ class TestBuildDashboardPage(SeleniumTestCase): | |||
162 | """ | 162 | """ |
163 | url = reverse('builddashboard', args=(build.id,)) | 163 | url = reverse('builddashboard', args=(build.id,)) |
164 | self.get(url) | 164 | self.get(url) |
165 | self.wait_until_visible('#global-nav', poll=3) | 165 | self.wait_until_visible('#global-nav') |
166 | 166 | ||
167 | def _get_build_dashboard_errors(self, build): | 167 | def _get_build_dashboard_errors(self, build): |
168 | """ | 168 | """ |
diff --git a/bitbake/lib/toaster/tests/browser/test_landing_page.py b/bitbake/lib/toaster/tests/browser/test_landing_page.py index 8fe5fea467..210359d561 100644 --- a/bitbake/lib/toaster/tests/browser/test_landing_page.py +++ b/bitbake/lib/toaster/tests/browser/test_landing_page.py | |||
@@ -34,6 +34,7 @@ class TestLandingPage(SeleniumTestCase): | |||
34 | def test_icon_info_visible_and_clickable(self): | 34 | def test_icon_info_visible_and_clickable(self): |
35 | """ Test that the information icon is visible and clickable """ | 35 | """ Test that the information icon is visible and clickable """ |
36 | self.get(reverse('landing')) | 36 | self.get(reverse('landing')) |
37 | self.wait_until_visible('#toaster-version-info-sign') | ||
37 | info_sign = self.find('#toaster-version-info-sign') | 38 | info_sign = self.find('#toaster-version-info-sign') |
38 | 39 | ||
39 | # check that the info sign is visible | 40 | # check that the info sign is visible |
@@ -43,6 +44,7 @@ class TestLandingPage(SeleniumTestCase): | |||
43 | # and info modal is appearing when clicking on the info sign | 44 | # and info modal is appearing when clicking on the info sign |
44 | info_sign.click() # clicking on the info sign makes the attribute 'aria-describedby' visible | 45 | info_sign.click() # clicking on the info sign makes the attribute 'aria-describedby' visible |
45 | info_model_id = info_sign.get_attribute('aria-describedby') | 46 | info_model_id = info_sign.get_attribute('aria-describedby') |
47 | self.wait_until_visible(f'#{info_model_id}') | ||
46 | info_modal = self.find(f'#{info_model_id}') | 48 | info_modal = self.find(f'#{info_model_id}') |
47 | self.assertTrue(info_modal.is_displayed()) | 49 | self.assertTrue(info_modal.is_displayed()) |
48 | self.assertTrue("Toaster version information" in info_modal.text) | 50 | self.assertTrue("Toaster version information" in info_modal.text) |
@@ -50,6 +52,7 @@ class TestLandingPage(SeleniumTestCase): | |||
50 | def test_documentation_link_displayed(self): | 52 | def test_documentation_link_displayed(self): |
51 | """ Test that the documentation link is displayed """ | 53 | """ Test that the documentation link is displayed """ |
52 | self.get(reverse('landing')) | 54 | self.get(reverse('landing')) |
55 | self.wait_until_visible('#navbar-docs') | ||
53 | documentation_link = self.find('#navbar-docs > a') | 56 | documentation_link = self.find('#navbar-docs > a') |
54 | 57 | ||
55 | # check that the documentation link is visible | 58 | # check that the documentation link is visible |
@@ -65,6 +68,7 @@ class TestLandingPage(SeleniumTestCase): | |||
65 | def test_openembedded_jumbotron_link_visible_and_clickable(self): | 68 | def test_openembedded_jumbotron_link_visible_and_clickable(self): |
66 | """ Test OpenEmbedded link jumbotron is visible and clickable: """ | 69 | """ Test OpenEmbedded link jumbotron is visible and clickable: """ |
67 | self.get(reverse('landing')) | 70 | self.get(reverse('landing')) |
71 | self.wait_until_visible('.jumbotron') | ||
68 | jumbotron = self.find('.jumbotron') | 72 | jumbotron = self.find('.jumbotron') |
69 | 73 | ||
70 | # check OpenEmbedded | 74 | # check OpenEmbedded |
@@ -76,6 +80,7 @@ class TestLandingPage(SeleniumTestCase): | |||
76 | def test_bitbake_jumbotron_link_visible_and_clickable(self): | 80 | def test_bitbake_jumbotron_link_visible_and_clickable(self): |
77 | """ Test BitBake link jumbotron is visible and clickable: """ | 81 | """ Test BitBake link jumbotron is visible and clickable: """ |
78 | self.get(reverse('landing')) | 82 | self.get(reverse('landing')) |
83 | self.wait_until_visible('.jumbotron') | ||
79 | jumbotron = self.find('.jumbotron') | 84 | jumbotron = self.find('.jumbotron') |
80 | 85 | ||
81 | # check BitBake | 86 | # check BitBake |
@@ -88,6 +93,7 @@ class TestLandingPage(SeleniumTestCase): | |||
88 | def test_yoctoproject_jumbotron_link_visible_and_clickable(self): | 93 | def test_yoctoproject_jumbotron_link_visible_and_clickable(self): |
89 | """ Test Yocto Project link jumbotron is visible and clickable: """ | 94 | """ Test Yocto Project link jumbotron is visible and clickable: """ |
90 | self.get(reverse('landing')) | 95 | self.get(reverse('landing')) |
96 | self.wait_until_visible('.jumbotron') | ||
91 | jumbotron = self.find('.jumbotron') | 97 | jumbotron = self.find('.jumbotron') |
92 | 98 | ||
93 | # check Yocto Project | 99 | # check Yocto Project |
@@ -101,6 +107,7 @@ class TestLandingPage(SeleniumTestCase): | |||
101 | if visible and clickable | 107 | if visible and clickable |
102 | """ | 108 | """ |
103 | self.get(reverse('landing')) | 109 | self.get(reverse('landing')) |
110 | self.wait_until_visible('.jumbotron') | ||
104 | jumbotron = self.find('.jumbotron') | 111 | jumbotron = self.find('.jumbotron') |
105 | 112 | ||
106 | # check Big magenta button | 113 | # check Big magenta button |
@@ -119,6 +126,7 @@ class TestLandingPage(SeleniumTestCase): | |||
119 | Layer_Version.objects.create(layer=layer) | 126 | Layer_Version.objects.create(layer=layer) |
120 | 127 | ||
121 | self.get(reverse('landing')) | 128 | self.get(reverse('landing')) |
129 | self.wait_until_visible('.jumbotron') | ||
122 | jumbotron = self.find('.jumbotron') | 130 | jumbotron = self.find('.jumbotron') |
123 | 131 | ||
124 | # check Big Blue button | 132 | # check Big Blue button |
@@ -132,6 +140,7 @@ class TestLandingPage(SeleniumTestCase): | |||
132 | def test_toaster_manual_link_visible_and_clickable(self): | 140 | def test_toaster_manual_link_visible_and_clickable(self): |
133 | """ Test Read the Toaster manual link jumbotron is visible and clickable: """ | 141 | """ Test Read the Toaster manual link jumbotron is visible and clickable: """ |
134 | self.get(reverse('landing')) | 142 | self.get(reverse('landing')) |
143 | self.wait_until_visible('.jumbotron') | ||
135 | jumbotron = self.find('.jumbotron') | 144 | jumbotron = self.find('.jumbotron') |
136 | 145 | ||
137 | # check Read the Toaster manual | 146 | # check Read the Toaster manual |
@@ -145,6 +154,7 @@ class TestLandingPage(SeleniumTestCase): | |||
145 | def test_contrib_to_toaster_link_visible_and_clickable(self): | 154 | def test_contrib_to_toaster_link_visible_and_clickable(self): |
146 | """ Test Contribute to Toaster link jumbotron is visible and clickable: """ | 155 | """ Test Contribute to Toaster link jumbotron is visible and clickable: """ |
147 | self.get(reverse('landing')) | 156 | self.get(reverse('landing')) |
157 | self.wait_until_visible('.jumbotron') | ||
148 | jumbotron = self.find('.jumbotron') | 158 | jumbotron = self.find('.jumbotron') |
149 | 159 | ||
150 | # check Contribute to Toaster | 160 | # check Contribute to Toaster |
@@ -161,6 +171,7 @@ class TestLandingPage(SeleniumTestCase): | |||
161 | => should see the landing page | 171 | => should see the landing page |
162 | """ | 172 | """ |
163 | self.get(reverse('landing')) | 173 | self.get(reverse('landing')) |
174 | self.wait_until_visible('.jumbotron') | ||
164 | self.assertTrue(self.LANDING_PAGE_TITLE in self.get_page_source()) | 175 | self.assertTrue(self.LANDING_PAGE_TITLE in self.get_page_source()) |
165 | 176 | ||
166 | def test_default_project_has_build(self): | 177 | def test_default_project_has_build(self): |
@@ -193,6 +204,7 @@ class TestLandingPage(SeleniumTestCase): | |||
193 | user_project.save() | 204 | user_project.save() |
194 | 205 | ||
195 | self.get(reverse('landing')) | 206 | self.get(reverse('landing')) |
207 | self.wait_until_visible('#projectstable') | ||
196 | 208 | ||
197 | elements = self.find_all('#projectstable') | 209 | elements = self.find_all('#projectstable') |
198 | self.assertEqual(len(elements), 1, 'should redirect to projects') | 210 | self.assertEqual(len(elements), 1, 'should redirect to projects') |
@@ -213,7 +225,7 @@ class TestLandingPage(SeleniumTestCase): | |||
213 | 225 | ||
214 | self.get(reverse('landing')) | 226 | self.get(reverse('landing')) |
215 | 227 | ||
216 | self.wait_until_visible("#latest-builds", poll=3) | 228 | self.wait_until_visible("#latest-builds") |
217 | elements = self.find_all('#allbuildstable') | 229 | elements = self.find_all('#allbuildstable') |
218 | self.assertEqual(len(elements), 1, 'should redirect to builds') | 230 | self.assertEqual(len(elements), 1, 'should redirect to builds') |
219 | content = self.get_page_source() | 231 | content = self.get_page_source() |
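Each landing-page test gains an explicit wait between get() and find(), closing the race where find() runs before the page has rendered. A hedged illustration of the pattern; the helper function and its test_case parameter are assumptions made for the sketch:

    from django.urls import reverse

    def open_landing_and_find_jumbotron(test_case):
        # test_case is assumed to provide get/find/wait_until_visible,
        # as the SeleniumTestCase base class does in these tests.
        test_case.get(reverse('landing'))
        # Without this wait, find() can fire before the page's JS has
        # rendered and raise NoSuchElementException on a slow machine.
        test_case.wait_until_visible('.jumbotron')
        return test_case.find('.jumbotron')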
diff --git a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py index 5c29548b78..6abfdef699 100644 --- a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py +++ b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py | |||
@@ -64,7 +64,7 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
64 | args=(self.project.pk, | 64 | args=(self.project.pk, |
65 | self.imported_layer_version.pk)) | 65 | self.imported_layer_version.pk)) |
66 | 66 | ||
67 | def _edit_layerdetails(self): | 67 | def test_edit_layerdetails_page(self): |
68 | """ Edit all the editable fields for the layer refresh the page and | 68 | """ Edit all the editable fields for the layer refresh the page and |
69 | check that the new values exist""" | 69 | check that the new values exist""" |
70 | 70 | ||
@@ -100,24 +100,19 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
100 | (self.initial_values, value)) | 100 | (self.initial_values, value)) |
101 | 101 | ||
102 | # Make sure the input is visible before sending keys | 102 | # Make sure the input is visible before sending keys |
103 | self.wait_until_visible("#layer-git input[type=text]") | 103 | self.wait_until_clickable("#layer-git input[type=text]") |
104 | inputs.send_keys("-edited") | 104 | inputs.send_keys("-edited") |
105 | 105 | ||
106 | # Save the new values | 106 | # Save the new values |
107 | for save_btn in self.find_all(".change-btn"): | 107 | for save_btn in self.find_all(".change-btn"): |
108 | save_btn.click() | 108 | save_btn.click() |
109 | 109 | ||
110 | try: | 110 | self.wait_until_visible("#save-changes-for-switch") |
111 | self.wait_until_visible("#save-changes-for-switch", poll=3) | 111 | # Ensure scrolled into view |
112 | btn_save_chg_for_switch = self.wait_until_clickable( | 112 | self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})') |
113 | "#save-changes-for-switch", poll=3) | 113 | btn_save_chg_for_switch = self.wait_until_clickable( |
114 | btn_save_chg_for_switch.click() | 114 | "#save-changes-for-switch") |
115 | except ElementClickInterceptedException: | 115 | btn_save_chg_for_switch.click() |
116 | self.skipTest( | ||
117 | "save-changes-for-switch click intercepted. Element not visible or maybe covered by another element.") | ||
118 | except TimeoutException: | ||
119 | self.skipTest( | ||
120 | "save-changes-for-switch is not clickable within the specified timeout.") | ||
121 | 116 | ||
122 | self.wait_until_visible("#edit-layer-source") | 117 | self.wait_until_visible("#edit-layer-source") |
123 | 118 | ||
@@ -147,17 +142,10 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
147 | new_dir = "/home/test/my-meta-dir" | 142 | new_dir = "/home/test/my-meta-dir" |
148 | dir_input.send_keys(new_dir) | 143 | dir_input.send_keys(new_dir) |
149 | 144 | ||
150 | try: | 145 | self.wait_until_visible("#save-changes-for-switch") |
151 | self.wait_until_visible("#save-changes-for-switch", poll=3) | 146 | btn_save_chg_for_switch = self.wait_until_clickable( |
152 | btn_save_chg_for_switch = self.wait_until_clickable( | 147 | "#save-changes-for-switch") |
153 | "#save-changes-for-switch", poll=3) | 148 | btn_save_chg_for_switch.click() |
154 | btn_save_chg_for_switch.click() | ||
155 | except ElementClickInterceptedException: | ||
156 | self.skipTest( | ||
157 | "save-changes-for-switch click intercepted. Element not properly visible or maybe behind another element.") | ||
158 | except TimeoutException: | ||
159 | self.skipTest( | ||
160 | "save-changes-for-switch is not clickable within the specified timeout.") | ||
161 | 149 | ||
162 | self.wait_until_visible("#edit-layer-source") | 150 | self.wait_until_visible("#edit-layer-source") |
163 | 151 | ||
@@ -168,12 +156,6 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
168 | "Expected %s in the dir value for layer directory" % | 156 | "Expected %s in the dir value for layer directory" % |
169 | new_dir) | 157 | new_dir) |
170 | 158 | ||
171 | def test_edit_layerdetails_page(self): | ||
172 | try: | ||
173 | self._edit_layerdetails() | ||
174 | except ElementClickInterceptedException: | ||
175 | self.skipTest( | ||
176 | "ElementClickInterceptedException occured. Element not visible or maybe covered by another element.") | ||
177 | 159 | ||
178 | def test_delete_layer(self): | 160 | def test_delete_layer(self): |
179 | """ Delete the layer """ | 161 | """ Delete the layer """ |
@@ -211,6 +193,7 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
211 | self.get(self.url) | 193 | self.get(self.url) |
212 | 194 | ||
213 | # Add the layer | 195 | # Add the layer |
196 | self.wait_until_clickable("#add-remove-layer-btn") | ||
214 | self.click("#add-remove-layer-btn") | 197 | self.click("#add-remove-layer-btn") |
215 | 198 | ||
216 | notification = self.wait_until_visible("#change-notification-msg") | 199 | notification = self.wait_until_visible("#change-notification-msg") |
@@ -218,12 +201,17 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
218 | expected_text = "You have added 1 layer to your project: %s" % \ | 201 | expected_text = "You have added 1 layer to your project: %s" % \ |
219 | self.imported_layer_version.layer.name | 202 | self.imported_layer_version.layer.name |
220 | 203 | ||
221 | self.assertTrue(expected_text in notification.text, | 204 | self.assertIn(expected_text, notification.text, |
222 | "Expected notification text %s not found was " | 205 | "Expected notification text %s not found was " |
223 | " \"%s\" instead" % | 206 | " \"%s\" instead" % |
224 | (expected_text, notification.text)) | 207 | (expected_text, notification.text)) |
225 | 208 | ||
209 | hide_button = self.find('#hide-alert') | ||
210 | hide_button.click() | ||
211 | self.wait_until_not_visible('#change-notification') | ||
212 | |||
226 | # Remove the layer | 213 | # Remove the layer |
214 | self.wait_until_clickable("#add-remove-layer-btn") | ||
227 | self.click("#add-remove-layer-btn") | 215 | self.click("#add-remove-layer-btn") |
228 | 216 | ||
229 | notification = self.wait_until_visible("#change-notification-msg") | 217 | notification = self.wait_until_visible("#change-notification-msg") |
@@ -231,7 +219,7 @@ class TestLayerDetailsPage(SeleniumTestCase): | |||
231 | expected_text = "You have removed 1 layer from your project: %s" % \ | 219 | expected_text = "You have removed 1 layer from your project: %s" % \ |
232 | self.imported_layer_version.layer.name | 220 | self.imported_layer_version.layer.name |
233 | 221 | ||
234 | self.assertTrue(expected_text in notification.text, | 222 | self.assertIn(expected_text, notification.text, |
235 | "Expected notification text %s not found was " | 223 | "Expected notification text %s not found was " |
236 | " \"%s\" instead" % | 224 | " \"%s\" instead" % |
237 | (expected_text, notification.text)) | 225 | (expected_text, notification.text)) |
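The layerdetails changes replace the try/except-skipTest fallbacks with explicit waits, scroll the window to the top via execute_script so the save button is not covered by another element, and use wait_until_clickable before interacting. A minimal sketch of the kind of wait_until_clickable helper this relies on (names and the timeout default are assumptions, not the actual Toaster code):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    def wait_until_clickable(driver, selector, timeout=60):
        # element_to_be_clickable requires the element to be both visible
        # and enabled, a stricter check than visibility alone, so clicks
        # are less likely to be intercepted.
        wait = WebDriverWait(driver, timeout)
        return wait.until(
            EC.element_to_be_clickable((By.CSS_SELECTOR, selector)))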
diff --git a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py index 9f0b6397fe..bf0304dbec 100644 --- a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py +++ b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py | |||
@@ -90,7 +90,7 @@ class TestNewCustomImagePage(SeleniumTestCase): | |||
90 | """ | 90 | """ |
91 | url = reverse('newcustomimage', args=(self.project.id,)) | 91 | url = reverse('newcustomimage', args=(self.project.id,)) |
92 | self.get(url) | 92 | self.get(url) |
93 | self.wait_until_visible('#global-nav', poll=3) | 93 | self.wait_until_visible('#global-nav') |
94 | 94 | ||
95 | self.click('button[data-recipe="%s"]' % self.recipe.id) | 95 | self.click('button[data-recipe="%s"]' % self.recipe.id) |
96 | 96 | ||
diff --git a/bitbake/lib/toaster/tests/browser/test_new_project_page.py b/bitbake/lib/toaster/tests/browser/test_new_project_page.py index 458bb6538d..e50f236c32 100644 --- a/bitbake/lib/toaster/tests/browser/test_new_project_page.py +++ b/bitbake/lib/toaster/tests/browser/test_new_project_page.py | |||
@@ -47,7 +47,7 @@ class TestNewProjectPage(SeleniumTestCase): | |||
47 | 47 | ||
48 | url = reverse('newproject') | 48 | url = reverse('newproject') |
49 | self.get(url) | 49 | self.get(url) |
50 | self.wait_until_visible('#new-project-name', poll=3) | 50 | self.wait_until_visible('#new-project-name') |
51 | self.enter_text('#new-project-name', project_name) | 51 | self.enter_text('#new-project-name', project_name) |
52 | 52 | ||
53 | select = Select(self.find('#projectversion')) | 53 | select = Select(self.find('#projectversion')) |
@@ -58,7 +58,7 @@ class TestNewProjectPage(SeleniumTestCase): | |||
58 | # We should get redirected to the new project's page with the | 58 | # We should get redirected to the new project's page with the |
59 | # notification at the top | 59 | # notification at the top |
60 | element = self.wait_until_visible( | 60 | element = self.wait_until_visible( |
61 | '#project-created-notification', poll=3) | 61 | '#project-created-notification') |
62 | 62 | ||
63 | self.assertTrue(project_name in element.text, | 63 | self.assertTrue(project_name in element.text, |
64 | "New project name not in new project notification") | 64 | "New project name not in new project notification") |
@@ -79,7 +79,7 @@ class TestNewProjectPage(SeleniumTestCase): | |||
79 | 79 | ||
80 | url = reverse('newproject') | 80 | url = reverse('newproject') |
81 | self.get(url) | 81 | self.get(url) |
82 | self.wait_until_visible('#new-project-name', poll=3) | 82 | self.wait_until_visible('#new-project-name') |
83 | 83 | ||
84 | self.enter_text('#new-project-name', project_name) | 84 | self.enter_text('#new-project-name', project_name) |
85 | 85 | ||
@@ -89,12 +89,10 @@ class TestNewProjectPage(SeleniumTestCase): | |||
89 | radio = self.driver.find_element(By.ID, 'type-new') | 89 | radio = self.driver.find_element(By.ID, 'type-new') |
90 | radio.click() | 90 | radio.click() |
91 | 91 | ||
92 | self.click("#create-project-button") | 92 | self.wait_until_visible('#hint-error-project-name') |
93 | |||
94 | self.wait_until_present('#hint-error-project-name', poll=3) | ||
95 | element = self.find('#hint-error-project-name') | 93 | element = self.find('#hint-error-project-name') |
96 | 94 | ||
97 | self.assertTrue(("Project names must be unique" in element.text), | 95 | self.assertIn("Project names must be unique", element.text, |
98 | "Did not find unique project name error message") | 96 | "Did not find unique project name error message") |
99 | 97 | ||
100 | # Try and click it anyway, if it submits we'll have a new project in | 98 | # Try and click it anyway, if it submits we'll have a new project in |
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py index cacfccd4d3..e54d561334 100644 --- a/bitbake/lib/toaster/tests/builds/buildtest.py +++ b/bitbake/lib/toaster/tests/builds/buildtest.py | |||
@@ -128,7 +128,7 @@ class BuildTest(unittest.TestCase): | |||
128 | if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): | 128 | if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): |
129 | ProjectVariable.objects.get_or_create( | 129 | ProjectVariable.objects.get_or_create( |
130 | name="SSTATE_MIRRORS", | 130 | name="SSTATE_MIRRORS", |
131 | value="file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH", | 131 | value="file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH", |
132 | project=project) | 132 | project=project) |
133 | 133 | ||
134 | ProjectTarget.objects.create(project=project, | 134 | ProjectTarget.objects.create(project=project, |
diff --git a/bitbake/lib/toaster/tests/functional/functional_helpers.py b/bitbake/lib/toaster/tests/functional/functional_helpers.py index 7c20437d14..e28f2024f5 100644 --- a/bitbake/lib/toaster/tests/functional/functional_helpers.py +++ b/bitbake/lib/toaster/tests/functional/functional_helpers.py | |||
@@ -12,9 +12,12 @@ import logging | |||
12 | import subprocess | 12 | import subprocess |
13 | import signal | 13 | import signal |
14 | import re | 14 | import re |
15 | import requests | ||
15 | 16 | ||
17 | from django.urls import reverse | ||
16 | from tests.browser.selenium_helpers_base import SeleniumTestCaseBase | 18 | from tests.browser.selenium_helpers_base import SeleniumTestCaseBase |
17 | from selenium.webdriver.common.by import By | 19 | from selenium.webdriver.common.by import By |
20 | from selenium.webdriver.support.select import Select | ||
18 | from selenium.common.exceptions import NoSuchElementException | 21 | from selenium.common.exceptions import NoSuchElementException |
19 | 22 | ||
20 | logger = logging.getLogger("toaster") | 23 | logger = logging.getLogger("toaster") |
@@ -136,3 +139,86 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase): | |||
136 | except NoSuchElementException: | 139 | except NoSuchElementException: |
137 | return False | 140 | return False |
138 | return element | 141 | return element |
142 | |||
143 | def create_new_project( | ||
144 | self, | ||
145 | project_name, | ||
146 | release, | ||
147 | release_title, | ||
148 | merge_toaster_settings, | ||
149 | ): | ||
150 | """ Create/Test new project using: | ||
151 | - Project Name: Any string | ||
152 | - Release: Any string | ||
153 | - Merge Toaster settings: True or False | ||
154 | """ | ||
155 | |||
156 | # Obtain a CSRF token from a suitable URL | ||
157 | projs = requests.get(self.live_server_url + reverse('newproject')) | ||
158 | csrftoken = projs.cookies.get('csrftoken') | ||
159 | |||
160 | # Use the projects typeahead to find out if the project already exists | ||
161 | req = requests.get(self.live_server_url + reverse('xhr_projectstypeahead'), {'search': project_name, 'format' : 'json'}) | ||
162 | data = req.json() | ||
163 | # Delete any existing projects | ||
164 | for result in data['results']: | ||
165 | del_url = reverse('xhr_project', args=(result['id'],)) | ||
166 | del_response = requests.delete(self.live_server_url + del_url, cookies={'csrftoken': csrftoken}, headers={'X-CSRFToken': csrftoken}) | ||
167 | self.assertEqual(del_response.status_code, 200) | ||
168 | |||
169 | self.get(reverse('newproject')) | ||
170 | self.wait_until_visible('#new-project-name') | ||
171 | self.driver.find_element(By.ID, | ||
172 | "new-project-name").send_keys(project_name) | ||
173 | |||
174 | select = Select(self.find('#projectversion')) | ||
175 | select.select_by_value(release) | ||
176 | |||
177 | # check merge toaster settings | ||
178 | checkbox = self.find('.checkbox-mergeattr') | ||
179 | if merge_toaster_settings: | ||
180 | if not checkbox.is_selected(): | ||
181 | checkbox.click() | ||
182 | else: | ||
183 | if checkbox.is_selected(): | ||
184 | checkbox.click() | ||
185 | |||
186 | self.wait_until_clickable('#create-project-button') | ||
187 | |||
188 | self.driver.find_element(By.ID, "create-project-button").click() | ||
189 | |||
190 | element = self.wait_until_visible('#project-created-notification') | ||
191 | self.assertTrue( | ||
192 | self.element_exists('#project-created-notification'), | ||
193 | f"Project:{project_name} creation notification not shown" | ||
194 | ) | ||
195 | self.assertTrue( | ||
196 | project_name in element.text, | ||
197 | f"New project name:{project_name} not in new project notification" | ||
198 | ) | ||
199 | |||
200 | # Use the projects typeahead again to check the project now exists | ||
201 | req = requests.get(self.live_server_url + reverse('xhr_projectstypeahead'), {'search': project_name, 'format' : 'json'}) | ||
202 | data = req.json() | ||
203 | self.assertGreater(len(data['results']), 0, f"New project:{project_name} not found in database") | ||
204 | |||
205 | project_id = data['results'][0]['id'] | ||
206 | |||
207 | self.wait_until_visible('#project-release-title') | ||
208 | |||
209 | # check release | ||
210 | if release_title is not None: | ||
211 | self.assertTrue(re.search( | ||
212 | release_title, | ||
213 | self.driver.find_element(By.XPATH, | ||
214 | "//span[@id='project-release-title']" | ||
215 | ).text), | ||
216 | 'The project release is not defined') | ||
217 | |||
218 | return project_id | ||
219 | |||
220 | def load_projects_page_helper(self): | ||
221 | self.wait_until_present('#projectstable') | ||
222 | # Need to wait for some data in the table too | ||
223 | self.wait_until_present('td[class="updated"]') | ||
224 | |||
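The new create_new_project helper talks to the live server over plain HTTP before driving the browser: it harvests Django's csrftoken cookie from a GET response, then replays it as both a cookie and an X-CSRFToken header when deleting any stale project. A standalone sketch of that CSRF pattern with requests; the base URL and project id are placeholders, not values from the tests:

    import requests

    base = "http://localhost:8000"  # placeholder for live_server_url
    resp = requests.get(base + "/toastergui/newproject/")
    csrftoken = resp.cookies.get("csrftoken")

    # Django accepts the unsafe DELETE only when the X-CSRFToken header
    # matches the csrftoken cookie sent with the same request.
    requests.delete(
        base + "/toastergui/xhr_project/1",  # placeholder project id
        cookies={"csrftoken": csrftoken},
        headers={"X-CSRFToken": csrftoken},
    )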
diff --git a/bitbake/lib/toaster/tests/functional/test_create_new_project.py b/bitbake/lib/toaster/tests/functional/test_create_new_project.py index 94d90459e1..66213c736e 100644 --- a/bitbake/lib/toaster/tests/functional/test_create_new_project.py +++ b/bitbake/lib/toaster/tests/functional/test_create_new_project.py | |||
@@ -11,67 +11,10 @@ import pytest | |||
11 | from django.urls import reverse | 11 | from django.urls import reverse |
12 | from selenium.webdriver.support.select import Select | 12 | from selenium.webdriver.support.select import Select |
13 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | 13 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase |
14 | from orm.models import Project | ||
15 | from selenium.webdriver.common.by import By | 14 | from selenium.webdriver.common.by import By |
16 | 15 | ||
17 | |||
18 | @pytest.mark.django_db | ||
19 | @pytest.mark.order("last") | ||
20 | class TestCreateNewProject(SeleniumFunctionalTestCase): | 16 | class TestCreateNewProject(SeleniumFunctionalTestCase): |
21 | 17 | ||
22 | def _create_test_new_project( | ||
23 | self, | ||
24 | project_name, | ||
25 | release, | ||
26 | release_title, | ||
27 | merge_toaster_settings, | ||
28 | ): | ||
29 | """ Create/Test new project using: | ||
30 | - Project Name: Any string | ||
31 | - Release: Any string | ||
32 | - Merge Toaster settings: True or False | ||
33 | """ | ||
34 | self.get(reverse('newproject')) | ||
35 | self.wait_until_visible('#new-project-name', poll=3) | ||
36 | self.driver.find_element(By.ID, | ||
37 | "new-project-name").send_keys(project_name) | ||
38 | |||
39 | select = Select(self.find('#projectversion')) | ||
40 | select.select_by_value(release) | ||
41 | |||
42 | # check merge toaster settings | ||
43 | checkbox = self.find('.checkbox-mergeattr') | ||
44 | if merge_toaster_settings: | ||
45 | if not checkbox.is_selected(): | ||
46 | checkbox.click() | ||
47 | else: | ||
48 | if checkbox.is_selected(): | ||
49 | checkbox.click() | ||
50 | |||
51 | self.driver.find_element(By.ID, "create-project-button").click() | ||
52 | |||
53 | element = self.wait_until_visible('#project-created-notification', poll=3) | ||
54 | self.assertTrue( | ||
55 | self.element_exists('#project-created-notification'), | ||
56 | f"Project:{project_name} creation notification not shown" | ||
57 | ) | ||
58 | self.assertTrue( | ||
59 | project_name in element.text, | ||
60 | f"New project name:{project_name} not in new project notification" | ||
61 | ) | ||
62 | self.assertTrue( | ||
63 | Project.objects.filter(name=project_name).count(), | ||
64 | f"New project:{project_name} not found in database" | ||
65 | ) | ||
66 | |||
67 | # check release | ||
68 | self.assertTrue(re.search( | ||
69 | release_title, | ||
70 | self.driver.find_element(By.XPATH, | ||
71 | "//span[@id='project-release-title']" | ||
72 | ).text), | ||
73 | 'The project release is not defined') | ||
74 | |||
75 | def test_create_new_project_master(self): | 18 | def test_create_new_project_master(self): |
76 | """ Test create new project using: | 19 | """ Test create new project using: |
77 | - Project Name: Any string | 20 | - Project Name: Any string |
@@ -81,43 +24,43 @@ class TestCreateNewProject(SeleniumFunctionalTestCase): | |||
81 | release = '3' | 24 | release = '3' |
82 | release_title = 'Yocto Project master' | 25 | release_title = 'Yocto Project master' |
83 | project_name = 'projectmaster' | 26 | project_name = 'projectmaster' |
84 | self._create_test_new_project( | 27 | self.create_new_project( |
85 | project_name, | 28 | project_name, |
86 | release, | 29 | release, |
87 | release_title, | 30 | release_title, |
88 | False, | 31 | False, |
89 | ) | 32 | ) |
90 | 33 | ||
91 | def test_create_new_project_kirkstone(self): | 34 | def test_create_new_project_scarthgap(self): |
92 | """ Test create new project using: | 35 | """ Test create new project using: |
93 | - Project Name: Any string | 36 | - Project Name: Any string |
94 | - Release: Yocto Project 4.0 "Kirkstone" (option value: 1) | 37 | - Release: Yocto Project 5.0 "Scarthgap" (option value: 1) |
95 | - Merge Toaster settings: True | 38 | - Merge Toaster settings: True |
96 | """ | 39 | """ |
97 | release = '1' | 40 | release = '1' |
98 | release_title = 'Yocto Project 4.0 "Kirkstone"' | 41 | release_title = 'Yocto Project 5.0 "Scarthgap"' |
99 | project_name = 'projectkirkstone' | 42 | project_name = 'projectscarthgap' |
100 | self._create_test_new_project( | 43 | self.create_new_project( |
101 | project_name, | 44 | project_name, |
102 | release, | 45 | release, |
103 | release_title, | 46 | release_title, |
104 | True, | 47 | True, |
105 | ) | 48 | ) |
106 | 49 | ||
107 | def test_create_new_project_dunfell(self): | 50 | def test_create_new_project_kirkstone(self): |
108 | """ Test create new project using: | 51 | """ Test create new project using: |
109 | - Project Name: Any string | 52 | - Project Name: Any string |
110 | - Release: Yocto Project 3.1 "Dunfell" (option value: 5) | 53 | - Release: Yocto Project 4.0 "Kirkstone" (option value: 7) |
111 | - Merge Toaster settings: False | 54 | - Merge Toaster settings: True |
112 | """ | 55 | """ |
113 | release = '5' | 56 | release = '7' |
114 | release_title = 'Yocto Project 3.1 "Dunfell"' | 57 | release_title = 'Yocto Project 4.0 "Kirkstone"' |
115 | project_name = 'projectdunfell' | 58 | project_name = 'projectkirkstone' |
116 | self._create_test_new_project( | 59 | self.create_new_project( |
117 | project_name, | 60 | project_name, |
118 | release, | 61 | release, |
119 | release_title, | 62 | release_title, |
120 | False, | 63 | True, |
121 | ) | 64 | ) |
122 | 65 | ||
123 | def test_create_new_project_local(self): | 66 | def test_create_new_project_local(self): |
@@ -129,7 +72,7 @@ class TestCreateNewProject(SeleniumFunctionalTestCase): | |||
129 | release = '2' | 72 | release = '2' |
130 | release_title = 'Local Yocto Project' | 73 | release_title = 'Local Yocto Project' |
131 | project_name = 'projectlocal' | 74 | project_name = 'projectlocal' |
132 | self._create_test_new_project( | 75 | self.create_new_project( |
133 | project_name, | 76 | project_name, |
134 | release, | 77 | release, |
135 | release_title, | 78 | release_title, |
@@ -172,8 +115,10 @@ class TestCreateNewProject(SeleniumFunctionalTestCase): | |||
172 | "import-project-dir").send_keys(wrong_path) | 115 | "import-project-dir").send_keys(wrong_path) |
173 | self.driver.find_element(By.ID, "create-project-button").click() | 116 | self.driver.find_element(By.ID, "create-project-button").click() |
174 | 117 | ||
118 | self.wait_until_visible('.alert-danger') | ||
119 | |||
175 | # check error message | 120 | # check error message |
176 | self.assertTrue(self.element_exists('.alert-danger'), | 121 | self.assertTrue(self.element_exists('.alert-danger'), |
177 | 'Allert message not shown') | 122 | 'Alert message not shown') |
178 | self.assertTrue(wrong_path in self.find('.alert-danger').text, | 123 | self.assertTrue(wrong_path in self.find('.alert-danger').text, |
179 | "Wrong path not in alert message") | 124 | "Wrong path not in alert message") |
diff --git a/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/bitbake/lib/toaster/tests/functional/test_functional_basic.py index e4070fbb88..d5c9708617 100644 --- a/bitbake/lib/toaster/tests/functional/test_functional_basic.py +++ b/bitbake/lib/toaster/tests/functional/test_functional_basic.py | |||
@@ -17,145 +17,132 @@ from selenium.webdriver.common.by import By | |||
17 | from tests.functional.utils import get_projectId_from_url | 17 | from tests.functional.utils import get_projectId_from_url |
18 | 18 | ||
19 | 19 | ||
20 | @pytest.mark.django_db | ||
21 | @pytest.mark.order("second_to_last") | ||
22 | class FuntionalTestBasic(SeleniumFunctionalTestCase): | 20 | class FuntionalTestBasic(SeleniumFunctionalTestCase): |
23 | """Basic functional tests for Toaster""" | 21 | """Basic functional tests for Toaster""" |
24 | project_id = None | 22 | project_id = None |
23 | project_url = None | ||
25 | 24 | ||
26 | def setUp(self): | 25 | def setUp(self): |
27 | super(FuntionalTestBasic, self).setUp() | 26 | super(FuntionalTestBasic, self).setUp() |
28 | if not FuntionalTestBasic.project_id: | 27 | if not FuntionalTestBasic.project_id: |
29 | self._create_slenium_project() | 28 | FuntionalTestBasic.project_id = self.create_new_project('selenium-project', '3', None, False) |
30 | current_url = self.driver.current_url | ||
31 | FuntionalTestBasic.project_id = get_projectId_from_url(current_url) | ||
32 | |||
33 | # testcase (1514) | ||
34 | def _create_slenium_project(self): | ||
35 | project_name = 'selenium-project' | ||
36 | self.get(reverse('newproject')) | ||
37 | self.wait_until_visible('#new-project-name', poll=3) | ||
38 | self.driver.find_element(By.ID, "new-project-name").send_keys(project_name) | ||
39 | self.driver.find_element(By.ID, 'projectversion').click() | ||
40 | self.driver.find_element(By.ID, "create-project-button").click() | ||
41 | element = self.wait_until_visible('#project-created-notification', poll=10) | ||
42 | self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown') | ||
43 | self.assertTrue(project_name in element.text, | ||
44 | "New project name not in new project notification") | ||
45 | self.assertTrue(Project.objects.filter(name=project_name).count(), | ||
46 | "New project not found in database") | ||
47 | return Project.objects.last().id | ||
48 | 29 | ||
49 | # testcase (1515) | 30 | # testcase (1515) |
50 | def test_verify_left_bar_menu(self): | 31 | def test_verify_left_bar_menu(self): |
51 | self.get(reverse('all-projects')) | 32 | self.get(reverse('all-projects')) |
52 | self.wait_until_present('#projectstable', poll=10) | 33 | self.load_projects_page_helper() |
53 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 34 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
54 | self.wait_until_present('#config-nav', poll=10) | 35 | self.wait_until_present('#config-nav') |
55 | self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist') | 36 | self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist') |
56 | project_URL=self.get_URL() | 37 | project_URL=self.get_URL() |
57 | self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click() | 38 | self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click() |
58 | self.wait_until_present('#config-nav', poll=10) | ||
59 | 39 | ||
60 | try: | 40 | try: |
41 | self.wait_until_present('#config-nav') | ||
61 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click() | 42 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click() |
62 | self.wait_until_present('#config-nav', poll=10) | 43 | self.wait_until_present('#filter-modal-customimagestable') |
63 | self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly') | ||
64 | except: | 44 | except: |
65 | self.fail(msg='No Custom images tab available') | 45 | self.fail(msg='No Custom images tab available') |
46 | self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly') | ||
66 | 47 | ||
67 | try: | 48 | try: |
68 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click() | 49 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click() |
69 | self.wait_until_present('#config-nav', poll=10) | 50 | self.wait_until_present('#filter-modal-imagerecipestable') |
70 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly') | ||
71 | except: | 51 | except: |
72 | self.fail(msg='No Compatible image tab available') | 52 | self.fail(msg='No Compatible image tab available') |
53 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly') | ||
73 | 54 | ||
74 | try: | 55 | try: |
75 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click() | 56 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click() |
76 | self.wait_until_present('#config-nav', poll=10) | 57 | self.wait_until_present('#filter-modal-softwarerecipestable') |
77 | self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly') | ||
78 | except: | 58 | except: |
79 | self.fail(msg='No Compatible software recipe tab available') | 59 | self.fail(msg='No Compatible software recipe tab available') |
60 | self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly') | ||
80 | 61 | ||
81 | try: | 62 | try: |
82 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click() | 63 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click() |
83 | self.wait_until_present('#config-nav', poll=10) | 64 | self.wait_until_present('#filter-modal-machinestable') |
84 | self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly') | ||
85 | except: | 65 | except: |
86 | self.fail(msg='No Compatible machines tab available') | 66 | self.fail(msg='No Compatible machines tab available') |
67 | self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly') | ||
87 | 68 | ||
88 | try: | 69 | try: |
89 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click() | 70 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click() |
90 | self.wait_until_present('#config-nav', poll=10) | 71 | self.wait_until_present('#filter-modal-layerstable') |
91 | self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly') | ||
92 | except: | 72 | except: |
93 | self.fail(msg='No Compatible layers tab available') | 73 | self.fail(msg='No Compatible layers tab available') |
74 | self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly') | ||
94 | 75 | ||
95 | try: | 76 | try: |
96 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click() | 77 | self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click() |
97 | self.wait_until_present('#config-nav', poll=10) | 78 | self.wait_until_present('#configvar-list') |
98 | self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly') | ||
99 | except: | 79 | except: |
100 | self.fail(msg='No Bitbake variables tab available') | 80 | self.fail(msg='No Bitbake variables tab available') |
81 | self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly') | ||
101 | 82 | ||
102 | # testcase (1516) | 83 | # testcase (1516) |
103 | def test_review_configuration_information(self): | 84 | def test_review_configuration_information(self): |
104 | self.get(reverse('all-projects')) | 85 | self.get(reverse('all-projects')) |
105 | self.wait_until_present('#projectstable', poll=10) | 86 | self.load_projects_page_helper() |
106 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 87 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
107 | project_URL=self.get_URL() | 88 | project_URL=self.get_URL() |
108 | self.wait_until_present('#config-nav', poll=10) | 89 | |
90 | # Machine section of page | ||
91 | self.wait_until_visible('#machine-section') | ||
92 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | ||
93 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned') | ||
109 | try: | 94 | try: |
110 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | ||
111 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned') | ||
112 | self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click() | 95 | self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click() |
113 | self.wait_until_visible('#select-machine-form', poll=10) | 96 | self.wait_until_visible('#select-machine-form') |
114 | self.wait_until_visible('#cancel-machine-change', poll=10) | 97 | self.wait_until_visible('#cancel-machine-change') |
115 | self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click() | 98 | self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click() |
116 | except: | 99 | except: |
117 | self.fail(msg='The machine information is wrong in the configuration page') | 100 | self.fail(msg='The machine information is wrong in the configuration page') |
118 | 101 | ||
102 | # Most built recipes section | ||
103 | self.wait_until_visible('#no-most-built') | ||
119 | try: | 104 | try: |
120 | self.driver.find_element(By.ID, 'no-most-built') | 105 | self.driver.find_element(By.ID, 'no-most-built') |
121 | except: | 106 | except: |
122 | self.fail(msg='No Most built information in project detail page') | 107 | self.fail(msg='No Most built information in project detail page') |
123 | 108 | ||
124 | try: | 109 | # Project Release title |
125 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text),'The project release is not defined') | 110 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text), 'The project release is not defined in the project detail page') |
126 | except: | ||
127 | self.fail(msg='No project release title information in project detail page') | ||
128 | 111 | ||
112 | # List of layers in project | ||
113 | self.wait_until_visible('#layer-container') | ||
114 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") | ||
115 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') | ||
129 | try: | 116 | try: |
130 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") | ||
131 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') | ||
132 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") | 117 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") |
133 | layers = layer_list.find_elements(By.TAG_NAME, "li") | 118 | layers = layer_list.find_elements(By.TAG_NAME, "li") |
134 | for layer in layers: | ||
135 | if re.match ("openembedded-core",layer.text): | ||
136 | print ("openembedded-core layer is a default layer in the project configuration") | ||
137 | elif re.match ("meta-poky",layer.text): | ||
138 | print ("meta-poky layer is a default layer in the project configuration") | ||
139 | elif re.match ("meta-yocto-bsp",layer.text): | ||
140 | print ("meta-yocto-bsp is a default layer in the project configuratoin") | ||
141 | else: | ||
142 | self.fail(msg='default layers are missing from the project configuration') | ||
143 | except: | 119 | except: |
144 | self.fail(msg='No Layer information in project detail page') | 120 | self.fail(msg='No Layer information in project detail page') |
145 | 121 | ||
122 | for layer in layers: | ||
123 | if re.match ("openembedded-core", layer.text): | ||
124 | print ("openembedded-core layer is a default layer in the project configuration") | ||
125 | elif re.match ("meta-poky", layer.text): | ||
126 | print ("meta-poky layer is a default layer in the project configuration") | ||
127 | elif re.match ("meta-yocto-bsp", layer.text): | ||
128 | print ("meta-yocto-bsp is a default layer in the project configuratoin") | ||
129 | else: | ||
130 | self.fail(msg='default layers are missing from the project configuration') | ||
131 | |||
146 | # testcase (1517) | 132 | # testcase (1517) |
147 | def test_verify_machine_information(self): | 133 | def test_verify_machine_information(self): |
148 | self.get(reverse('all-projects')) | 134 | self.get(reverse('all-projects')) |
149 | self.wait_until_present('#projectstable', poll=10) | 135 | self.load_projects_page_helper() |
150 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 136 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
151 | self.wait_until_present('#config-nav', poll=10) | ||
152 | 137 | ||
138 | self.wait_until_visible('#machine-section') | ||
139 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | ||
140 | self.wait_until_visible('#project-machine-name') | ||
141 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned') | ||
153 | try: | 142 | try: |
154 | self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') | ||
155 | self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned') | ||
156 | self.driver.find_element(By.ID, "change-machine-toggle").click() | 143 | self.driver.find_element(By.ID, "change-machine-toggle").click() |
157 | self.wait_until_visible('#select-machine-form', poll=10) | 144 | self.wait_until_visible('#select-machine-form') |
158 | self.wait_until_visible('#cancel-machine-change', poll=10) | 145 | self.wait_until_visible('#cancel-machine-change') |
159 | self.driver.find_element(By.ID, "cancel-machine-change").click() | 146 | self.driver.find_element(By.ID, "cancel-machine-change").click() |
160 | except: | 147 | except: |
161 | self.fail(msg='The machine information is wrong in the configuration page') | 148 | self.fail(msg='The machine information is wrong in the configuration page') |
@@ -163,83 +150,95 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase): | |||
163 | # testcase (1518) | 150 | # testcase (1518) |
164 | def test_verify_most_built_recipes_information(self): | 151 | def test_verify_most_built_recipes_information(self): |
165 | self.get(reverse('all-projects')) | 152 | self.get(reverse('all-projects')) |
166 | self.wait_until_present('#projectstable', poll=10) | 153 | self.load_projects_page_helper() |
167 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 154 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
168 | self.wait_until_present('#config-nav', poll=10) | 155 | self.wait_until_present('#config-nav') |
169 | project_URL=self.get_URL() | 156 | project_URL=self.get_URL() |
157 | |||
158 | self.wait_until_visible('#no-most-built') | ||
159 | self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present') | ||
170 | try: | 160 | try: |
171 | self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present') | ||
172 | self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click() | 161 | self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click() |
173 | self.wait_until_present('#config-nav', poll=10) | ||
174 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly') | ||
175 | except: | 162 | except: |
176 | self.fail(msg='No Most built information in project detail page') | 163 | self.fail(msg='No Most built information in project detail page') |
164 | self.wait_until_visible('#config-nav') | ||
165 | self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly') | ||
177 | 166 | ||
178 | # testcase (1519) | 167 | # testcase (1519) |
179 | def test_verify_project_release_information(self): | 168 | def test_verify_project_release_information(self): |
180 | self.get(reverse('all-projects')) | 169 | self.get(reverse('all-projects')) |
181 | self.wait_until_present('#projectstable', poll=10) | 170 | self.load_projects_page_helper() |
182 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 171 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
183 | self.wait_until_present('#config-nav', poll=10) | 172 | self.wait_until_visible('#project-release-title') |
184 | 173 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text), 'No project release title information in project detail page') | |
185 | try: | ||
186 | self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text),'The project release is not defined') | ||
187 | except: | ||
188 | self.fail(msg='No project release title information in project detail page') | ||
189 | 174 | ||
190 | # testcase (1520) | 175 | # testcase (1520) |
191 | def test_verify_layer_information(self): | 176 | def test_verify_layer_information(self): |
192 | self.get(reverse('all-projects')) | 177 | self.get(reverse('all-projects')) |
193 | self.wait_until_present('#projectstable', poll=10) | 178 | self.load_projects_page_helper() |
194 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 179 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
195 | self.wait_until_present('#config-nav', poll=10) | 180 | self.wait_until_present('#config-nav') |
196 | project_URL=self.get_URL() | 181 | project_URL=self.get_URL() |
182 | self.wait_until_visible('#layer-container') | ||
183 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") | ||
184 | self.wait_until_visible('#project-layers-count') | ||
185 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') | ||
186 | |||
197 | try: | 187 | try: |
198 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']") | ||
199 | self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count') | ||
200 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") | 188 | layer_list = self.driver.find_element(By.ID, "layers-in-project-list") |
201 | layers = layer_list.find_elements(By.TAG_NAME, "li") | 189 | layers = layer_list.find_elements(By.TAG_NAME, "li") |
190 | except: | ||
191 | self.fail(msg='No Layer information in project detail page') | ||
202 | 192 | ||
203 | for layer in layers: | 193 | for layer in layers: |
204 | if re.match ("openembedded-core",layer.text): | 194 | if re.match ("openembedded-core",layer.text): |
205 | print ("openembedded-core layer is a default layer in the project configuration") | 195 | print ("openembedded-core layer is a default layer in the project configuration") |
206 | elif re.match ("meta-poky",layer.text): | 196 | elif re.match ("meta-poky",layer.text): |
207 | print ("meta-poky layer is a default layer in the project configuration") | 197 | print ("meta-poky layer is a default layer in the project configuration") |
208 | elif re.match ("meta-yocto-bsp",layer.text): | 198 | elif re.match ("meta-yocto-bsp",layer.text): |
209 | print ("meta-yocto-bsp is a default layer in the project configuration") | 199 | print ("meta-yocto-bsp is a default layer in the project configuration") |
210 | else: | 200 | else: |
211 | self.fail(msg='default layers are missing from the project configuration') | 201 | self.fail(msg='default layers are missing from the project configuration') |
212 | 202 | ||
203 | try: | ||
213 | self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']") | 204 | self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']") |
214 | self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']") | 205 | self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']") |
215 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']") | 206 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']") |
216 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]") | 207 | self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]") |
217 | except: | 208 | except: |
218 | self.fail(msg='No Layer information in project detail page') | 209 | self.fail(msg='Layer configuration controls missing') |
219 | 210 | ||
220 | # testcase (1521) | 211 | # testcase (1521) |
221 | def test_verify_project_detail_links(self): | 212 | def test_verify_project_detail_links(self): |
222 | self.get(reverse('all-projects')) | 213 | self.get(reverse('all-projects')) |
223 | self.wait_until_present('#projectstable', poll=10) | 214 | self.load_projects_page_helper() |
224 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() | 215 | self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() |
225 | self.wait_until_present('#config-nav', poll=10) | 216 | self.wait_until_present('#config-nav') |
226 | project_URL=self.get_URL() | 217 | project_URL=self.get_URL() |
227 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click() | 218 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click() |
228 | self.wait_until_present('#config-nav', poll=10) | 219 | self.wait_until_visible('#topbar-configuration-tab') |
229 | self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled') | 220 | self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled') |
230 | 221 | ||
231 | try: | 222 | try: |
232 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click() | 223 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click() |
233 | self.wait_until_visible('#project-topbar', poll=10) | 224 | except: |
234 | self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled') | 225 | self.fail(msg='Builds tab information is not present') |
226 | |||
227 | self.wait_until_visible('#project-topbar') | ||
228 | self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled') | ||
229 | try: | ||
235 | self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']") | 230 | self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']") |
236 | except: | 231 | except: |
237 | self.fail(msg='Builds tab information is not present') | 232 | self.fail(msg='Builds tab information is not present') |
238 | 233 | ||
239 | try: | 234 | try: |
240 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click() | 235 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click() |
241 | self.wait_until_visible('#project-topbar', poll=10) | 236 | except: |
242 | self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled') | 237 | self.fail(msg='Import layer tab not loading properly') |
238 | |||
239 | self.wait_until_visible('#project-topbar') | ||
240 | self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled') | ||
241 | try: | ||
243 | self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']") | 242 | self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']") |
244 | self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']") | 243 | self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']") |
245 | except: | 244 | except: |
@@ -247,11 +246,12 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase): | |||
247 | 246 | ||
248 | try: | 247 | try: |
249 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click() | 248 | self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click() |
250 | self.wait_until_visible('#project-topbar', poll=10) | ||
251 | self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled') | ||
252 | self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly') | ||
253 | except: | 249 | except: |
254 | self.fail(msg='New custom image tab not loading properly') | 250 | self.fail(msg='New custom image tab not loading properly') |
255 | 251 | ||
252 | self.wait_until_visible('#project-topbar') | ||
253 | self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled') | ||
254 | self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly') | ||
255 | |||
256 | 256 | ||
257 | 257 | ||
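Note on the two tests above: both now funnel their page-load waits through load_projects_page_helper() and drop the explicit poll=10 arguments. The helper's body is not part of these hunks; a minimal sketch of what it presumably does, reusing the wait_until_present() call it replaces, would be:

    # Hypothetical sketch; the real helper lives in the shared Selenium test
    # base code and may wait on more than just the table.
    def load_projects_page_helper(self):
        # Equivalent of the inline waits it replaces: block until the
        # all-projects table has rendered before clicking into a project.
        self.wait_until_present('#projectstable')

The hunks also stop wrapping assertions in the same try/except as the element lookups, so a genuine assertion failure is now reported as such instead of being swallowed by the bare except and re-raised as a generic "No Layer information" failure.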
diff --git a/bitbake/lib/toaster/tests/functional/test_project_config.py b/bitbake/lib/toaster/tests/functional/test_project_config.py index dbee36aa4e..fcb1bc3284 100644 --- a/bitbake/lib/toaster/tests/functional/test_project_config.py +++ b/bitbake/lib/toaster/tests/functional/test_project_config.py | |||
@@ -7,7 +7,6 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | import string | 9 | import string |
10 | import random | ||
11 | import pytest | 10 | import pytest |
12 | from django.urls import reverse | 11 | from django.urls import reverse |
13 | from selenium.webdriver import Keys | 12 | from selenium.webdriver import Keys |
@@ -18,9 +17,6 @@ from selenium.webdriver.common.by import By | |||
18 | 17 | ||
19 | from .utils import get_projectId_from_url | 18 | from .utils import get_projectId_from_url |
20 | 19 | ||
21 | |||
22 | @pytest.mark.django_db | ||
23 | @pytest.mark.order("last") | ||
24 | class TestProjectConfig(SeleniumFunctionalTestCase): | 20 | class TestProjectConfig(SeleniumFunctionalTestCase): |
25 | project_id = None | 21 | project_id = None |
26 | PROJECT_NAME = 'TestProjectConfig' | 22 | PROJECT_NAME = 'TestProjectConfig' |
@@ -28,42 +24,6 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
28 | INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \ | 24 | INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \ |
29 | 'any of these characters' | 25 | 'any of these characters' |
30 | 26 | ||
31 | def _create_project(self, project_name): | ||
32 | """ Create/Test new project using: | ||
33 | - Project Name: Any string | ||
34 | - Release: Any string | ||
35 | - Merge Toaster settings: True or False | ||
36 | """ | ||
37 | self.get(reverse('newproject')) | ||
38 | self.wait_until_visible('#new-project-name', poll=2) | ||
39 | self.find("#new-project-name").send_keys(project_name) | ||
40 | select = Select(self.find("#projectversion")) | ||
41 | select.select_by_value('3') | ||
42 | |||
43 | # check merge toaster settings | ||
44 | checkbox = self.find('.checkbox-mergeattr') | ||
45 | if not checkbox.is_selected(): | ||
46 | checkbox.click() | ||
47 | |||
48 | if self.PROJECT_NAME != 'TestProjectConfig': | ||
49 | # Reset project name if it's not the default one | ||
50 | self.PROJECT_NAME = 'TestProjectConfig' | ||
51 | |||
52 | self.find("#create-project-button").click() | ||
53 | |||
54 | try: | ||
55 | self.wait_until_visible('#hint-error-project-name', poll=2) | ||
56 | url = reverse('project', args=(TestProjectConfig.project_id, )) | ||
57 | self.get(url) | ||
58 | self.wait_until_visible('#config-nav', poll=3) | ||
59 | except TimeoutException: | ||
60 | self.wait_until_visible('#config-nav', poll=3) | ||
61 | |||
62 | def _random_string(self, length): | ||
63 | return ''.join( | ||
64 | random.choice(string.ascii_letters) for _ in range(length) | ||
65 | ) | ||
66 | |||
67 | def _get_config_nav_item(self, index): | 27 | def _get_config_nav_item(self, index): |
68 | config_nav = self.find('#config-nav') | 28 | config_nav = self.find('#config-nav') |
69 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | 29 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] |
@@ -72,16 +32,14 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
72 | """ Navigate to project BitBake variables page """ | 32 | """ Navigate to project BitBake variables page """ |
73 | # check if the menu is displayed | 33 | # check if the menu is displayed |
74 | if TestProjectConfig.project_id is None: | 34 | if TestProjectConfig.project_id is None: |
75 | self._create_project(project_name=self._random_string(10)) | 35 | TestProjectConfig.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True) |
76 | current_url = self.driver.current_url | 36 | |
77 | TestProjectConfig.project_id = get_projectId_from_url(current_url) | 37 | url = reverse('projectconf', args=(TestProjectConfig.project_id,)) |
78 | else: | 38 | self.get(url) |
79 | url = reverse('projectconf', args=(TestProjectConfig.project_id,)) | 39 | self.wait_until_visible('#config-nav') |
80 | self.get(url) | ||
81 | self.wait_until_visible('#config-nav', poll=3) | ||
82 | bbv_page_link = self._get_config_nav_item(9) | 40 | bbv_page_link = self._get_config_nav_item(9) |
83 | bbv_page_link.click() | 41 | bbv_page_link.click() |
84 | self.wait_until_visible('#config-nav', poll=3) | 42 | self.wait_until_visible('#config-nav') |
85 | 43 | ||
86 | def test_no_underscore_iamgefs_type(self): | 44 | def test_no_underscore_iamgefs_type(self): |
87 | """ | 45 | """ |
@@ -90,13 +48,13 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
90 | self._navigate_bbv_page() | 48 | self._navigate_bbv_page() |
91 | imagefs_type = "foo_bar" | 49 | imagefs_type = "foo_bar" |
92 | 50 | ||
93 | self.wait_until_visible('#change-image_fstypes-icon', poll=2) | 51 | self.wait_until_visible('#change-image_fstypes-icon') |
94 | 52 | ||
95 | self.click('#change-image_fstypes-icon') | 53 | self.click('#change-image_fstypes-icon') |
96 | 54 | ||
97 | self.enter_text('#new-imagefs_types', imagefs_type) | 55 | self.enter_text('#new-imagefs_types', imagefs_type) |
98 | 56 | ||
99 | element = self.wait_until_visible('#hintError-image-fs_type', poll=2) | 57 | element = self.wait_until_visible('#hintError-image-fs_type') |
100 | 58 | ||
101 | self.assertTrue(("A valid image type cannot include underscores" in element.text), | 59 | self.assertTrue(("A valid image type cannot include underscores" in element.text), |
102 | "Did not find underscore error message") | 60 | "Did not find underscore error message") |
@@ -110,7 +68,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
110 | 68 | ||
111 | imagefs_type = "btrfs" | 69 | imagefs_type = "btrfs" |
112 | 70 | ||
113 | self.wait_until_visible('#change-image_fstypes-icon', poll=2) | 71 | self.wait_until_visible('#change-image_fstypes-icon') |
114 | 72 | ||
115 | self.click('#change-image_fstypes-icon') | 73 | self.click('#change-image_fstypes-icon') |
116 | 74 | ||
@@ -129,22 +87,20 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
129 | """ | 87 | """ |
130 | self._navigate_bbv_page() | 88 | self._navigate_bbv_page() |
131 | 89 | ||
132 | self.wait_until_visible('#change-image_fstypes-icon', poll=2) | 90 | self.wait_until_visible('#change-image_fstypes-icon') |
133 | |||
134 | self.click('#change-image_fstypes-icon') | 91 | self.click('#change-image_fstypes-icon') |
135 | 92 | ||
136 | checkboxes_selector = '.fs-checkbox-fstypes' | 93 | checkboxes_selector = '.fs-checkbox-fstypes' |
137 | 94 | ||
138 | self.wait_until_visible(checkboxes_selector, poll=2) | 95 | self.wait_until_visible(checkboxes_selector) |
139 | checkboxes = self.find_all(checkboxes_selector) | 96 | checkboxes = self.find_all(checkboxes_selector) |
140 | 97 | ||
141 | for checkbox in checkboxes: | 98 | for checkbox in checkboxes: |
142 | if checkbox.get_attribute("value") == "cpio": | 99 | if checkbox.get_attribute("value") == "cpio": |
143 | checkbox.click() | 100 | checkbox.click() |
101 | self.wait_until_visible('#new-imagefs_types') | ||
144 | element = self.driver.find_element(By.ID, 'new-imagefs_types') | 102 | element = self.driver.find_element(By.ID, 'new-imagefs_types') |
145 | 103 | ||
146 | self.wait_until_visible('#new-imagefs_types', poll=2) | ||
147 | |||
148 | self.assertTrue("cpio" in element.get_attribute('value'), | 104 | self.assertTrue("cpio" in element.get_attribute('value'), |
149 | "Imagefs not added into the textbox") | 105 | "Imagefs not added into the textbox") |
150 | checkbox.click() | 106 | checkbox.click() |
@@ -160,20 +116,19 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
160 | 116 | ||
161 | # activate the input to edit download dir | 117 | # activate the input to edit download dir |
162 | try: | 118 | try: |
163 | change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2) | 119 | change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon') |
164 | except TimeoutException: | 120 | except TimeoutException: |
165 | # If download dir is not displayed, test is skipped | 121 | # If download dir is not displayed, test is skipped |
166 | change_dl_dir_btn = None | 122 | change_dl_dir_btn = None |
167 | 123 | ||
168 | if change_dl_dir_btn: | 124 | if change_dl_dir_btn: |
169 | change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2) | ||
170 | change_dl_dir_btn.click() | 125 | change_dl_dir_btn.click() |
171 | 126 | ||
172 | # downloads dir path doesn't start with / or ${...} | 127 | # downloads dir path doesn't start with / or ${...} |
173 | input_field = self.wait_until_visible('#new-dl_dir', poll=2) | 128 | input_field = self.wait_until_visible('#new-dl_dir') |
174 | input_field.clear() | 129 | input_field.clear() |
175 | self.enter_text('#new-dl_dir', 'home/foo') | 130 | self.enter_text('#new-dl_dir', 'home/foo') |
176 | element = self.wait_until_visible('#hintError-initialChar-dl_dir', poll=2) | 131 | element = self.wait_until_visible('#hintError-initialChar-dl_dir') |
177 | 132 | ||
178 | msg = 'downloads directory path starts with invalid character but ' \ | 133 | msg = 'downloads directory path starts with invalid character but ' \ |
179 | 'treated as valid' | 134 | 'treated as valid' |
@@ -183,7 +138,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
183 | self.driver.find_element(By.ID, 'new-dl_dir').clear() | 138 | self.driver.find_element(By.ID, 'new-dl_dir').clear() |
184 | self.enter_text('#new-dl_dir', '/foo/bar a') | 139 | self.enter_text('#new-dl_dir', '/foo/bar a') |
185 | 140 | ||
186 | element = self.wait_until_visible('#hintError-dl_dir', poll=2) | 141 | element = self.wait_until_visible('#hintError-dl_dir') |
187 | msg = 'downloads directory path characters invalid but treated as valid' | 142 | msg = 'downloads directory path characters invalid but treated as valid' |
188 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 143 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
189 | 144 | ||
@@ -191,7 +146,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
191 | self.driver.find_element(By.ID,'new-dl_dir').clear() | 146 | self.driver.find_element(By.ID,'new-dl_dir').clear() |
192 | self.enter_text('#new-dl_dir', '${TOPDIR}/down foo') | 147 | self.enter_text('#new-dl_dir', '${TOPDIR}/down foo') |
193 | 148 | ||
194 | element = self.wait_until_visible('#hintError-dl_dir', poll=2) | 149 | element = self.wait_until_visible('#hintError-dl_dir') |
195 | msg = 'downloads directory path characters invalid but treated as valid' | 150 | msg = 'downloads directory path characters invalid but treated as valid' |
196 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 151 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
197 | 152 | ||
@@ -219,10 +174,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
219 | self._navigate_bbv_page() | 174 | self._navigate_bbv_page() |
220 | 175 | ||
221 | try: | 176 | try: |
222 | btn_chg_sstate_dir = self.wait_until_visible( | 177 | btn_chg_sstate_dir = self.wait_until_visible('#change-sstate_dir-icon') |
223 | '#change-sstate_dir-icon', | ||
224 | poll=2 | ||
225 | ) | ||
226 | self.click('#change-sstate_dir-icon') | 178 | self.click('#change-sstate_dir-icon') |
227 | except TimeoutException: | 179 | except TimeoutException: |
228 | # If sstate_dir is not displayed, test is skipped | 180 | # If sstate_dir is not displayed, test is skipped |
@@ -230,10 +182,10 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
230 | 182 | ||
231 | if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed | 183 | if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed |
232 | # path doesn't start with / or ${...} | 184 | # path doesn't start with / or ${...} |
233 | input_field = self.wait_until_visible('#new-sstate_dir', poll=2) | 185 | input_field = self.wait_until_visible('#new-sstate_dir') |
234 | input_field.clear() | 186 | input_field.clear() |
235 | self.enter_text('#new-sstate_dir', 'home/foo') | 187 | self.enter_text('#new-sstate_dir', 'home/foo') |
236 | element = self.wait_until_visible('#hintError-initialChar-sstate_dir', poll=2) | 188 | element = self.wait_until_visible('#hintError-initialChar-sstate_dir') |
237 | 189 | ||
238 | msg = 'sstate directory path starts with invalid character but ' \ | 190 | msg = 'sstate directory path starts with invalid character but ' \ |
239 | 'treated as valid' | 191 | 'treated as valid' |
@@ -243,7 +195,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
243 | self.driver.find_element(By.ID, 'new-sstate_dir').clear() | 195 | self.driver.find_element(By.ID, 'new-sstate_dir').clear() |
244 | self.enter_text('#new-sstate_dir', '/foo/bar a') | 196 | self.enter_text('#new-sstate_dir', '/foo/bar a') |
245 | 197 | ||
246 | element = self.wait_until_visible('#hintError-sstate_dir', poll=2) | 198 | element = self.wait_until_visible('#hintError-sstate_dir') |
247 | msg = 'sstate directory path characters invalid but treated as valid' | 199 | msg = 'sstate directory path characters invalid but treated as valid' |
248 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 200 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
249 | 201 | ||
@@ -251,7 +203,7 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
251 | self.driver.find_element(By.ID,'new-sstate_dir').clear() | 203 | self.driver.find_element(By.ID,'new-sstate_dir').clear() |
252 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo') | 204 | self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo') |
253 | 205 | ||
254 | element = self.wait_until_visible('#hintError-sstate_dir', poll=2) | 206 | element = self.wait_until_visible('#hintError-sstate_dir') |
255 | msg = 'sstate directory path characters invalid but treated as valid' | 207 | msg = 'sstate directory path characters invalid but treated as valid' |
256 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) | 208 | self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg) |
257 | 209 | ||
@@ -275,13 +227,14 @@ class TestProjectConfig(SeleniumFunctionalTestCase): | |||
275 | var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values() | 227 | var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values() |
276 | """ Change bitbake variable value """ | 228 | """ Change bitbake variable value """ |
277 | self._navigate_bbv_page() | 229 | self._navigate_bbv_page() |
278 | self.wait_until_visible(f'#{btn_id}', poll=2) | 230 | self.wait_until_visible(f'#{btn_id}') |
279 | if kwargs.get('new_variable'): | 231 | if kwargs.get('new_variable'): |
280 | self.find(f"#{btn_id}").clear() | 232 | self.find(f"#{btn_id}").clear() |
281 | self.enter_text(f"#{btn_id}", f"{var_name}") | 233 | self.enter_text(f"#{btn_id}", f"{var_name}") |
282 | else: | 234 | else: |
283 | self.click(f'#{btn_id}') | 235 | self.click(f'#{btn_id}') |
284 | self.wait_until_visible(f'#{input_id}', poll=2) | 236 | |
237 | self.wait_until_visible(f'#{input_id}') | ||
285 | 238 | ||
286 | if kwargs.get('is_select'): | 239 | if kwargs.get('is_select'): |
287 | select = Select(self.find(f'#{input_id}')) | 240 | select = Select(self.find(f'#{input_id}')) |
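Throughout this file the patch strips the poll= keyword from the wait_until_visible() calls, standardising on the helper's default instead of per-call values of 2 or 3. Assuming the helper is the usual WebDriverWait wrapper, it would look roughly like this sketch:

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    # Hypothetical sketch with an assumed default timeout; the real helper
    # in the Selenium test base may differ.
    def wait_until_visible(self, selector, timeout=60):
        return WebDriverWait(self.driver, timeout).until(
            EC.visibility_of_element_located((By.CSS_SELECTOR, selector)))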
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page.py b/bitbake/lib/toaster/tests/functional/test_project_page.py index adbe3587e4..429d86feba 100644 --- a/bitbake/lib/toaster/tests/functional/test_project_page.py +++ b/bitbake/lib/toaster/tests/functional/test_project_page.py | |||
@@ -7,8 +7,8 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | import os | 9 | import os |
10 | import random | ||
11 | import string | 10 | import string |
11 | import time | ||
12 | from unittest import skip | 12 | from unittest import skip |
13 | import pytest | 13 | import pytest |
14 | from django.urls import reverse | 14 | from django.urls import reverse |
@@ -22,58 +22,17 @@ from selenium.webdriver.common.by import By | |||
22 | 22 | ||
23 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled | 23 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled |
24 | 24 | ||
25 | 25 | class TestProjectPageBase(SeleniumFunctionalTestCase): | |
26 | @pytest.mark.django_db | ||
27 | @pytest.mark.order("last") | ||
28 | class TestProjectPage(SeleniumFunctionalTestCase): | ||
29 | project_id = None | 26 | project_id = None |
30 | PROJECT_NAME = 'TestProjectPage' | 27 | PROJECT_NAME = 'TestProjectPage' |
31 | 28 | ||
32 | def _create_project(self, project_name): | ||
33 | """ Create/Test new project using: | ||
34 | - Project Name: Any string | ||
35 | - Release: Any string | ||
36 | - Merge Toaster settings: True or False | ||
37 | """ | ||
38 | self.get(reverse('newproject')) | ||
39 | self.wait_until_visible('#new-project-name') | ||
40 | self.find("#new-project-name").send_keys(project_name) | ||
41 | select = Select(self.find("#projectversion")) | ||
42 | select.select_by_value('3') | ||
43 | |||
44 | # check merge toaster settings | ||
45 | checkbox = self.find('.checkbox-mergeattr') | ||
46 | if not checkbox.is_selected(): | ||
47 | checkbox.click() | ||
48 | |||
49 | if self.PROJECT_NAME != 'TestProjectPage': | ||
50 | # Reset project name if it's not the default one | ||
51 | self.PROJECT_NAME = 'TestProjectPage' | ||
52 | |||
53 | self.find("#create-project-button").click() | ||
54 | |||
55 | try: | ||
56 | self.wait_until_visible('#hint-error-project-name') | ||
57 | url = reverse('project', args=(TestProjectPage.project_id, )) | ||
58 | self.get(url) | ||
59 | self.wait_until_visible('#config-nav', poll=3) | ||
60 | except TimeoutException: | ||
61 | self.wait_until_visible('#config-nav', poll=3) | ||
62 | |||
63 | def _random_string(self, length): | ||
64 | return ''.join( | ||
65 | random.choice(string.ascii_letters) for _ in range(length) | ||
66 | ) | ||
67 | |||
68 | def _navigate_to_project_page(self): | 29 | def _navigate_to_project_page(self): |
69 | # Navigate to project page | 30 | # Navigate to project page |
70 | if TestProjectPage.project_id is None: | 31 | if TestProjectPageBase.project_id is None: |
71 | self._create_project(project_name=self._random_string(10)) | 32 | TestProjectPageBase.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True) |
72 | current_url = self.driver.current_url | 33 | |
73 | TestProjectPage.project_id = get_projectId_from_url(current_url) | 34 | url = reverse('project', args=(TestProjectPageBase.project_id,)) |
74 | else: | 35 | self.get(url) |
75 | url = reverse('project', args=(TestProjectPage.project_id,)) | ||
76 | self.get(url) | ||
77 | self.wait_until_visible('#config-nav') | 36 | self.wait_until_visible('#config-nav') |
78 | 37 | ||
79 | def _get_create_builds(self, **kwargs): | 38 | def _get_create_builds(self, **kwargs): |
@@ -81,14 +40,14 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
81 | # parameters for builds to associate with the projects | 40 | # parameters for builds to associate with the projects |
82 | now = timezone.now() | 41 | now = timezone.now() |
83 | self.project1_build_success = { | 42 | self.project1_build_success = { |
84 | 'project': Project.objects.get(id=TestProjectPage.project_id), | 43 | 'project': Project.objects.get(id=TestProjectPageBase.project_id), |
85 | 'started_on': now, | 44 | 'started_on': now, |
86 | 'completed_on': now, | 45 | 'completed_on': now, |
87 | 'outcome': Build.SUCCEEDED | 46 | 'outcome': Build.SUCCEEDED |
88 | } | 47 | } |
89 | 48 | ||
90 | self.project1_build_failure = { | 49 | self.project1_build_failure = { |
91 | 'project': Project.objects.get(id=TestProjectPage.project_id), | 50 | 'project': Project.objects.get(id=TestProjectPageBase.project_id), |
92 | 'started_on': now, | 51 | 'started_on': now, |
93 | 'completed_on': now, | 52 | 'completed_on': now, |
94 | 'outcome': Build.FAILED | 53 | 'outcome': Build.FAILED |
@@ -133,7 +92,8 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
133 | list_check_box_id: list | 92 | list_check_box_id: list |
134 | ): | 93 | ): |
135 | # Check edit column | 94 | # Check edit column |
136 | edit_column = self.find(f'#{edit_btn_id}') | 95 | finder = lambda driver: self.find(f'#{edit_btn_id}') |
96 | edit_column = self.wait_until_element_clickable(finder) | ||
137 | self.assertTrue(edit_column.is_displayed()) | 97 | self.assertTrue(edit_column.is_displayed()) |
138 | edit_column.click() | 98 | edit_column.click() |
139 | # Check dropdown is visible | 99 | # Check dropdown is visible |
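This is the first of several places where the patch replaces a plain self.find() with wait_until_element_clickable(finder), passing the lookup as a lambda so it is re-evaluated on every poll, which also sidesteps stale element references after a re-render. The helper itself is defined elsewhere; a minimal sketch, assuming a 60-second timeout, would be:

    from selenium.webdriver.support.ui import WebDriverWait

    # Hypothetical sketch of the helper these tests assume: poll the finder
    # callable until it returns an element that is displayed and enabled.
    def wait_until_element_clickable(self, finder, timeout=60):
        def clickable(driver):
            element = finder(driver)
            return element if element.is_displayed() and element.is_enabled() else False
        return WebDriverWait(self.driver, timeout, poll_frequency=1).until(clickable)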
@@ -192,7 +152,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
192 | def test_show_rows(row_to_show, show_row_link): | 152 | def test_show_rows(row_to_show, show_row_link): |
193 | # Check that we can show rows == row_to_show | 153 | # Check that we can show rows == row_to_show |
194 | show_row_link.select_by_value(str(row_to_show)) | 154 | show_row_link.select_by_value(str(row_to_show)) |
195 | self.wait_until_visible(f'#{table_selector} tbody tr', poll=3) | 155 | self.wait_until_visible(f'#{table_selector} tbody tr') |
196 | # check at least some rows are visible | 156 | # check at least some rows are visible |
197 | self.assertTrue( | 157 | self.assertTrue( |
198 | len(self.find_all(f'#{table_selector} tbody tr')) > 0 | 158 | len(self.find_all(f'#{table_selector} tbody tr')) > 0 |
@@ -222,34 +182,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
222 | rows = self.find_all(f'#{table_selector} tbody tr') | 182 | rows = self.find_all(f'#{table_selector} tbody tr') |
223 | self.assertTrue(len(rows) > 0) | 183 | self.assertTrue(len(rows) > 0) |
224 | 184 | ||
225 | def test_create_project(self): | 185 | class TestProjectPage(TestProjectPageBase): |
226 | """ Create/Test new project using: | ||
227 | - Project Name: Any string | ||
228 | - Release: Any string | ||
229 | - Merge Toaster settings: True or False | ||
230 | """ | ||
231 | self._create_project(project_name=self.PROJECT_NAME) | ||
232 | |||
233 | def test_image_recipe_editColumn(self): | ||
234 | """ Test the edit column feature in image recipe table on project page """ | ||
235 | self._get_create_builds(success=10, failure=10) | ||
236 | |||
237 | url = reverse('projectimagerecipes', args=(TestProjectPage.project_id,)) | ||
238 | self.get(url) | ||
239 | self.wait_until_present('#imagerecipestable tbody tr') | ||
240 | |||
241 | column_list = [ | ||
242 | 'get_description_or_summary', 'layer_version__get_vcs_reference', | ||
243 | 'layer_version__layer__name', 'license', 'recipe-file', 'section', | ||
244 | 'version' | ||
245 | ] | ||
246 | |||
247 | # Check that we can hide the edit column | ||
248 | self._mixin_test_table_edit_column( | ||
249 | 'imagerecipestable', | ||
250 | 'edit-columns-button', | ||
251 | [f'checkbox-{column}' for column in column_list] | ||
252 | ) | ||
253 | 186 | ||
254 | def test_page_header_on_project_page(self): | 187 | def test_page_header_on_project_page(self): |
255 | """ Check page header in project page: | 188 | """ Check page header in project page: |
@@ -272,8 +205,8 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
272 | logo_img = logo.find_element(By.TAG_NAME, 'img') | 205 | logo_img = logo.find_element(By.TAG_NAME, 'img') |
273 | self.assertTrue(logo_img.is_displayed(), | 206 | self.assertTrue(logo_img.is_displayed(), |
274 | 'Logo of Yocto project not found') | 207 | 'Logo of Yocto project not found') |
275 | self.assertTrue( | 208 | self.assertIn( |
276 | '/static/img/logo.png' in str(logo_img.get_attribute('src')), | 209 | '/static/img/logo.png', str(logo_img.get_attribute('src')), |
277 | 'Logo of Yocto project not found' | 210 | 'Logo of Yocto project not found' |
278 | ) | 211 | ) |
279 | # "Toaster"+" Information icon", clickable | 212 | # "Toaster"+" Information icon", clickable |
@@ -282,34 +215,34 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
282 | "//div[@class='toaster-navbar-brand']//a[@class='brand']", | 215 | "//div[@class='toaster-navbar-brand']//a[@class='brand']", |
283 | ) | 216 | ) |
284 | self.assertTrue(toaster.is_displayed(), 'Toaster not found') | 217 | self.assertTrue(toaster.is_displayed(), 'Toaster not found') |
285 | self.assertTrue(toaster.text == 'Toaster') | 218 | self.assertEqual(toaster.text, 'Toaster') |
286 | info_sign = self.find('.glyphicon-info-sign') | 219 | info_sign = self.find('.glyphicon-info-sign') |
287 | self.assertTrue(info_sign.is_displayed()) | 220 | self.assertTrue(info_sign.is_displayed()) |
288 | 221 | ||
289 | # "Server Icon" + "All builds" | 222 | # "Server Icon" + "All builds" |
290 | all_builds = self.find('#navbar-all-builds') | 223 | all_builds = self.find('#navbar-all-builds') |
291 | all_builds_link = all_builds.find_element(By.TAG_NAME, 'a') | 224 | all_builds_link = all_builds.find_element(By.TAG_NAME, 'a') |
292 | self.assertTrue("All builds" in all_builds_link.text) | 225 | self.assertIn("All builds", all_builds_link.text) |
293 | self.assertTrue( | 226 | self.assertIn( |
294 | '/toastergui/builds/' in str(all_builds_link.get_attribute('href')) | 227 | '/toastergui/builds/', str(all_builds_link.get_attribute('href')) |
295 | ) | 228 | ) |
296 | server_icon = all_builds.find_element(By.TAG_NAME, 'i') | 229 | server_icon = all_builds.find_element(By.TAG_NAME, 'i') |
297 | self.assertTrue( | 230 | self.assertEqual( |
298 | server_icon.get_attribute('class') == 'glyphicon glyphicon-tasks' | 231 | server_icon.get_attribute('class'), 'glyphicon glyphicon-tasks' |
299 | ) | 232 | ) |
300 | self.assertTrue(server_icon.is_displayed()) | 233 | self.assertTrue(server_icon.is_displayed()) |
301 | 234 | ||
302 | # "Directory Icon" + "All projects" | 235 | # "Directory Icon" + "All projects" |
303 | all_projects = self.find('#navbar-all-projects') | 236 | all_projects = self.find('#navbar-all-projects') |
304 | all_projects_link = all_projects.find_element(By.TAG_NAME, 'a') | 237 | all_projects_link = all_projects.find_element(By.TAG_NAME, 'a') |
305 | self.assertTrue("All projects" in all_projects_link.text) | 238 | self.assertIn("All projects", all_projects_link.text) |
306 | self.assertTrue( | 239 | self.assertIn( |
307 | '/toastergui/projects/' in str(all_projects_link.get_attribute( | 240 | '/toastergui/projects/', str(all_projects_link.get_attribute( |
308 | 'href')) | 241 | 'href')) |
309 | ) | 242 | ) |
310 | dir_icon = all_projects.find_element(By.TAG_NAME, 'i') | 243 | dir_icon = all_projects.find_element(By.TAG_NAME, 'i') |
311 | self.assertTrue( | 244 | self.assertEqual( |
312 | dir_icon.get_attribute('class') == 'icon-folder-open' | 245 | dir_icon.get_attribute('class'), 'icon-folder-open' |
313 | ) | 246 | ) |
314 | self.assertTrue(dir_icon.is_displayed()) | 247 | self.assertTrue(dir_icon.is_displayed()) |
315 | 248 | ||
@@ -317,23 +250,23 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
317 | toaster_docs_link = self.find('#navbar-docs') | 250 | toaster_docs_link = self.find('#navbar-docs') |
318 | toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME, | 251 | toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME, |
319 | 'a') | 252 | 'a') |
320 | self.assertTrue("Documentation" in toaster_docs_link_link.text) | 253 | self.assertIn("Documentation", toaster_docs_link_link.text) |
321 | self.assertTrue( | 254 | self.assertEqual( |
322 | toaster_docs_link_link.get_attribute('href') == 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual' | 255 | toaster_docs_link_link.get_attribute('href'), 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual' |
323 | ) | 256 | ) |
324 | book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i') | 257 | book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i') |
325 | self.assertTrue( | 258 | self.assertEqual( |
326 | book_icon.get_attribute('class') == 'glyphicon glyphicon-book' | 259 | book_icon.get_attribute('class'), 'glyphicon glyphicon-book' |
327 | ) | 260 | ) |
328 | self.assertTrue(book_icon.is_displayed()) | 261 | self.assertTrue(book_icon.is_displayed()) |
329 | 262 | ||
330 | # AT RIGHT -> button "New project" | 263 | # AT RIGHT -> button "New project" |
331 | new_project_button = self.find('#new-project-button') | 264 | new_project_button = self.find('#new-project-button') |
332 | self.assertTrue(new_project_button.is_displayed()) | 265 | self.assertTrue(new_project_button.is_displayed()) |
333 | self.assertTrue(new_project_button.text == 'New project') | 266 | self.assertEqual(new_project_button.text, 'New project') |
334 | new_project_button.click() | 267 | new_project_button.click() |
335 | self.assertTrue( | 268 | self.assertIn( |
336 | '/toastergui/newproject/' in str(self.driver.current_url) | 269 | '/toastergui/newproject/', str(self.driver.current_url) |
337 | ) | 270 | ) |
338 | 271 | ||
339 | def test_edit_project_name(self): | 272 | def test_edit_project_name(self): |
@@ -348,7 +281,8 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
348 | 281 | ||
349 | # click on "Edit" icon button | 282 | # click on "Edit" icon button |
350 | self.wait_until_visible('#project-name-container') | 283 | self.wait_until_visible('#project-name-container') |
351 | edit_button = self.find('#project-change-form-toggle') | 284 | finder = lambda driver: self.find('#project-change-form-toggle') |
285 | edit_button = self.wait_until_element_clickable(finder) | ||
352 | edit_button.click() | 286 | edit_button.click() |
353 | project_name_input = self.find('#project-name-change-input') | 287 | project_name_input = self.find('#project-name-change-input') |
354 | self.assertTrue(project_name_input.is_displayed()) | 288 | self.assertTrue(project_name_input.is_displayed()) |
@@ -358,8 +292,8 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
358 | 292 | ||
359 | # check project name is changed | 293 | # check project name is changed |
360 | self.wait_until_visible('#project-name-container') | 294 | self.wait_until_visible('#project-name-container') |
361 | self.assertTrue( | 295 | self.assertIn( |
362 | 'New Name' in str(self.find('#project-name-container').text) | 296 | 'New Name', str(self.find('#project-name-container').text) |
363 | ) | 297 | ) |
364 | 298 | ||
365 | def test_project_page_tabs(self): | 299 | def test_project_page_tabs(self): |
@@ -376,10 +310,10 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
376 | # check "configuration" tab | 310 | # check "configuration" tab |
377 | self.wait_until_visible('#topbar-configuration-tab') | 311 | self.wait_until_visible('#topbar-configuration-tab') |
378 | config_tab = self.find('#topbar-configuration-tab') | 312 | config_tab = self.find('#topbar-configuration-tab') |
379 | self.assertTrue(config_tab.get_attribute('class') == 'active') | 313 | self.assertEqual(config_tab.get_attribute('class'), 'active') |
380 | self.assertTrue('Configuration' in str(config_tab.text)) | 314 | self.assertIn('Configuration', str(config_tab.text)) |
381 | self.assertTrue( | 315 | self.assertIn( |
382 | f"/toastergui/project/{TestProjectPage.project_id}" in str(self.driver.current_url) | 316 | f"/toastergui/project/{TestProjectPageBase.project_id}", str(self.driver.current_url) |
383 | ) | 317 | ) |
384 | 318 | ||
385 | def get_tabs(): | 319 | def get_tabs(): |
@@ -392,9 +326,9 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
392 | def check_tab_link(tab_index, tab_name, url): | 326 | def check_tab_link(tab_index, tab_name, url): |
393 | tab = get_tabs()[tab_index] | 327 | tab = get_tabs()[tab_index] |
394 | tab_link = tab.find_element(By.TAG_NAME, 'a') | 328 | tab_link = tab.find_element(By.TAG_NAME, 'a') |
395 | self.assertTrue(url in tab_link.get_attribute('href')) | 329 | self.assertIn(url, tab_link.get_attribute('href')) |
396 | self.assertTrue(tab_name in tab_link.text) | 330 | self.assertIn(tab_name, tab_link.text) |
397 | self.assertTrue(tab.get_attribute('class') == 'active') | 331 | self.assertEqual(tab.get_attribute('class'), 'active') |
398 | 332 | ||
399 | # check "Builds" tab | 333 | # check "Builds" tab |
400 | builds_tab = get_tabs()[1] | 334 | builds_tab = get_tabs()[1] |
@@ -402,7 +336,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
402 | check_tab_link( | 336 | check_tab_link( |
403 | 1, | 337 | 1, |
404 | 'Builds', | 338 | 'Builds', |
405 | f"/toastergui/project/{TestProjectPage.project_id}/builds" | 339 | f"/toastergui/project/{TestProjectPageBase.project_id}/builds" |
406 | ) | 340 | ) |
407 | 341 | ||
408 | # check "Import layers" tab | 342 | # check "Import layers" tab |
@@ -411,7 +345,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
411 | check_tab_link( | 345 | check_tab_link( |
412 | 2, | 346 | 2, |
413 | 'Import layer', | 347 | 'Import layer', |
414 | f"/toastergui/project/{TestProjectPage.project_id}/importlayer" | 348 | f"/toastergui/project/{TestProjectPageBase.project_id}/importlayer" |
415 | ) | 349 | ) |
416 | 350 | ||
417 | # check "New custom image" tab | 351 | # check "New custom image" tab |
@@ -420,7 +354,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
420 | check_tab_link( | 354 | check_tab_link( |
421 | 3, | 355 | 3, |
422 | 'New custom image', | 356 | 'New custom image', |
423 | f"/toastergui/project/{TestProjectPage.project_id}/newcustomimage" | 357 | f"/toastergui/project/{TestProjectPageBase.project_id}/newcustomimage" |
424 | ) | 358 | ) |
425 | 359 | ||
426 | # check search box can be used to build recipes | 360 | # check search box can be used to build recipes |
@@ -428,13 +362,17 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
428 | search_box.send_keys('core-image-minimal') | 362 | search_box.send_keys('core-image-minimal') |
429 | self.find('#build-button').click() | 363 | self.find('#build-button').click() |
430 | self.wait_until_visible('#latest-builds') | 364 | self.wait_until_visible('#latest-builds') |
431 | lastest_builds = self.driver.find_elements( | 365 | buildtext = "Loading" |
432 | By.XPATH, | 366 | while "Loading" in buildtext: |
433 | '//div[@id="latest-builds"]', | 367 | time.sleep(1) |
434 | ) | 368 | lastest_builds = self.driver.find_elements( |
435 | last_build = lastest_builds[0] | 369 | By.XPATH, |
436 | self.assertTrue( | 370 | '//div[@id="latest-builds"]', |
437 | 'core-image-minimal' in str(last_build.text) | 371 | ) |
372 | last_build = lastest_builds[0] | ||
373 | buildtext = last_build.text | ||
374 | self.assertIn( | ||
375 | 'core-image-minimal', str(last_build.text) | ||
438 | ) | 376 | ) |
439 | 377 | ||
440 | def test_softwareRecipe_page(self): | 378 | def test_softwareRecipe_page(self): |
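The new while loop above re-reads #latest-builds once a second until its text no longer contains "Loading", because the newest build tile first renders as a placeholder. As written the loop has no upper bound; inside the test, the same wait could be expressed with WebDriverWait and an explicit timeout (a sketch only, the 60-second limit is an assumption):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait

    def latest_build_rendered(driver):
        # Truthy only once the newest build tile exists and has real text.
        builds = driver.find_elements(By.XPATH, '//div[@id="latest-builds"]')
        return builds[0] if builds and 'Loading' not in builds[0].text else False

    last_build = WebDriverWait(self.driver, 60, poll_frequency=1).until(latest_build_rendered)
    self.assertIn('core-image-minimal', str(last_build.text))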
@@ -446,7 +384,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
446 | """ | 384 | """ |
447 | self._navigate_to_config_nav('softwarerecipestable', 4) | 385 | self._navigate_to_config_nav('softwarerecipestable', 4) |
448 | # check title "Compatible software recipes" is displayed | 386 | # check title "Compatible software recipes" is displayed |
449 | self.assertTrue("Compatible software recipes" in self.get_page_source()) | 387 | self.assertIn("Compatible software recipes", self.get_page_source()) |
450 | # Test search input | 388 | # Test search input |
451 | self._mixin_test_table_search_input( | 389 | self._mixin_test_table_search_input( |
452 | input_selector='search-input-softwarerecipestable', | 390 | input_selector='search-input-softwarerecipestable', |
@@ -455,12 +393,8 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
455 | table_selector='softwarerecipestable' | 393 | table_selector='softwarerecipestable' |
456 | ) | 394 | ) |
457 | # check "build recipe" button works | 395 | # check "build recipe" button works |
458 | rows = self.find_all('#softwarerecipestable tbody tr') | 396 | finder = lambda driver: self.find_all('#softwarerecipestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]/a') |
459 | image_to_build = rows[0] | 397 | build_btn = self.wait_until_element_clickable(finder) |
460 | build_btn = image_to_build.find_element( | ||
461 | By.XPATH, | ||
462 | '//td[@class="add-del-layers"]//a[1]' | ||
463 | ) | ||
464 | build_btn.click() | 398 | build_btn.click() |
465 | build_state = wait_until_build(self, 'queued cloning starting parsing failed') | 399 | build_state = wait_until_build(self, 'queued cloning starting parsing failed') |
466 | lastest_builds = self.driver.find_elements( | 400 | lastest_builds = self.driver.find_elements( |
@@ -468,11 +402,10 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
468 | '//div[@id="latest-builds"]/div' | 402 | '//div[@id="latest-builds"]/div' |
469 | ) | 403 | ) |
470 | self.assertTrue(len(lastest_builds) > 0) | 404 | self.assertTrue(len(lastest_builds) > 0) |
471 | last_build = lastest_builds[0] | 405 | # Find the latest builds, the last build and then the cancel button |
472 | cancel_button = last_build.find_element( | 406 | |
473 | By.XPATH, | 407 | finder = lambda driver: driver.find_elements(By.XPATH, '//div[@id="latest-builds"]/div')[0].find_element(By.XPATH, '//span[@class="cancel-build-btn pull-right alert-link"]') |
474 | '//span[@class="cancel-build-btn pull-right alert-link"]', | 408 | cancel_button = self.wait_until_element_clickable(finder) |
475 | ) | ||
476 | cancel_button.click() | 409 | cancel_button.click() |
477 | if 'starting' not in build_state: # change build state when cancelled in starting state | 410 | if 'starting' not in build_state: # change build state when cancelled in starting state |
478 | wait_until_build_cancelled(self) | 411 | wait_until_build_cancelled(self) |
@@ -510,7 +443,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
510 | """ | 443 | """ |
511 | self._navigate_to_config_nav('machinestable', 5) | 444 | self._navigate_to_config_nav('machinestable', 5) |
512 | # check title "Compatible machines" is displayed | 445 | # check title "Compatible machines" is displayed |
513 | self.assertTrue("Compatible machines" in self.get_page_source()) | 446 | self.assertIn("Compatible machines", self.get_page_source()) |
514 | # Test search input | 447 | # Test search input |
515 | self._mixin_test_table_search_input( | 448 | self._mixin_test_table_search_input( |
516 | input_selector='search-input-machinestable', | 449 | input_selector='search-input-machinestable', |
@@ -519,17 +452,13 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
519 | table_selector='machinestable' | 452 | table_selector='machinestable' |
520 | ) | 453 | ) |
521 | # check "Select machine" button works | 454 | # check "Select machine" button works |
522 | rows = self.find_all('#machinestable tbody tr') | 455 | finder = lambda driver: self.find_all('#machinestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]') |
523 | machine_to_select = rows[0] | 456 | select_btn = self.wait_until_element_clickable(finder) |
524 | select_btn = machine_to_select.find_element( | 457 | select_btn.click() |
525 | By.XPATH, | 458 | self.wait_until_visible('#project-machine-name') |
526 | '//td[@class="add-del-layers"]//a[1]' | ||
527 | ) | ||
528 | select_btn.send_keys(Keys.RETURN) | ||
529 | self.wait_until_visible('#config-nav') | ||
530 | project_machine_name = self.find('#project-machine-name') | 459 | project_machine_name = self.find('#project-machine-name') |
531 | self.assertTrue( | 460 | self.assertIn( |
532 | 'qemux86-64' in project_machine_name.text | 461 | 'qemux86-64', project_machine_name.text |
533 | ) | 462 | ) |
534 | # check "Add layer" button works | 463 | # check "Add layer" button works |
535 | self._navigate_to_config_nav('machinestable', 5) | 464 | self._navigate_to_config_nav('machinestable', 5) |
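One subtlety the machine and recipe hunks inherit from the old code: an XPath that starts with // is evaluated from the document root even when find_element() is called on a row element, so rows[0].find_element(By.XPATH, '//td[@class="add-del-layers"]') returns the first matching cell on the whole page. That first match is usually the row the test has just filtered down to, which is why these tests pass, but a genuinely per-row lookup would use a relative path:

    from selenium.webdriver.common.by import By

    row = self.find_all('#machinestable tbody tr')[0]
    # Absolute: first matching cell anywhere in the document.
    cell_on_page = row.find_element(By.XPATH, '//td[@class="add-del-layers"]')
    # Relative to `row`: the matching cell inside this specific row.
    cell_in_row = row.find_element(By.XPATH, './/td[@class="add-del-layers"]')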
@@ -540,16 +469,23 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
540 | searchBtn_selector='search-submit-machinestable', | 469 | searchBtn_selector='search-submit-machinestable', |
541 | table_selector='machinestable' | 470 | table_selector='machinestable' |
542 | ) | 471 | ) |
543 | self.wait_until_visible('#machinestable tbody tr', poll=3) | 472 | |
544 | rows = self.find_all('#machinestable tbody tr') | 473 | self.wait_until_visible('#machinestable tbody tr') |
545 | machine_to_add = rows[0] | 474 | # Locate a machine to add button |
546 | add_btn = machine_to_add.find_element(By.XPATH, '//td[@class="add-del-layers"]') | 475 | finder = lambda driver: self.find_all('#machinestable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]') |
476 | add_btn = self.wait_until_element_clickable(finder) | ||
547 | add_btn.click() | 477 | add_btn.click() |
548 | self.wait_until_visible('#change-notification') | 478 | self.wait_until_visible('#change-notification') |
549 | change_notification = self.find('#change-notification') | 479 | change_notification = self.find('#change-notification') |
550 | self.assertTrue( | 480 | self.assertIn( |
551 | f'You have added 1 layer to your project' in str(change_notification.text) | 481 | f'You have added 1 layer to your project', str(change_notification.text) |
552 | ) | 482 | ) |
483 | |||
484 | finder = lambda driver: self.find('#hide-alert') | ||
485 | hide_button = self.wait_until_element_clickable(finder) | ||
486 | hide_button.click() | ||
487 | self.wait_until_not_visible('#change-notification') | ||
488 | |||
553 | # check Machine table feature(show/hide column, pagination) | 489 | # check Machine table feature(show/hide column, pagination) |
554 | self._navigate_to_config_nav('machinestable', 5) | 490 | self._navigate_to_config_nav('machinestable', 5) |
555 | column_list = [ | 491 | column_list = [ |
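The added hide-alert steps dismiss the change notification and then wait for it to disappear before the next table interaction, so a lingering banner cannot occlude the next click. wait_until_not_visible() is assumed to be the inverse of the visibility helper; a sketch under that assumption:

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    # Hypothetical sketch; invisibility_of_element_located also counts an
    # element that has been removed from the DOM as "not visible".
    def wait_until_not_visible(self, selector, timeout=60):
        return WebDriverWait(self.driver, timeout).until(
            EC.invisibility_of_element_located((By.CSS_SELECTOR, selector)))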
@@ -580,7 +516,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
580 | """ | 516 | """ |
581 | self._navigate_to_config_nav('layerstable', 6) | 517 | self._navigate_to_config_nav('layerstable', 6) |
582 | # check title "Compatible layers" is displayed | 518 | # check title "Compatible layers" is displayed |
583 | self.assertTrue("Compatible layers" in self.get_page_source()) | 519 | self.assertIn("Compatible layers", self.get_page_source()) |
584 | # Test search input | 520 | # Test search input |
585 | input_text='meta-tanowrt' | 521 | input_text='meta-tanowrt' |
586 | self._mixin_test_table_search_input( | 522 | self._mixin_test_table_search_input( |
@@ -590,42 +526,44 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
590 | table_selector='layerstable' | 526 | table_selector='layerstable' |
591 | ) | 527 | ) |
592 | # check "Add layer" button works | 528 | # check "Add layer" button works |
593 | self.wait_until_visible('#layerstable tbody tr', poll=3) | 529 | self.wait_until_visible('#layerstable tbody tr') |
594 | rows = self.find_all('#layerstable tbody tr') | 530 | finder = lambda driver: self.find_all('#layerstable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]/a[@data-directive="add"]') |
595 | layer_to_add = rows[0] | 531 | add_btn = self.wait_until_element_clickable(finder) |
596 | add_btn = layer_to_add.find_element( | ||
597 | By.XPATH, | ||
598 | '//td[@class="add-del-layers"]' | ||
599 | ) | ||
600 | add_btn.click() | 532 | add_btn.click() |
601 | # check modal is displayed | 533 | # check modal is displayed |
602 | self.wait_until_visible('#dependencies-modal', poll=3) | 534 | self.wait_until_visible('#dependencies-modal') |
603 | list_dependencies = self.find_all('#dependencies-list li') | 535 | list_dependencies = self.find_all('#dependencies-list li') |
604 | # click on add-layers button | 536 | # click on add-layers button |
605 | add_layers_btn = self.driver.find_element( | 537 | finder = lambda driver: self.driver.find_element(By.XPATH, '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]') |
606 | By.XPATH, | 538 | add_layers_btn = self.wait_until_element_clickable(finder) |
607 | '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]' | ||
608 | ) | ||
609 | add_layers_btn.click() | 539 | add_layers_btn.click() |
610 | self.wait_until_visible('#change-notification') | 540 | self.wait_until_visible('#change-notification') |
611 | change_notification = self.find('#change-notification') | 541 | change_notification = self.find('#change-notification') |
612 | self.assertTrue( | 542 | self.assertIn( |
613 | f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies' in str(change_notification.text) | 543 | f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies', str(change_notification.text) |
614 | ) | 544 | ) |
545 | |||
546 | finder = lambda driver: self.find('#hide-alert') | ||
547 | hide_button = self.wait_until_element_clickable(finder) | ||
548 | hide_button.click() | ||
549 | self.wait_until_not_visible('#change-notification') | ||
550 | |||
615 | # check "Remove layer" button works | 551 | # check "Remove layer" button works |
616 | self.wait_until_visible('#layerstable tbody tr', poll=3) | 552 | self.wait_until_visible('#layerstable tbody tr') |
617 | rows = self.find_all('#layerstable tbody tr') | 553 | finder = lambda driver: self.find_all('#layerstable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]/a[@data-directive="remove"]') |
618 | layer_to_remove = rows[0] | 554 | remove_btn = self.wait_until_element_clickable(finder) |
619 | remove_btn = layer_to_remove.find_element( | ||
620 | By.XPATH, | ||
621 | '//td[@class="add-del-layers"]' | ||
622 | ) | ||
623 | remove_btn.click() | 555 | remove_btn.click() |
624 | self.wait_until_visible('#change-notification', poll=2) | 556 | self.wait_until_visible('#change-notification') |
625 | change_notification = self.find('#change-notification') | 557 | change_notification = self.find('#change-notification') |
626 | self.assertTrue( | 558 | self.assertIn( |
627 | f'You have removed 1 layer from your project: {input_text}' in str(change_notification.text) | 559 | f'You have removed 1 layer from your project: {input_text}', str(change_notification.text) |
628 | ) | 560 | ) |
561 | |||
562 | finder = lambda driver: self.find('#hide-alert') | ||
563 | hide_button = self.wait_until_element_clickable(finder) | ||
564 | hide_button.click() | ||
565 | self.wait_until_not_visible('#change-notification') | ||
566 | |||
629 | # check layers table feature(show/hide column, pagination) | 567 | # check layers table feature(show/hide column, pagination) |
630 | self._navigate_to_config_nav('layerstable', 6) | 568 | self._navigate_to_config_nav('layerstable', 6) |
631 | column_list = [ | 569 | column_list = [ |
@@ -656,7 +594,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
656 | """ | 594 | """ |
657 | self._navigate_to_config_nav('distrostable', 7) | 595 | self._navigate_to_config_nav('distrostable', 7) |
658 | # check title "Compatible distros" is displayed | 596 | # check title "Compatible distros" is displayed |
659 | self.assertTrue("Compatible Distros" in self.get_page_source()) | 597 | self.assertIn("Compatible Distros", self.get_page_source()) |
660 | # Test search input | 598 | # Test search input |
661 | input_text='poky-altcfg' | 599 | input_text='poky-altcfg' |
662 | self._mixin_test_table_search_input( | 600 | self._mixin_test_table_search_input( |
@@ -666,17 +604,14 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
666 | table_selector='distrostable' | 604 | table_selector='distrostable' |
667 | ) | 605 | ) |
668 | # check "Add distro" button works | 606 | # check "Add distro" button works |
669 | rows = self.find_all('#distrostable tbody tr') | 607 | self.wait_until_visible(".add-del-layers") |
670 | distro_to_add = rows[0] | 608 | finder = lambda driver: self.find_all('#distrostable tbody tr')[0].find_element(By.XPATH, '//td[@class="add-del-layers"]') |
671 | add_btn = distro_to_add.find_element( | 609 | add_btn = self.wait_until_element_clickable(finder) |
672 | By.XPATH, | ||
673 | '//td[@class="add-del-layers"]//a[1]' | ||
674 | ) | ||
675 | add_btn.click() | 610 | add_btn.click() |
676 | self.wait_until_visible('#change-notification', poll=2) | 611 | self.wait_until_visible('#change-notification') |
677 | change_notification = self.find('#change-notification') | 612 | change_notification = self.find('#change-notification') |
678 | self.assertTrue( | 613 | self.assertIn( |
679 | f'You have changed the distro to: {input_text}' in str(change_notification.text) | 614 | f'You have changed the distro to: {input_text}', str(change_notification.text) |
680 | ) | 615 | ) |
681 | # check distro table feature(show/hide column, pagination) | 616 | # check distro table feature(show/hide column, pagination) |
682 | self._navigate_to_config_nav('distrostable', 7) | 617 | self._navigate_to_config_nav('distrostable', 7) |
@@ -699,7 +634,7 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
699 | ) | 634 | ) |
700 | 635 | ||
701 | def test_single_layer_page(self): | 636 | def test_single_layer_page(self): |
702 | """ Test layer page | 637 | """ Test layer details page using meta-poky as an example (assumes is added to start with) |
703 | - Check if title is displayed | 638 | - Check if title is displayed |
704 | - Check add/remove layer button works | 639 | - Check add/remove layer button works |
705 | - Check tabs(layers, recipes, machines) are displayed | 640 | - Check tabs(layers, recipes, machines) are displayed |
@@ -708,45 +643,62 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
708 | - Check layer summary | 643 | - Check layer summary |
709 | - Check layer description | 644 | - Check layer description |
710 | """ | 645 | """ |
711 | url = reverse("layerdetails", args=(TestProjectPage.project_id, 8)) | 646 | self._navigate_to_config_nav('layerstable', 6) |
712 | self.get(url) | 647 | layer_link = self.driver.find_element(By.XPATH, '//tr/td[@class="layer__name"]/a[contains(text(),"meta-poky")]') |
648 | layer_link.click() | ||
713 | self.wait_until_visible('.page-header') | 649 | self.wait_until_visible('.page-header') |
714 | # check title is displayed | 650 | # check title is displayed |
715 | self.assertTrue(self.find('.page-header h1').is_displayed()) | 651 | self.assertTrue(self.find('.page-header h1').is_displayed()) |
716 | 652 | ||
717 | # check add layer button works | 653 | # check remove layer button works |
718 | remove_layer_btn = self.find('#add-remove-layer-btn') | 654 | finder = lambda driver: self.find('#add-remove-layer-btn') |
655 | remove_layer_btn = self.wait_until_element_clickable(finder) | ||
719 | remove_layer_btn.click() | 656 | remove_layer_btn.click() |
720 | self.wait_until_visible('#change-notification', poll=2) | 657 | self.wait_until_visible('#change-notification') |
721 | change_notification = self.find('#change-notification') | 658 | change_notification = self.find('#change-notification') |
722 | self.assertTrue( | 659 | self.assertIn( |
723 | f'You have removed 1 layer from your project' in str(change_notification.text) | 660 | f'You have removed 1 layer from your project', str(change_notification.text) |
724 | ) | 661 | ) |
725 | # check add layer button works, 18 is the random layer id | 662 | finder = lambda driver: self.find('#hide-alert') |
726 | add_layer_btn = self.find('#add-remove-layer-btn') | 663 | hide_button = self.wait_until_element_clickable(finder) |
664 | hide_button.click() | ||
665 | # check add layer button works | ||
666 | self.wait_until_not_visible('#change-notification') | ||
667 | finder = lambda driver: self.find('#add-remove-layer-btn') | ||
668 | add_layer_btn = self.wait_until_element_clickable(finder) | ||
727 | add_layer_btn.click() | 669 | add_layer_btn.click() |
728 | self.wait_until_visible('#change-notification') | 670 | self.wait_until_visible('#change-notification') |
729 | change_notification = self.find('#change-notification') | 671 | change_notification = self.find('#change-notification') |
730 | self.assertTrue( | 672 | self.assertIn( |
731 | f'You have added 1 layer to your project' in str(change_notification.text) | 673 | f'You have added 1 layer to your project', str(change_notification.text) |
732 | ) | 674 | ) |
675 | finder = lambda driver: self.find('#hide-alert') | ||
676 | hide_button = self.wait_until_element_clickable(finder) | ||
677 | hide_button.click() | ||
678 | self.wait_until_not_visible('#change-notification') | ||
733 | # check tabs(layers, recipes, machines) are displayed | 679 | # check tabs(layers, recipes, machines) are displayed |
734 | tabs = self.find_all('.nav-tabs li') | 680 | tabs = self.find_all('.nav-tabs li') |
735 | self.assertEqual(len(tabs), 3) | 681 | self.assertEqual(len(tabs), 3) |
736 | # Check first tab | 682 | # Check first tab |
737 | tabs[0].click() | 683 | tabs[0].click() |
738 | self.assertTrue( | 684 | self.assertIn( |
739 | 'active' in str(self.find('#information').get_attribute('class')) | 685 | 'active', str(self.find('#information').get_attribute('class')) |
740 | ) | 686 | ) |
741 | # Check second tab | 687 | # Check second tab (recipes) |
688 | self.wait_until_visible('.nav-tabs') | ||
689 | # Ensure page is scrolled to the top | ||
690 | self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})') | ||
742 | tabs[1].click() | 691 | tabs[1].click() |
743 | self.assertTrue( | 692 | self.assertIn( |
744 | 'active' in str(self.find('#recipes').get_attribute('class')) | 693 | 'active', str(self.find('#recipes').get_attribute('class')) |
745 | ) | 694 | ) |
746 | # Check third tab | 695 | # Check third tab (machines) |
696 | self.wait_until_visible('.nav-tabs') | ||
697 | # Ensure page is scrolled to the top | ||
698 | self.driver.execute_script('window.scrollTo({behavior: "instant", top: 0, left: 0})') | ||
747 | tabs[2].click() | 699 | tabs[2].click() |
748 | self.assertTrue( | 700 | self.assertIn( |
749 | 'active' in str(self.find('#machines').get_attribute('class')) | 701 | 'active', str(self.find('#machines').get_attribute('class')) |
750 | ) | 702 | ) |
751 | # Check left section is displayed | 703 | # Check left section is displayed |
752 | section = self.find('.well') | 704 | section = self.find('.well') |
@@ -755,9 +707,13 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
755 | section.find_element(By.XPATH, '//h2[1]').is_displayed() | 707 | section.find_element(By.XPATH, '//h2[1]').is_displayed() |
756 | ) | 708 | ) |
757 | # Check layer summary | 709 | # Check layer summary |
758 | self.assertTrue("Summary" in section.text) | 710 | self.assertIn("Summary", section.text) |
759 | # Check layer description | 711 | # Check layer description |
760 | self.assertTrue("Description" in section.text) | 712 | self.assertIn("Description", section.text) |
713 | |||
714 | @pytest.mark.django_db | ||
715 | @pytest.mark.order("last") | ||
716 | class TestProjectPageRecipes(TestProjectPageBase): | ||
761 | 717 | ||
762 | def test_single_recipe_page(self): | 718 | def test_single_recipe_page(self): |
763 | """ Test recipe page | 719 | """ Test recipe page |
@@ -767,7 +723,12 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
767 | - Check recipe: name, summary, description, Version, Section, | 723 | - Check recipe: name, summary, description, Version, Section, |
768 | License, Approx. packages included, Approx. size, Recipe file | 724 | License, Approx. packages included, Approx. size, Recipe file |
769 | """ | 725 | """ |
770 | url = reverse("recipedetails", args=(TestProjectPage.project_id, 53428)) | 726 | # Use a recipe which is likely to exist in the layer index but not enabled |
727 | # in poky out of the box - xen-image-minimal from meta-virtualization | ||
728 | self._navigate_to_project_page() | ||
729 | prj = Project.objects.get(pk=TestProjectPageBase.project_id) | ||
730 | recipe_id = prj.get_all_compatible_recipes().get(name="xen-image-minimal").pk | ||
731 | url = reverse("recipedetails", args=(TestProjectPageBase.project_id, recipe_id)) | ||
771 | self.get(url) | 732 | self.get(url) |
772 | self.wait_until_visible('.page-header') | 733 | self.wait_until_visible('.page-header') |
773 | # check title is displayed | 734 | # check title is displayed |
@@ -782,11 +743,33 @@ class TestProjectPage(SeleniumFunctionalTestCase): | |||
782 | section.find_element(By.XPATH, '//h2[1]').is_displayed() | 743 | section.find_element(By.XPATH, '//h2[1]').is_displayed() |
783 | ) | 744 | ) |
784 | # Check recipe sections details info are displayed | 745 | # Check recipe sections details info are displayed |
785 | self.assertTrue("Summary" in section.text) | 746 | self.assertIn("Summary", section.text) |
786 | self.assertTrue("Description" in section.text) | 747 | self.assertIn("Description", section.text) |
787 | self.assertTrue("Version" in section.text) | 748 | self.assertIn("Version", section.text) |
788 | self.assertTrue("Section" in section.text) | 749 | self.assertIn("Section", section.text) |
789 | self.assertTrue("License" in section.text) | 750 | self.assertIn("License", section.text) |
790 | self.assertTrue("Approx. packages included" in section.text) | 751 | self.assertIn("Approx. packages included", section.text) |
791 | self.assertTrue("Approx. package size" in section.text) | 752 | self.assertIn("Approx. package size", section.text) |
792 | self.assertTrue("Recipe file" in section.text) | 753 | self.assertIn("Recipe file", section.text) |
754 | |||
755 | def test_image_recipe_editColumn(self): | ||
756 | """ Test the edit column feature in image recipe table on project page """ | ||
757 | self._get_create_builds(success=10, failure=10) | ||
758 | |||
759 | url = reverse('projectimagerecipes', args=(TestProjectPageBase.project_id,)) | ||
760 | self.get(url) | ||
761 | self.wait_until_present('#imagerecipestable tbody tr') | ||
762 | |||
763 | column_list = [ | ||
764 | 'get_description_or_summary', 'layer_version__get_vcs_reference', | ||
765 | 'layer_version__layer__name', 'license', 'recipe-file', 'section', | ||
766 | 'version' | ||
767 | ] | ||
768 | |||
769 | # Check that we can hide the edit column | ||
770 | self._mixin_test_table_edit_column( | ||
771 | 'imagerecipestable', | ||
772 | 'edit-columns-button', | ||
773 | [f'checkbox-{column}' for column in column_list] | ||
774 | ) | ||
775 | |||
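
A recurring pattern in the updated tests above is handing a finder lambda to wait_until_element_clickable so the element is re-located on every poll instead of being cached and going stale when the page re-renders. A minimal sketch of how such a helper can be built on Selenium's WebDriverWait follows; the helper name and signature here are assumptions for illustration, the real implementation lives in the Toaster Selenium test base class:

from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.ui import WebDriverWait

def wait_until_element_clickable(driver, finder, timeout=30):
    # Re-run the finder on every poll: returning False keeps WebDriverWait
    # polling, while a displayed and enabled element ends the wait.
    def check(drv):
        try:
            element = finder(drv)
            return element if element.is_displayed() and element.is_enabled() else False
        except WebDriverException:
            return False  # not in the DOM yet, or mid re-render
    return WebDriverWait(driver, timeout).until(check)

Usage then mirrors the diff, e.g. add_btn = wait_until_element_clickable(driver, lambda d: d.find_element(By.ID, 'add-layer-btn')), so a table re-render between lookup and click cannot hand back a stale reference.
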
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py index eb905ddf3f..80c53e1544 100644 --- a/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py +++ b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py | |||
@@ -7,72 +7,27 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | import string | 9 | import string |
10 | import random | 10 | import time |
11 | import pytest | 11 | import pytest |
12 | from django.urls import reverse | 12 | from django.urls import reverse |
13 | from selenium.webdriver import Keys | 13 | from selenium.webdriver import Keys |
14 | from selenium.webdriver.support.select import Select | 14 | from selenium.webdriver.support.select import Select |
15 | from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException | 15 | from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException |
16 | from orm.models import Project | ||
17 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase | 16 | from tests.functional.functional_helpers import SeleniumFunctionalTestCase |
18 | from selenium.webdriver.common.by import By | 17 | from selenium.webdriver.common.by import By |
19 | 18 | ||
20 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled | 19 | from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled |
21 | 20 | ||
22 | 21 | class TestProjectConfigTabBase(SeleniumFunctionalTestCase): | |
23 | @pytest.mark.django_db | ||
24 | @pytest.mark.order("last") | ||
25 | class TestProjectConfigTab(SeleniumFunctionalTestCase): | ||
26 | PROJECT_NAME = 'TestProjectConfigTab' | 22 | PROJECT_NAME = 'TestProjectConfigTab' |
27 | project_id = None | 23 | project_id = None |
28 | 24 | ||
29 | def _create_project(self, project_name, **kwargs): | ||
30 | """ Create/Test new project using: | ||
31 | - Project Name: Any string | ||
32 | - Release: Any string | ||
33 | - Merge Toaster settings: True or False | ||
34 | """ | ||
35 | release = kwargs.get('release', '3') | ||
36 | self.get(reverse('newproject')) | ||
37 | self.wait_until_visible('#new-project-name') | ||
38 | self.find("#new-project-name").send_keys(project_name) | ||
39 | select = Select(self.find("#projectversion")) | ||
40 | select.select_by_value(release) | ||
41 | |||
42 | # check merge toaster settings | ||
43 | checkbox = self.find('.checkbox-mergeattr') | ||
44 | if not checkbox.is_selected(): | ||
45 | checkbox.click() | ||
46 | |||
47 | if self.PROJECT_NAME != 'TestProjectConfigTab': | ||
48 | # Reset project name if it's not the default one | ||
49 | self.PROJECT_NAME = 'TestProjectConfigTab' | ||
50 | |||
51 | self.find("#create-project-button").click() | ||
52 | |||
53 | try: | ||
54 | self.wait_until_visible('#hint-error-project-name', poll=3) | ||
55 | url = reverse('project', args=(TestProjectConfigTab.project_id, )) | ||
56 | self.get(url) | ||
57 | self.wait_until_visible('#config-nav', poll=3) | ||
58 | except TimeoutException: | ||
59 | self.wait_until_visible('#config-nav', poll=3) | ||
60 | |||
61 | def _random_string(self, length): | ||
62 | return ''.join( | ||
63 | random.choice(string.ascii_letters) for _ in range(length) | ||
64 | ) | ||
65 | |||
66 | def _navigate_to_project_page(self): | 25 | def _navigate_to_project_page(self): |
67 | # Navigate to project page | 26 | # Navigate to project page |
68 | if TestProjectConfigTab.project_id is None: | 27 | if TestProjectConfigTabBase.project_id is None: |
69 | self._create_project(project_name=self._random_string(10)) | 28 | TestProjectConfigTabBase.project_id = self.create_new_project(self.PROJECT_NAME, '3', None, True) |
70 | current_url = self.driver.current_url | 29 | url = reverse('project', args=(TestProjectConfigTabBase.project_id,)) |
71 | TestProjectConfigTab.project_id = get_projectId_from_url( | 30 | self.get(url) |
72 | current_url) | ||
73 | else: | ||
74 | url = reverse('project', args=(TestProjectConfigTab.project_id,)) | ||
75 | self.get(url) | ||
76 | self.wait_until_visible('#config-nav') | 31 | self.wait_until_visible('#config-nav') |
77 | 32 | ||
78 | def _create_builds(self): | 33 | def _create_builds(self): |
@@ -88,8 +43,8 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
88 | '//div[@id="latest-builds"]/div', | 43 | '//div[@id="latest-builds"]/div', |
89 | ) | 44 | ) |
90 | last_build = lastest_builds[0] | 45 | last_build = lastest_builds[0] |
91 | self.assertTrue( | 46 | self.assertIn( |
92 | 'foo' in str(last_build.text) | 47 | 'foo', str(last_build.text) |
93 | ) | 48 | ) |
94 | last_build = lastest_builds[0] | 49 | last_build = lastest_builds[0] |
95 | try: | 50 | try: |
@@ -114,6 +69,8 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
114 | config_nav = self.find('#config-nav') | 69 | config_nav = self.find('#config-nav') |
115 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] | 70 | return config_nav.find_elements(By.TAG_NAME, 'li')[index] |
116 | 71 | ||
72 | class TestProjectConfigTab(TestProjectConfigTabBase): | ||
73 | |||
117 | def test_project_config_nav(self): | 74 | def test_project_config_nav(self): |
118 | """ Test project config tab navigation: | 75 | """ Test project config tab navigation: |
119 | - Check if the menu is displayed and contains the right elements: | 76 | - Check if the menu is displayed and contains the right elements: |
@@ -138,48 +95,48 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
138 | 95 | ||
139 | def check_config_nav_item(index, item_name, url): | 96 | def check_config_nav_item(index, item_name, url): |
140 | item = _get_config_nav_item(index) | 97 | item = _get_config_nav_item(index) |
141 | self.assertTrue(item_name in item.text) | 98 | self.assertIn(item_name, item.text) |
142 | self.assertTrue(item.get_attribute('class') == 'active') | 99 | self.assertEqual(item.get_attribute('class'), 'active') |
143 | self.assertTrue(url in self.driver.current_url) | 100 | self.assertIn(url, self.driver.current_url) |
144 | 101 | ||
145 | # check if the menu contains the right elements | 102 | # check if the menu contains the right elements |
146 | # COMPATIBLE METADATA | 103 | # COMPATIBLE METADATA |
147 | compatible_metadata = _get_config_nav_item(1) | 104 | compatible_metadata = _get_config_nav_item(1) |
148 | self.assertTrue( | 105 | self.assertIn( |
149 | "compatible metadata" in compatible_metadata.text.lower() | 106 | "compatible metadata", compatible_metadata.text.lower() |
150 | ) | 107 | ) |
151 | # EXTRA CONFIGURATION | 108 | # EXTRA CONFIGURATION |
152 | extra_configuration = _get_config_nav_item(8) | 109 | extra_configuration = _get_config_nav_item(8) |
153 | self.assertTrue( | 110 | self.assertIn( |
154 | "extra configuration" in extra_configuration.text.lower() | 111 | "extra configuration", extra_configuration.text.lower() |
155 | ) | 112 | ) |
156 | # Actions | 113 | # Actions |
157 | actions = _get_config_nav_item(10) | 114 | actions = _get_config_nav_item(10) |
158 | self.assertTrue("actions" in str(actions.text).lower()) | 115 | self.assertIn("actions", str(actions.text).lower()) |
159 | 116 | ||
160 | conf_nav_list = [ | 117 | conf_nav_list = [ |
161 | # config | 118 | # config |
162 | [0, 'Configuration', | 119 | [0, 'Configuration', |
163 | f"/toastergui/project/{TestProjectConfigTab.project_id}"], | 120 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}"], |
164 | # custom images | 121 | # custom images |
165 | [2, 'Custom images', | 122 | [2, 'Custom images', |
166 | f"/toastergui/project/{TestProjectConfigTab.project_id}/customimages"], | 123 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/customimages"], |
167 | # image recipes | 124 | # image recipes |
168 | [3, 'Image recipes', | 125 | [3, 'Image recipes', |
169 | f"/toastergui/project/{TestProjectConfigTab.project_id}/images"], | 126 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/images"], |
170 | # software recipes | 127 | # software recipes |
171 | [4, 'Software recipes', | 128 | [4, 'Software recipes', |
172 | f"/toastergui/project/{TestProjectConfigTab.project_id}/softwarerecipes"], | 129 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/softwarerecipes"], |
173 | # machines | 130 | # machines |
174 | [5, 'Machines', | 131 | [5, 'Machines', |
175 | f"/toastergui/project/{TestProjectConfigTab.project_id}/machines"], | 132 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/machines"], |
176 | # layers | 133 | # layers |
177 | [6, 'Layers', | 134 | [6, 'Layers', |
178 | f"/toastergui/project/{TestProjectConfigTab.project_id}/layers"], | 135 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/layers"], |
179 | # distro | 136 | # distro |
180 | [7, 'Distros', | 137 | [7, 'Distros', |
181 | f"/toastergui/project/{TestProjectConfigTab.project_id}/distros"], | 138 | f"/toastergui/project/{TestProjectConfigTabBase.project_id}/distros"], |
182 | # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTab.project_id}/configuration"], # bitbake variables | 139 | # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTabBase.project_id}/configuration"], # bitbake variables |
183 | ] | 140 | ] |
184 | for index, item_name, url in conf_nav_list: | 141 | for index, item_name, url in conf_nav_list: |
185 | item = _get_config_nav_item(index) | 142 | item = _get_config_nav_item(index) |
@@ -253,7 +210,7 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
253 | def test_show_rows(row_to_show, show_row_link): | 210 | def test_show_rows(row_to_show, show_row_link): |
254 | # Check that we can show rows == row_to_show | 211 | # Check that we can show rows == row_to_show |
255 | show_row_link.select_by_value(str(row_to_show)) | 212 | show_row_link.select_by_value(str(row_to_show)) |
256 | self.wait_until_visible('#imagerecipestable tbody tr', poll=3) | 213 | self.wait_until_visible('#imagerecipestable tbody tr') |
257 | # check at least some rows are visible | 214 | # check at least some rows are visible |
258 | self.assertTrue( | 215 | self.assertTrue( |
259 | len(self.find_all('#imagerecipestable tbody tr')) > 0 | 216 | len(self.find_all('#imagerecipestable tbody tr')) > 0 |
@@ -299,9 +256,11 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
299 | - meta-poky | 256 | - meta-poky |
300 | - meta-yocto-bsp | 257 | - meta-yocto-bsp |
301 | """ | 258 | """ |
302 | # Create a new project for this test | 259 | project_id = self.create_new_project(self.PROJECT_NAME + "-ST", '3', None, True) |
303 | project_name = self._random_string(10) | 260 | url = reverse('project', args=(project_id,)) |
304 | self._create_project(project_name=project_name) | 261 | self.get(url) |
262 | self.wait_until_visible('#config-nav') | ||
263 | |||
305 | # check if the menu is displayed | 264 | # check if the menu is displayed |
306 | self.wait_until_visible('#project-page') | 265 | self.wait_until_visible('#project-page') |
307 | block_l = self.driver.find_element( | 266 | block_l = self.driver.find_element( |
@@ -313,7 +272,7 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
313 | def check_machine_distro(self, item_name, new_item_name, block_id): | 272 | def check_machine_distro(self, item_name, new_item_name, block_id): |
314 | block = self.find(f'#{block_id}') | 273 | block = self.find(f'#{block_id}') |
315 | title = block.find_element(By.TAG_NAME, 'h3') | 274 | title = block.find_element(By.TAG_NAME, 'h3') |
316 | self.assertTrue(item_name.capitalize() in title.text) | 275 | self.assertIn(item_name.capitalize(), title.text) |
317 | edit_btn = self.find(f'#change-{item_name}-toggle') | 276 | edit_btn = self.find(f'#change-{item_name}-toggle') |
318 | edit_btn.click() | 277 | edit_btn.click() |
319 | self.wait_until_visible(f'#{item_name}-change-input') | 278 | self.wait_until_visible(f'#{item_name}-change-input') |
@@ -324,12 +283,15 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
324 | change_btn.click() | 283 | change_btn.click() |
325 | self.wait_until_visible(f'#project-{item_name}-name') | 284 | self.wait_until_visible(f'#project-{item_name}-name') |
326 | project_name = self.find(f'#project-{item_name}-name') | 285 | project_name = self.find(f'#project-{item_name}-name') |
327 | self.assertTrue(new_item_name in project_name.text) | 286 | self.assertIn(new_item_name, project_name.text) |
328 | # check change notification is displayed | 287 | # check change notification is displayed |
329 | change_notification = self.find('#change-notification') | 288 | change_notification = self.find('#change-notification') |
330 | self.assertTrue( | 289 | self.assertIn( |
331 | f'You have changed the {item_name} to: {new_item_name}' in change_notification.text | 290 | f'You have changed the {item_name} to: {new_item_name}', change_notification.text |
332 | ) | 291 | ) |
292 | hide_button = self.find('#hide-alert') | ||
293 | hide_button.click() | ||
294 | self.wait_until_not_visible('#change-notification') | ||
333 | 295 | ||
334 | # Machine | 296 | # Machine |
335 | check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section') | 297 | check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section') |
@@ -338,97 +300,51 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
338 | 300 | ||
339 | # Project release | 301 | # Project release |
340 | title = project_release.find_element(By.TAG_NAME, 'h3') | 302 | title = project_release.find_element(By.TAG_NAME, 'h3') |
341 | self.assertTrue("Project release" in title.text) | 303 | self.assertIn("Project release", title.text) |
342 | self.assertTrue( | 304 | self.assertIn( |
343 | "Yocto Project master" in self.find('#project-release-title').text | 305 | "Yocto Project master", self.find('#project-release-title').text |
344 | ) | 306 | ) |
345 | # Layers | 307 | # Layers |
346 | title = layers.find_element(By.TAG_NAME, 'h3') | 308 | title = layers.find_element(By.TAG_NAME, 'h3') |
347 | self.assertTrue("Layers" in title.text) | 309 | self.assertIn("Layers", title.text) |
310 | self.wait_until_clickable('#layer-add-input') | ||
348 | # check at least three layers are displayed | 311 | # check at least three layers are displayed |
349 | # openembedded-core | 312 | # openembedded-core |
350 | # meta-poky | 313 | # meta-poky |
351 | # meta-yocto-bsp | 314 | # meta-yocto-bsp |
352 | layers_list = layers.find_element(By.ID, 'layers-in-project-list') | 315 | layer_list_items = [] |
353 | layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li') | 316 | starttime = time.time() |
317 | while len(layer_list_items) < 3: | ||
318 | layers_list = self.driver.find_element(By.ID, 'layers-in-project-list') | ||
319 | layer_list_items = layers_list.find_elements(By.TAG_NAME, 'li') | ||
320 | if time.time() > (starttime + 30): | ||
321 | self.fail("Layer list didn't contain at least 3 items within 30s (contained %d)" % len(layer_list_items)) | ||
322 | |||
354 | # remove all layers except the first three layers | 323 | # remove all layers except the first three layers |
355 | for i in range(3, len(layers_list_items)): | 324 | for i in range(3, len(layer_list_items)): |
356 | layers_list_items[i].find_element(By.TAG_NAME, 'span').click() | 325 | layer_list_items[i].find_element(By.TAG_NAME, 'span').click() |
326 | |||
357 | # check can add a layer if exists | 327 | # check can add a layer if exists |
358 | add_layer_input = layers.find_element(By.ID, 'layer-add-input') | 328 | add_layer_input = layers.find_element(By.ID, 'layer-add-input') |
359 | add_layer_input.send_keys('meta-oe') | 329 | add_layer_input.send_keys('meta-oe') |
360 | self.wait_until_visible('#layer-container > form > div > span > div') | 330 | self.wait_until_visible('#layer-container > form > div > span > div') |
361 | dropdown_item = self.driver.find_element( | 331 | self.wait_until_visible('.dropdown-menu') |
362 | By.XPATH, | 332 | finder = lambda driver: driver.find_element(By.XPATH, '//*[@id="layer-container"]/form/div/span/div/div/div') |
363 | '//*[@id="layer-container"]/form/div/span/div' | 333 | dropdown_item = self.wait_until_element_clickable(finder) |
364 | ) | 334 | dropdown_item.click() |
365 | try: | 335 | self.wait_until_clickable('#add-layer-btn') |
366 | dropdown_item.click() | ||
367 | except ElementClickInterceptedException: | ||
368 | self.skipTest( | ||
369 | "layer-container dropdown item click intercepted. Element not properly visible.") | ||
370 | add_layer_btn = layers.find_element(By.ID, 'add-layer-btn') | 336 | add_layer_btn = layers.find_element(By.ID, 'add-layer-btn') |
371 | add_layer_btn.click() | 337 | add_layer_btn.click() |
372 | self.wait_until_visible('#layers-in-project-list') | 338 | self.wait_until_visible('#layers-in-project-list') |
373 | # check layer is added | ||
374 | layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li') | ||
375 | self.assertTrue(len(layers_list_items) == 4) | ||
376 | 339 | ||
377 | def test_most_build_recipes(self): | 340 | # check layer is added |
378 | """ Test most build recipes block contains""" | 341 | layer_list_items = [] |
379 | def rebuild_from_most_build_recipes(recipe_list_items): | 342 | starttime = time.time() |
380 | checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input') | 343 | while len(layer_list_items) < 4: |
381 | checkbox.click() | 344 | layers_list = self.driver.find_element(By.ID, 'layers-in-project-list') |
382 | build_btn = self.find('#freq-build-btn') | 345 | layer_list_items = layers_list.find_elements(By.TAG_NAME, 'li') |
383 | build_btn.click() | 346 | if time.time() > (starttime + 30): |
384 | self.wait_until_visible('#latest-builds') | 347 | self.fail("Layer list didn't contain at least 4 items within 30s (contained %d)" % len(layer_list_items)) |
385 | wait_until_build(self, 'queued cloning starting parsing failed') | ||
386 | lastest_builds = self.driver.find_elements( | ||
387 | By.XPATH, | ||
388 | '//div[@id="latest-builds"]/div' | ||
389 | ) | ||
390 | self.assertTrue(len(lastest_builds) >= 2) | ||
391 | last_build = lastest_builds[0] | ||
392 | try: | ||
393 | cancel_button = last_build.find_element( | ||
394 | By.XPATH, | ||
395 | '//span[@class="cancel-build-btn pull-right alert-link"]', | ||
396 | ) | ||
397 | cancel_button.click() | ||
398 | except NoSuchElementException: | ||
399 | # Skip if the build is already cancelled | ||
400 | pass | ||
401 | wait_until_build_cancelled(self) | ||
402 | # Create a new project for remaining asserts | ||
403 | project_name = self._random_string(10) | ||
404 | self._create_project(project_name=project_name, release='2') | ||
405 | current_url = self.driver.current_url | ||
406 | TestProjectConfigTab.project_id = get_projectId_from_url(current_url) | ||
407 | url = current_url.split('?')[0] | ||
408 | |||
409 | # Create a new builds | ||
410 | self._create_builds() | ||
411 | |||
412 | # back to project page | ||
413 | self.driver.get(url) | ||
414 | |||
415 | self.wait_until_visible('#project-page', poll=3) | ||
416 | |||
417 | # Most built recipes | ||
418 | most_built_recipes = self.driver.find_element( | ||
419 | By.XPATH, '//*[@id="project-page"]/div[1]/div[3]') | ||
420 | title = most_built_recipes.find_element(By.TAG_NAME, 'h3') | ||
421 | self.assertTrue("Most built recipes" in title.text) | ||
422 | # check can select a recipe and build it | ||
423 | self.wait_until_visible('#freq-build-list', poll=3) | ||
424 | recipe_list = self.find('#freq-build-list') | ||
425 | recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li') | ||
426 | self.assertTrue( | ||
427 | len(recipe_list_items) > 0, | ||
428 | msg="Any recipes found in the most built recipes list", | ||
429 | ) | ||
430 | rebuild_from_most_build_recipes(recipe_list_items) | ||
431 | TestProjectConfigTab.project_id = None # reset project id | ||
432 | 348 | ||
433 | def test_project_page_tab_importlayer(self): | 349 | def test_project_page_tab_importlayer(self): |
434 | """ Test project page tab import layer """ | 350 | """ Test project page tab import layer """ |
@@ -466,42 +382,42 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
466 | layers = block_l.find_element(By.ID, 'layer-container') | 382 | layers = block_l.find_element(By.ID, 'layer-container') |
467 | layers_list = layers.find_element(By.ID, 'layers-in-project-list') | 383 | layers_list = layers.find_element(By.ID, 'layers-in-project-list') |
468 | layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li') | 384 | layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li') |
469 | self.assertTrue( | 385 | self.assertIn( |
470 | 'meta-fake' in str(layers_list_items[-1].text) | 386 | 'meta-fake', str(layers_list_items[-1].text) |
471 | ) | 387 | ) |
472 | 388 | ||
473 | def test_project_page_custom_image_no_image(self): | 389 | def test_project_page_custom_image_no_image(self): |
474 | """ Test project page tab "New custom image" when no custom image """ | 390 | """ Test project page tab "New custom image" when no custom image """ |
475 | project_name = self._random_string(10) | 391 | project_id = self.create_new_project(self.PROJECT_NAME + "-CustomImage", '3', None, True) |
476 | self._create_project(project_name=project_name) | 392 | url = reverse('project', args=(project_id,)) |
477 | current_url = self.driver.current_url | 393 | self.get(url) |
478 | TestProjectConfigTab.project_id = get_projectId_from_url(current_url) | 394 | self.wait_until_visible('#config-nav') |
395 | |||
479 | # navigate to "Custom image" tab | 396 | # navigate to "Custom image" tab |
480 | custom_image_section = self._get_config_nav_item(2) | 397 | custom_image_section = self._get_config_nav_item(2) |
481 | custom_image_section.click() | 398 | custom_image_section.click() |
482 | self.wait_until_visible('#empty-state-customimagestable') | 399 | self.wait_until_visible('#empty-state-customimagestable') |
483 | 400 | ||
484 | # Check message when no custom image | 401 | # Check message when no custom image |
485 | self.assertTrue( | 402 | self.assertIn( |
486 | "You have not created any custom images yet." in str( | 403 | "You have not created any custom images yet.", str( |
487 | self.find('#empty-state-customimagestable').text | 404 | self.find('#empty-state-customimagestable').text |
488 | ) | 405 | ) |
489 | ) | 406 | ) |
490 | div_empty_msg = self.find('#empty-state-customimagestable') | 407 | div_empty_msg = self.find('#empty-state-customimagestable') |
491 | link_create_custom_image = div_empty_msg.find_element( | 408 | link_create_custom_image = div_empty_msg.find_element( |
492 | By.TAG_NAME, 'a') | 409 | By.TAG_NAME, 'a') |
493 | self.assertTrue(TestProjectConfigTab.project_id is not None) | 410 | self.assertTrue(TestProjectConfigTabBase.project_id is not None) |
494 | self.assertTrue( | 411 | self.assertIn( |
495 | f"/toastergui/project/{TestProjectConfigTab.project_id}/newcustomimage" in str( | 412 | f"/toastergui/project/{project_id}/newcustomimage", str( |
496 | link_create_custom_image.get_attribute('href') | 413 | link_create_custom_image.get_attribute('href') |
497 | ) | 414 | ) |
498 | ) | 415 | ) |
499 | self.assertTrue( | 416 | self.assertIn( |
500 | "Create your first custom image" in str( | 417 | "Create your first custom image", str( |
501 | link_create_custom_image.text | 418 | link_create_custom_image.text |
502 | ) | 419 | ) |
503 | ) | 420 | ) |
504 | TestProjectConfigTab.project_id = None # reset project id | ||
505 | 421 | ||
506 | def test_project_page_image_recipe(self): | 422 | def test_project_page_image_recipe(self): |
507 | """ Test project page section images | 423 | """ Test project page section images |
@@ -526,3 +442,66 @@ class TestProjectConfigTab(SeleniumFunctionalTestCase): | |||
526 | self.wait_until_visible('#imagerecipestable tbody tr') | 442 | self.wait_until_visible('#imagerecipestable tbody tr') |
527 | rows = self.find_all('#imagerecipestable tbody tr') | 443 | rows = self.find_all('#imagerecipestable tbody tr') |
528 | self.assertTrue(len(rows) > 0) | 444 | self.assertTrue(len(rows) > 0) |
445 | |||
446 | @pytest.mark.django_db | ||
447 | @pytest.mark.order("last") | ||
448 | class TestProjectConfigTabDB(TestProjectConfigTabBase): | ||
449 | |||
450 | def test_most_build_recipes(self): | ||
451 | """ Test most build recipes block contains""" | ||
452 | def rebuild_from_most_build_recipes(recipe_list_items): | ||
453 | checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input') | ||
454 | checkbox.click() | ||
455 | build_btn = self.find('#freq-build-btn') | ||
456 | build_btn.click() | ||
457 | self.wait_until_visible('#latest-builds') | ||
458 | wait_until_build(self, 'queued cloning starting parsing failed') | ||
459 | latest_builds = self.driver.find_elements( | ||
460 | By.XPATH, | ||
461 | '//div[@id="latest-builds"]/div' | ||
462 | ) | ||
463 | self.assertTrue(len(latest_builds) >= 2) | ||
464 | last_build = latest_builds[0] | ||
465 | try: | ||
466 | cancel_button = last_build.find_element( | ||
467 | By.XPATH, | ||
468 | '//span[@class="cancel-build-btn pull-right alert-link"]', | ||
469 | ) | ||
470 | cancel_button.click() | ||
471 | except NoSuchElementException: | ||
472 | # Skip if the build is already cancelled | ||
473 | pass | ||
474 | wait_until_build_cancelled(self) | ||
475 | |||
476 | # Create a new project for remaining asserts | ||
477 | project_id = self.create_new_project(self.PROJECT_NAME + "-MostBuilt", '2', None, True) | ||
478 | url = reverse('project', args=(project_id,)) | ||
479 | self.get(url) | ||
480 | self.wait_until_visible('#config-nav') | ||
481 | |||
482 | current_url = self.driver.current_url | ||
483 | url = current_url.split('?')[0] | ||
484 | |||
485 | # Create new builds | ||
486 | self._create_builds() | ||
487 | |||
488 | # back to project page | ||
489 | self.driver.get(url) | ||
490 | |||
491 | self.wait_until_visible('#project-page') | ||
492 | |||
493 | # Most built recipes | ||
494 | most_built_recipes = self.driver.find_element( | ||
495 | By.XPATH, '//*[@id="project-page"]/div[1]/div[3]') | ||
496 | title = most_built_recipes.find_element(By.TAG_NAME, 'h3') | ||
497 | self.assertIn("Most built recipes", title.text) | ||
498 | # check can select a recipe and build it | ||
499 | self.wait_until_visible('#freq-build-list') | ||
500 | recipe_list = self.find('#freq-build-list') | ||
501 | recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li') | ||
502 | self.assertTrue( | ||
503 | len(recipe_list_items) > 0, | ||
504 | msg="No recipes found in the most built recipes list", | ||
505 | ) | ||
506 | rebuild_from_most_build_recipes(recipe_list_items) | ||
507 | |||
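
The layers check above replaces a one-shot element lookup with an inline bounded polling loop: re-query the list until it has enough rows, and fail after 30 seconds. The same idea as a reusable sketch, using only the standard library; the helper name is ours, not Toaster's:

import time

def wait_for_list_length(find_items, minimum, timeout=30, poll=1):
    # Re-evaluate find_items on every pass so no stale elements are held;
    # give up with a clear message once the deadline passes.
    deadline = time.time() + timeout
    while True:
        items = find_items()
        if len(items) >= minimum:
            return items
        if time.time() > deadline:
            raise AssertionError(
                "list didn't reach %d items within %ds (contained %d)"
                % (minimum, timeout, len(items)))
        time.sleep(poll)

In the test this would be called as wait_for_list_length(lambda: driver.find_element(By.ID, 'layers-in-project-list').find_elements(By.TAG_NAME, 'li'), 3).
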
diff --git a/bitbake/lib/toaster/tests/functional/utils.py b/bitbake/lib/toaster/tests/functional/utils.py index 7269fa1805..72345aef9f 100644 --- a/bitbake/lib/toaster/tests/functional/utils.py +++ b/bitbake/lib/toaster/tests/functional/utils.py | |||
@@ -8,7 +8,7 @@ | |||
8 | 8 | ||
9 | 9 | ||
10 | from time import sleep | 10 | from time import sleep |
11 | from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException | 11 | from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException, WebDriverException |
12 | from selenium.webdriver.common.by import By | 12 | from selenium.webdriver.common.by import By |
13 | 13 | ||
14 | from orm.models import Build | 14 | from orm.models import Build |
@@ -36,7 +36,7 @@ def wait_until_build(test_instance, state): | |||
36 | if 'failed' in str(build_state).lower(): | 36 | if 'failed' in str(build_state).lower(): |
37 | break | 37 | break |
38 | except NoSuchElementException: | 38 | except NoSuchElementException: |
39 | continue | 39 | pass |
40 | except TimeoutException: | 40 | except TimeoutException: |
41 | break | 41 | break |
42 | start_time += 1 | 42 | start_time += 1 |
@@ -48,7 +48,6 @@ def wait_until_build_cancelled(test_instance): | |||
48 | """ | 48 | """ |
49 | timeout = 30 | 49 | timeout = 30 |
50 | start_time = 0 | 50 | start_time = 0 |
51 | build = None | ||
52 | while True: | 51 | while True: |
53 | try: | 52 | try: |
54 | if start_time > timeout: | 53 | if start_time > timeout: |
@@ -64,19 +63,17 @@ def wait_until_build_cancelled(test_instance): | |||
64 | if 'failed' in str(build_state).lower(): | 63 | if 'failed' in str(build_state).lower(): |
65 | break | 64 | break |
66 | if 'cancelling' in str(build_state).lower(): | 65 | if 'cancelling' in str(build_state).lower(): |
67 | # Change build state to cancelled | 66 | pass |
68 | if not build: # get build object only once | ||
69 | build = Build.objects.last() | ||
70 | build.outcome = Build.CANCELLED | ||
71 | build.save() | ||
72 | if 'cancelled' in str(build_state).lower(): | 67 | if 'cancelled' in str(build_state).lower(): |
73 | break | 68 | break |
74 | except NoSuchElementException: | ||
75 | continue | ||
76 | except StaleElementReferenceException: | ||
77 | continue | ||
78 | except TimeoutException: | 69 | except TimeoutException: |
79 | break | 70 | break |
71 | except NoSuchElementException: | ||
72 | pass | ||
73 | except StaleElementReferenceException: | ||
74 | pass | ||
75 | except WebDriverException: | ||
76 | pass | ||
80 | start_time += 1 | 77 | start_time += 1 |
81 | sleep(1) # take a breath and try again | 78 | sleep(1) # take a breath and try again |
82 | 79 | ||
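
The switch from continue to pass in the except blocks above is behavioural, not cosmetic: continue jumps straight back to the top of while True, skipping the start_time increment and the sleep at the bottom, so a persistently raised NoSuchElementException would busy-loop forever without ever reaching the timeout. A schematic sketch of the corrected shape; condition and the exception type are placeholders, not the real build-state check:

import time

def wait_for(condition, timeout=30):
    start_time = 0
    while True:
        if start_time > timeout:
            raise TimeoutError("condition not met within %ds" % timeout)
        try:
            if condition():
                break
        except LookupError:
            pass  # fall through; with `continue` the two lines below never ran
        start_time += 1
        time.sleep(1)
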
diff --git a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt index 71cc083436..6243c00a36 100644 --- a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt +++ b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt | |||
@@ -5,3 +5,5 @@ pytest-env==1.1.0 | |||
5 | pytest-html==4.0.2 | 5 | pytest-html==4.0.2 |
6 | pytest-metadata==3.0.0 | 6 | pytest-metadata==3.0.0 |
7 | pytest-order==1.1.0 | 7 | pytest-order==1.1.0 |
8 | requests | ||
9 | |||
diff --git a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py index bd398f0012..aee9bbcd14 100644 --- a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py +++ b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py | |||
@@ -233,7 +233,6 @@ def filter_sizeovertotal(package_object, total_size): | |||
233 | 233 | ||
234 | return '{:.1%}'.format(float(size)/float(total_size)) | 234 | return '{:.1%}'.format(float(size)/float(total_size)) |
235 | 235 | ||
236 | from django.utils.safestring import mark_safe | ||
237 | @register.filter | 236 | @register.filter |
238 | def format_vpackage_rowclass(size): | 237 | def format_vpackage_rowclass(size): |
239 | if size == -1: | 238 | if size == -1: |
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py index 40aed265dc..061e6436c8 100644 --- a/bitbake/lib/toaster/toastergui/views.py +++ b/bitbake/lib/toaster/toastergui/views.py | |||
@@ -372,7 +372,6 @@ def _get_parameters_values(request, default_count, default_order): | |||
372 | # set cookies for parameters. this is useful in case parameters are set | 372 | # set cookies for parameters. this is useful in case parameters are set |
373 | # manually from the GET values of the link | 373 | # manually from the GET values of the link |
374 | def _set_parameters_values(pagesize, orderby, request): | 374 | def _set_parameters_values(pagesize, orderby, request): |
375 | from django.urls import resolve | ||
376 | current_url = resolve(request.path_info).url_name | 375 | current_url = resolve(request.path_info).url_name |
377 | request.session['%s_count' % current_url] = pagesize | 376 | request.session['%s_count' % current_url] = pagesize |
378 | request.session['%s_orderby' % current_url] =orderby | 377 | request.session['%s_orderby' % current_url] =orderby |
@@ -699,7 +698,6 @@ class LazyEncoder(json.JSONEncoder): | |||
699 | return super(LazyEncoder, self).default(obj) | 698 | return super(LazyEncoder, self).default(obj) |
700 | 699 | ||
701 | from toastergui.templatetags.projecttags import filtered_filesizeformat | 700 | from toastergui.templatetags.projecttags import filtered_filesizeformat |
702 | import os | ||
703 | def _get_dir_entries(build_id, target_id, start): | 701 | def _get_dir_entries(build_id, target_id, start): |
704 | node_str = { | 702 | node_str = { |
705 | Target_File.ITYPE_REGULAR : '-', | 703 | Target_File.ITYPE_REGULAR : '-', |
diff --git a/bitbake/lib/toaster/toastermain/settings.py b/bitbake/lib/toaster/toastermain/settings.py index e06adc5a93..d2a449627f 100644 --- a/bitbake/lib/toaster/toastermain/settings.py +++ b/bitbake/lib/toaster/toastermain/settings.py | |||
@@ -298,7 +298,6 @@ SOUTH_TESTS_MIGRATE = False | |||
298 | 298 | ||
299 | # We automatically detect and install applications here if | 299 | # We automatically detect and install applications here if |
300 | # they have a 'models.py' or 'views.py' file | 300 | # they have a 'models.py' or 'views.py' file |
301 | import os | ||
302 | currentdir = os.path.dirname(__file__) | 301 | currentdir = os.path.dirname(__file__) |
303 | for t in os.walk(os.path.dirname(currentdir)): | 302 | for t in os.walk(os.path.dirname(currentdir)): |
304 | modulename = os.path.basename(t[0]) | 303 | modulename = os.path.basename(t[0]) |