Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/COW.py | 10
-rw-r--r--  bitbake/lib/bb/__init__.py | 143
-rwxr-xr-x  bitbake/lib/bb/acl.py | 2
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py | 2
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 114
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py | 44
-rw-r--r--  bitbake/lib/bb/build.py | 27
-rw-r--r--  bitbake/lib/bb/cache.py | 35
-rw-r--r--  bitbake/lib/bb/checksum.py | 25
-rw-r--r--  bitbake/lib/bb/codeparser.py | 97
-rw-r--r--  bitbake/lib/bb/command.py | 39
-rw-r--r--  bitbake/lib/bb/compress/lz4.py | 4
-rw-r--r--  bitbake/lib/bb/cooker.py | 257
-rw-r--r--  bitbake/lib/bb/cookerdata.py | 20
-rw-r--r--  bitbake/lib/bb/data.py | 2
-rw-r--r--  bitbake/lib/bb/data_smart.py | 136
-rw-r--r--  bitbake/lib/bb/event.py | 38
-rw-r--r--  bitbake/lib/bb/exceptions.py | 96
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 224
-rw-r--r--  bitbake/lib/bb/fetch2/az.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py | 6
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 15
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 467
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 126
-rw-r--r--  bitbake/lib/bb/fetch2/gomod.py | 273
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py | 24
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py | 98
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 3
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 3
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 111
-rw-r--r--  bitbake/lib/bb/filter.py | 142
-rwxr-xr-x  bitbake/lib/bb/main.py | 6
-rw-r--r--  bitbake/lib/bb/msg.py | 4
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 49
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 176
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 53
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 24
-rw-r--r--  bitbake/lib/bb/persist_data.py | 271
-rw-r--r--  bitbake/lib/bb/runqueue.py | 319
-rw-r--r--  bitbake/lib/bb/server/process.py | 86
-rw-r--r--  bitbake/lib/bb/server/xmlrpcserver.py | 11
-rw-r--r--  bitbake/lib/bb/siggen.py | 77
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py | 40
-rw-r--r--  bitbake/lib/bb/tests/compression.py | 4
-rw-r--r--  bitbake/lib/bb/tests/data.py | 49
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php | 3528
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 1146
-rw-r--r--  bitbake/lib/bb/tests/filter.py | 88
-rw-r--r--  bitbake/lib/bb/tests/parse.py | 281
-rw-r--r--  bitbake/lib/bb/tests/persist_data.py | 129
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass | 4
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb | 0
-rw-r--r--  bitbake/lib/bb/tests/runqueue.py | 11
-rw-r--r--  bitbake/lib/bb/tests/setup.py | 358
-rw-r--r--  bitbake/lib/bb/tests/support/httpserver.py | 4
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 19
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 183
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 5
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 126
-rw-r--r--  bitbake/lib/bb/ui/teamcity.py | 5
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py | 5
-rw-r--r--  bitbake/lib/bb/utils.py | 762
67 files changed, 8119 insertions, 2320 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 76bc08a3ea..4af03c54ad 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -36,8 +36,9 @@ class COWDictMeta(COWMeta):
     __marker__ = tuple()
 
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
@@ -161,8 +162,9 @@ class COWDictMeta(COWMeta):
 
 class COWSetMeta(COWDictMeta):
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
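Note: why the old "len(cls.__dict__) - 3" arithmetic broke. Newer CPython releases add extra default entries such as __firstlineno__ and __static_attributes__ (introduced around 3.13) to every class __dict__, so any fixed offset miscounts; filtering against an explicit ignore set, as the patch does, is version-proof. A minimal illustration (hypothetical snippet, not part of the patch):

    class Example:
        pass

    # The set of default entries varies by interpreter version; e.g.
    # '__firstlineno__' and '__static_attributes__' only appear on newer CPython.
    print(sorted(Example.__dict__.keys()))

    # Counting after subtracting a known ignore set stays correct across versions:
    ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
    print(len(set(Example.__dict__.keys()) - ignored_keys))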
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 15013540c2..407c4509d4 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,11 +9,11 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.9.0"
+__version__ = "2.15.2"
 
 import sys
-if sys.version_info < (3, 8, 0):
-    raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 9, 0):
+    raise RuntimeError("Sorry, python 3.9.0 or later is required for this version of bitbake")
 
 if sys.version_info < (3, 10, 0):
     # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
@@ -36,6 +36,35 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
+import multiprocessing as mp
+
+# Python 3.14 changes the default multiprocessing context from "fork" to
+# "forkserver". However, bitbake heavily relies on "fork" behavior to
+# efficiently pass data to the child processes. Places that need this should do:
+# from bb import multiprocessing
+# in place of
+# import multiprocessing
+
+class MultiprocessingContext(object):
+    """
+    Multiprocessing proxy object that uses the "fork" context for a property if
+    available, otherwise goes to the main multiprocessing module. This allows
+    it to be a drop-in replacement for the multiprocessing module, but use the
+    fork context
+    """
+    def __init__(self):
+        super().__setattr__("_ctx", mp.get_context("fork"))
+
+    def __getattr__(self, name):
+        if hasattr(self._ctx, name):
+            return getattr(self._ctx, name)
+        return getattr(mp, name)
+
+    def __setattr__(self, name, value):
+        raise AttributeError(f"Unable to set attribute {name}")
+
+multiprocessing = MultiprocessingContext()
 
 
 class NullHandler(logging.Handler):
@@ -103,26 +132,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)
 
-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-        def __repr__(self):
-            logger = self.logger
-            level = logger.getLevelName(logger.getEffectiveLevel())
-            return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter
 
 logger = logging.getLogger("BitBake")
@@ -148,9 +157,25 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
 
 # Messaging convenience functions
 def plain(*args):
+    """
+    Prints a message at "plain" level (higher level than a ``bb.note()``).
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.plain(''.join(args))
 
 def debug(lvl, *args):
+    """
+    Prints a debug message.
+
+    Arguments:
+
+    - ``lvl``: debug level. Higher value increases the debug level
+      (determined by ``bitbake -D``).
+    - ``args``: one or more strings to print.
+    """
     if isinstance(lvl, str):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
@@ -158,33 +183,81 @@ def debug(lvl, *args):
     mainlogger.bbdebug(lvl, ''.join(args))
 
 def note(*args):
+    """
+    Prints a message at "note" level.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.info(''.join(args))
 
-#
-# A higher prioity note which will show on the console but isn't a warning
-#
-# Something is happening the user should be aware of but they probably did
-# something to make it happen
-#
 def verbnote(*args):
+    """
+    A higher priority note which will show on the console but isn't a warning.
+
+    Use in contexts when something is happening the user should be aware of but
+    they probably did something to make it happen.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.verbnote(''.join(args))
 
 #
 # Warnings - things the user likely needs to pay attention to and fix
 #
 def warn(*args):
+    """
+    Prints a warning message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warning(''.join(args))
 
 def warnonce(*args):
+    """
+    Prints a warning message like ``bb.warn()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warnonce(''.join(args))
 
 def error(*args, **kwargs):
+    """
+    Prints an error message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.error(''.join(args), extra=kwargs)
 
 def erroronce(*args):
+    """
+    Prints an error message like ``bb.error()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.erroronce(''.join(args))
 
 def fatal(*args, **kwargs):
+    """
+    Prints an error message and stops the BitBake execution.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.critical(''.join(args), extra=kwargs)
     raise BBHandledException()
 
@@ -213,7 +286,6 @@ def deprecated(func, name=None, advice=""):
 # For compatibility
 def deprecate_import(current, modulename, fromlist, renames = None):
     """Import objects from one module into another, wrapping them with a DeprecationWarning"""
-    import sys
 
     module = __import__(modulename, fromlist = fromlist)
     for position, objname in enumerate(fromlist):
@@ -227,3 +299,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
     setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
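A sketch of how the new proxy is meant to be consumed (assuming the bb module layout above; the pool example itself is illustrative):

    from bb import multiprocessing   # instead of "import multiprocessing"

    def square(x):
        return x * x

    if __name__ == "__main__":
        # Pool, Process, Queue and friends resolve through the "fork" context
        # where the platform provides it; anything the context lacks falls
        # back to the top-level multiprocessing module via __getattr__.
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(square, range(4)))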
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
index 0f41b275cf..e9dbdb617f 100755
--- a/bitbake/lib/bb/acl.py
+++ b/bitbake/lib/bb/acl.py
@@ -195,8 +195,6 @@ class ACL(object):
 
 def main():
     import argparse
-    import pwd
-    import grp
     from pathlib import Path
 
     parser = argparse.ArgumentParser()
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index 639e1607f8..a4371643d7 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
 #
 
 
-from .client import AsyncClient, Client, ClientPool
+from .client import AsyncClient, Client
 from .serv import AsyncServer, AsyncServerConnection
 from .connection import DEFAULT_MAX_CHUNK
 from .exceptions import (
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..17b72033b9 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -24,6 +24,12 @@ ADDR_TYPE_UNIX = 0
 ADDR_TYPE_TCP = 1
 ADDR_TYPE_WS = 2
 
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
 def parse_address(addr):
     if addr.startswith(UNIX_PREFIX):
         return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
@@ -39,6 +45,7 @@ def parse_address(addr):
 
     return (ADDR_TYPE_TCP, (host, int(port)))
 
+
 class AsyncClient(object):
     def __init__(
         self,
@@ -86,8 +93,35 @@ class AsyncClient(object):
     async def connect_websocket(self, uri):
         import websockets
 
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            try:
+                websocket = await websockets.connect(
+                    uri,
+                    ping_interval=None,
+                    open_timeout=self.timeout,
+                )
+            except asyncio.exceptions.TimeoutError:
+                raise ConnectionError("Timeout while connecting to websocket")
+            except (OSError, websockets.InvalidHandshake, websockets.InvalidURI) as exc:
+                raise ConnectionError(f"Could not connect to websocket: {exc}") from exc
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
@@ -225,85 +259,9 @@ class Client(object):
     def close(self):
         if self.loop:
             self.loop.run_until_complete(self.client.close())
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-            self.loop.close()
-            self.loop = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-        return False
-
-
-class ClientPool(object):
-    def __init__(self, max_clients):
-        self.avail_clients = []
-        self.num_clients = 0
-        self.max_clients = max_clients
-        self.loop = None
-        self.client_condition = None
-
-    @abc.abstractmethod
-    async def _new_client(self):
-        raise NotImplementedError("Must be implemented in derived class")
-
-    def close(self):
-        if self.client_condition:
-            self.client_condition = None
-
-        if self.loop:
-            self.loop.run_until_complete(self.__close_clients())
             self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
             self.loop = None
-
-    def run_tasks(self, tasks):
-        if not self.loop:
-            self.loop = asyncio.new_event_loop()
-
-        thread = Thread(target=self.__thread_main, args=(tasks,))
-        thread.start()
-        thread.join()
-
-    @contextlib.asynccontextmanager
-    async def get_client(self):
-        async with self.client_condition:
-            if self.avail_clients:
-                client = self.avail_clients.pop()
-            elif self.num_clients < self.max_clients:
-                self.num_clients += 1
-                client = await self._new_client()
-            else:
-                while not self.avail_clients:
-                    await self.client_condition.wait()
-                client = self.avail_clients.pop()
-
-        try:
-            yield client
-        finally:
-            async with self.client_condition:
-                self.avail_clients.append(client)
-                self.client_condition.notify()
-
-    def __thread_main(self, tasks):
-        async def process_task(task):
-            async with self.get_client() as client:
-                await task(client)
-
-        asyncio.set_event_loop(self.loop)
-        if not self.client_condition:
-            self.client_condition = asyncio.Condition()
-        tasks = [process_task(t) for t in tasks]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-
-    async def __close_clients(self):
-        for c in self.avail_clients:
-            await c.close()
-        self.avail_clients = []
-        self.num_clients = 0
 
     def __enter__(self):
         return self
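The version gate introduced above is self-contained enough to exercise on its own; a sketch under the same assumptions (the real code reads websockets.__version__, the inputs here are hypothetical):

    WEBSOCKETS_MIN_VERSION = (10, 0)

    def check_version(ver_str):
        # Compare only as many components as the minimum defines, so
        # "12.0.1" is truncated to (12, 0) before the tuple comparison.
        try:
            version = tuple(int(v) for v in ver_str.split(".")[0:len(WEBSOCKETS_MIN_VERSION)])
        except ValueError:
            # e.g. a pre-release string like "10.0b1" cannot be parsed
            raise ImportError("Unable to parse websockets version '%s'" % ver_str)
        if version < WEBSOCKETS_MIN_VERSION:
            raise ImportError("websockets %s is older than the required %s" % (ver_str, WEBSOCKETS_MIN_VERSION))
        return version

    print(check_version("12.0"))   # (12, 0)
    # check_version("9.1")         # would raise ImportError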
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index a66117acad..bd1aded8db 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -11,7 +11,7 @@ import os
 import signal
 import socket
 import sys
-import multiprocessing
+from bb import multiprocessing
 import logging
 from .connection import StreamConnection, WebsocketConnection
 from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
@@ -138,14 +138,20 @@ class StreamServer(object):
 
 
 class TCPStreamServer(StreamServer):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         super().__init__(handler, logger)
         self.host = host
         self.port = port
+        self.reuseport = reuseport
 
     def start(self, loop):
         self.server = loop.run_until_complete(
-            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
         )
 
         for s in self.server.sockets:
@@ -205,15 +211,19 @@ class UnixStreamServer(StreamServer):
         self.server.close()
 
     def cleanup(self):
-        os.unlink(self.path)
+        try:
+            os.unlink(self.path)
+        except FileNotFoundError:
+            pass
 
 
 class WebsocketsServer(object):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         self.host = host
         self.port = port
         self.handler = handler
         self.logger = logger
+        self.reuseport = reuseport
 
     def start(self, loop):
         import websockets.server
@@ -224,6 +234,7 @@ class WebsocketsServer(object):
                 self.host,
                 self.port,
                 ping_interval=None,
+                reuse_port=self.reuseport,
             )
         )
 
@@ -262,14 +273,26 @@ class AsyncServer(object):
         self.loop = None
         self.run_tasks = []
 
-    def start_tcp_server(self, host, port):
-        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     def start_unix_server(self, path):
         self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-    def start_websocket_server(self, host, port):
-        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     async def _client_handler(self, socket):
         address = socket.address
@@ -368,8 +391,7 @@ class AsyncServer(object):
 
             self._serve_forever(tasks)
 
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
 
         queue = multiprocessing.Queue()
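What the new reuseport plumbing buys: a sketch of the underlying asyncio behavior (handler and port are hypothetical):

    import asyncio

    async def handler(reader, writer):
        writer.close()

    async def main():
        # reuse_port maps to SO_REUSEPORT, letting several server processes
        # bind the same address/port with the kernel balancing new connections
        # between them (Linux/BSD; raises ValueError where unsupported).
        server = await asyncio.start_server(handler, "127.0.0.1", 8686, reuse_port=True)
        async with server:
            await server.serve_forever()

    # asyncio.run(main())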
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
@@ -395,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
         # Use specified regex
         return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
     elif progress.startswith("custom:"):
-        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        # Use a custom progress handler that was injected via other means
         import functools
         from types import ModuleType
 
@@ -741,7 +743,7 @@ def _exec_task(fn, task, d, quieterr):
 
         if quieterr:
             if not handled:
-                logger.warning(repr(exc))
+                logger.warning(str(exc))
             event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
         else:
             errprinted = errchk.triggered
@@ -750,7 +752,7 @@ def _exec_task(fn, task, d, quieterr):
             if verboseStdoutLogging or handled:
                 errprinted = True
             if not handled:
-                logger.error(repr(exc))
+                logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
         return 1
 
@@ -930,9 +932,13 @@ def add_tasks(tasklist, d):
     # don't assume holding a reference
     d.setVar('_task_deps', task_deps)
 
+def ensure_task_prefix(name):
+    if name[:3] != "do_":
+        name = "do_" + name
+    return name
+
 def addtask(task, before, after, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     d.setVarFlag(task, "task", 1)
     bbtasks = d.getVar('__BBTASKS', False) or []
@@ -944,19 +950,20 @@ def addtask(task, before, after, d):
     if after is not None:
         # set up deps for function
         for entry in after.split():
+            entry = ensure_task_prefix(entry)
             if entry not in existing:
                 existing.append(entry)
         d.setVarFlag(task, "deps", existing)
     if before is not None:
         # set up things that depend on this func
         for entry in before.split():
+            entry = ensure_task_prefix(entry)
             existing = d.getVarFlag(entry, "deps", False) or []
             if task not in existing:
                 d.setVarFlag(entry, "deps", [task] + existing)
 
 def deltask(task, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     bbtasks = d.getVar('__BBTASKS', False) or []
     if task in bbtasks:
@@ -1021,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
             chain.pop()
     follow_chain(task_start, task_end)
     return outtasks
+
+def listtasks(d):
+    """
+    Return the list of tasks in the current recipe.
+    """
+    return tuple(d.getVar('__BBTASKS', False) or ())
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 18d5574a31..2361c5684d 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -28,7 +28,7 @@ import shutil
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "155"
+__cache_version__ = "156"
 
 def getCacheFile(path, filename, mc, data_hash):
     mcspec = ''
@@ -395,7 +395,7 @@ class Cache(object):
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.mc = mc
-        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
+        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
         self.caches_array = caches_array
         self.cachedir = self.data.getVar("CACHE")
         self.clean = set()
@@ -441,7 +441,7 @@ class Cache(object):
         else:
             symlink = os.path.join(self.cachedir, "bb_cache.dat")
 
-        if os.path.exists(symlink):
+        if os.path.exists(symlink) or os.path.islink(symlink):
             bb.utils.remove(symlink)
         try:
             os.symlink(os.path.basename(self.cachefile), symlink)
@@ -779,25 +779,6 @@ class MulticonfigCache(Mapping):
         for k in self.__caches:
             yield k
 
-def init(cooker):
-    """
-    The Objective: Cache the minimum amount of data possible yet get to the
-    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
-    To do this, we intercept getVar calls and only cache the variables we see
-    being accessed. We rely on the cache getVar calls being made for all
-    variables bitbake might need to use to reach this stage. For each cached
-    file we need to track:
-
-    * Its mtime
-    * The mtimes of all its dependencies
-    * Whether it caused a parse.SkipRecipe exception
-
-    Files causing parsing errors are evicted from the cache.
-
-    """
-    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
-
 
 class CacheData(object):
     """
@@ -866,6 +847,16 @@ class MultiProcessCache(object):
             data = [{}]
         return data
 
+    def clear_cache(self):
+        if not self.cachefile:
+            bb.fatal("Can't clear invalid cachefile")
+
+        self.cachedata = self.create_cachedata()
+        self.cachedata_extras = self.create_cachedata()
+        with bb.utils.fileslocked([self.cachefile + ".lock"]):
+            bb.utils.remove(self.cachefile)
+            bb.utils.remove(self.cachefile + "-*")
+
     def save_extras(self):
         if not self.cachefile:
             return
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 557793d366..3fb39a303e 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -142,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
 
         checksums.sort(key=operator.itemgetter(1))
         return checksums
+
+class RevisionsCache(MultiProcessCache):
+    cache_file_name = "local_srcrevisions.dat"
+    CACHE_VERSION = 1
+
+    def __init__(self):
+        MultiProcessCache.__init__(self)
+
+    def get_revs(self):
+        return self.cachedata[0]
+
+    def get_rev(self, k):
+        if k in self.cachedata_extras[0]:
+            return self.cachedata_extras[0][k]
+        if k in self.cachedata[0]:
+            return self.cachedata[0][k]
+        return None
+
+    def set_rev(self, k, v):
+        self.cachedata[0][k] = v
+        self.cachedata_extras[0][k] = v
+
+    def merge_data(self, source, dest):
+        for h in source[0]:
+            dest[0][h] = source[0][h]
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..4f70cf7fe7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -69,12 +69,25 @@ def add_module_functions(fn, functions, namespace):
         name = "%s.%s" % (namespace, f)
         parser = PythonParser(name, logger)
         try:
-            parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
+            parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f, func=functions[f])
             #bb.warn("Cached %s" % f)
         except KeyError:
-            lines, lineno = inspect.getsourcelines(functions[f])
+            try:
+                targetfn = inspect.getsourcefile(functions[f])
+            except TypeError:
+                # Builtin
+                continue
+            if fn != targetfn:
+                # Skip references to other modules outside this file
+                #bb.warn("Skipping %s" % name)
+                continue
+            try:
+                lines, lineno = inspect.getsourcelines(functions[f])
+            except TypeError:
+                # Builtin
+                continue
             src = "".join(lines)
-            parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
+            parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f, func=functions[f])
             #bb.warn("Not cached %s" % f)
         execs = parser.execs.copy()
         # Expand internal module exec references
@@ -82,14 +95,17 @@ def add_module_functions(fn, functions, namespace):
             if e in functions:
                 execs.remove(e)
                 execs.add(namespace + "." + e)
-        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+        visitorcode = None
+        if hasattr(functions[f], 'visitorcode'):
+            visitorcode = getattr(functions[f], "visitorcode")
+        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
         #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
 
 def update_module_dependencies(d):
     for mod in modulecode_deps:
         excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
         if excludes:
-            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
 
 # A custom getstate/setstate using tuples is actually worth 15% cachesize by
 # avoiding duplication of the attribute names!
@@ -112,21 +128,22 @@ class SetCache(object):
 codecache = SetCache()
 
 class pythonCacheLine(object):
-    def __init__(self, refs, execs, contains):
+    def __init__(self, refs, execs, contains, extra):
         self.refs = codecache.internSet(refs)
         self.execs = codecache.internSet(execs)
         self.contains = {}
         for c in contains:
             self.contains[c] = codecache.internSet(contains[c])
+        self.extra = extra
 
     def __getstate__(self):
-        return (self.refs, self.execs, self.contains)
+        return (self.refs, self.execs, self.contains, self.extra)
 
     def __setstate__(self, state):
-        (refs, execs, contains) = state
-        self.__init__(refs, execs, contains)
+        (refs, execs, contains, extra) = state
+        self.__init__(refs, execs, contains, extra)
     def __hash__(self):
-        l = (hash(self.refs), hash(self.execs))
+        l = (hash(self.refs), hash(self.execs), hash(self.extra))
         for c in sorted(self.contains.keys()):
             l = l + (c, hash(self.contains[c]))
         return hash(l)
@@ -155,7 +172,7 @@ class CodeParserCache(MultiProcessCache):
     # so that an existing cache gets invalidated. Additionally you'll need
     # to increment __cache_version__ in cache.py in order to ensure that old
     # recipe caches don't trigger "Taskhash mismatch" errors.
-    CACHE_VERSION = 11
+    CACHE_VERSION = 14
 
     def __init__(self):
         MultiProcessCache.__init__(self)
@@ -169,8 +186,8 @@ class CodeParserCache(MultiProcessCache):
         self.pythoncachelines = {}
         self.shellcachelines = {}
 
-    def newPythonCacheLine(self, refs, execs, contains):
-        cacheline = pythonCacheLine(refs, execs, contains)
+    def newPythonCacheLine(self, refs, execs, contains, extra):
+        cacheline = pythonCacheLine(refs, execs, contains, extra)
         h = hash(cacheline)
         if h in self.pythoncachelines:
             return self.pythoncachelines[h]
@@ -255,7 +272,15 @@ class PythonParser():
 
     def visit_Call(self, node):
         name = self.called_node_name(node.func)
-        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
+        if name and name in modulecode_deps and modulecode_deps[name][5]:
+            visitorcode = modulecode_deps[name][5]
+            contains, execs, warn = visitorcode(name, node.args)
+            for i in contains:
+                self.contains[i] = contains[i]
+            self.execs |= execs
+            if warn:
+                self.warn(node.func, warn)
+        elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
             if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
                 varname = node.args[0].value
                 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
@@ -323,7 +348,7 @@ class PythonParser():
     # For the python module code it is expensive to have the function text so it is
     # uses a different fixedhash to cache against. We can take the hit on obtaining the
     # text if it isn't in the cache.
-    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
+    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None, func=None):
         if not fixedhash and (not node or not node.strip()):
             return
 
@@ -338,6 +363,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncache[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+            self.extra = codeparsercache.pythoncache[h].extra
             return
 
         if h in codeparsercache.pythoncacheextras:
@@ -346,6 +372,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncacheextras[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+            self.extra = codeparsercache.pythoncacheextras[h].extra
             return
 
         if fixedhash and not node:
@@ -363,9 +390,16 @@ class PythonParser():
             if n.__class__.__name__ == "Call":
                 self.visit_Call(n)
 
+        if func is not None:
+            self.references |= getattr(func, "bb_vardeps", set())
+            self.references -= getattr(func, "bb_vardepsexclude", set())
+
         self.execs.update(self.var_execs)
+        self.extra = None
+        if fixedhash:
+            self.extra = bbhash(str(node))
 
-        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
+        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
 
 class ShellParser():
     def __init__(self, name, log):
@@ -484,19 +518,34 @@ class ShellParser():
         """
 
         words = list(words)
-        for word in list(words):
+        for word in words:
             wtree = pyshlex.make_wordtree(word[1])
             for part in wtree:
                 if not isinstance(part, list):
                     continue
 
-                if part[0] in ('`', '$('):
-                    command = pyshlex.wordtree_as_string(part[1:-1])
-                    self._parse_shell(command)
-
-                    if word[0] in ("cmd_name", "cmd_word"):
-                        if word in words:
-                            words.remove(word)
+                candidates = [part]
+
+                # If command is of type:
+                #
+                #   var="... $(cmd [...]) ..."
+                #
+                # Then iterate on what's between the quotes and if we find a
+                # list, make that what we check for below.
+                if len(part) >= 3 and part[0] == '"':
+                    for p in part[1:-1]:
+                        if isinstance(p, list):
+                            candidates.append(p)
+
+                for candidate in candidates:
+                    if len(candidate) >= 2:
+                        if candidate[0] in ('`', '$('):
+                            command = pyshlex.wordtree_as_string(candidate[1:-1])
+                            self._parse_shell(command)
+
+                            if word[0] in ("cmd_name", "cmd_word"):
+                                if word in words:
+                                    words.remove(word)
 
         usetoken = False
         for word in words:
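The new func= argument lets python library functions annotate their own variable dependencies via attributes the parser reads (bb_vardeps / bb_vardepsexclude). A sketch of that convention; the function and variable names below are hypothetical:

    def get_flags(d):
        # The string parser cannot see through the computed variable name,
        # so the dependency on the BUILD_FLAGS_* variable would be missed.
        return d.getVar("BUILD_FLAGS_" + (d.getVar("FLAG_SUFFIX") or "default"))

    # Attribute names are the ones parse_python() consumes above:
    get_flags.bb_vardeps = {"FLAG_SUFFIX", "BUILD_FLAGS_default"}
    get_flags.bb_vardepsexclude = set()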
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 1fcb9bf14c..59a979ee90 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -24,6 +24,7 @@ import io
 import bb.event
 import bb.cooker
 import bb.remotedata
+import bb.parse
 
 class DataStoreConnectionHandle(object):
     def __init__(self, dsindex=0):
@@ -108,7 +109,7 @@ class Command:
 
     def runAsyncCommand(self, _, process_server, halt):
         try:
-            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
+            if self.cooker.state in (bb.cooker.State.ERROR, bb.cooker.State.SHUTDOWN, bb.cooker.State.FORCE_SHUTDOWN):
                 # updateCache will trigger a shutdown of the parser
                 # and then raise BBHandledException triggering an exit
                 self.cooker.updateCache()
@@ -118,7 +119,7 @@ class Command:
                 (command, options) = cmd
                 commandmethod = getattr(CommandsAsync, command)
                 needcache = getattr( commandmethod, "needcache" )
-                if needcache and self.cooker.state != bb.cooker.state.running:
+                if needcache and self.cooker.state != bb.cooker.State.RUNNING:
                     self.cooker.updateCache()
                     return True
                 else:
@@ -142,14 +143,14 @@ class Command:
             return bb.server.process.idleFinish(traceback.format_exc())
 
     def finishAsyncCommand(self, msg=None, code=None):
+        self.cooker.finishcommand()
+        self.process_server.clear_async_cmd()
         if msg or msg == "":
             bb.event.fire(CommandFailed(msg), self.cooker.data)
         elif code:
             bb.event.fire(CommandExit(code), self.cooker.data)
         else:
             bb.event.fire(CommandCompleted(), self.cooker.data)
-        self.cooker.finishcommand()
-        self.process_server.clear_async_cmd()
 
     def reset(self):
         if self.remotedatastores:
@@ -310,7 +311,7 @@ class CommandsSync:
     def revalidateCaches(self, command, params):
         """Called by UI clients when metadata may have changed"""
         command.cooker.revalidateCaches()
-    parseConfiguration.needconfig = False
+    revalidateCaches.needconfig = False
 
     def getRecipes(self, command, params):
         try:
@@ -420,15 +421,30 @@ class CommandsSync:
         return command.cooker.recipecaches[mc].pkg_dp
     getDefaultPreference.readonly = True
 
+
     def getSkippedRecipes(self, command, params):
+        """
+        Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
+
+        Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
+
+        :param command: Internally used parameter.
+        :param params: Parameter array. params[0] is multiconfig/mc name. If not given, then default mc '' is assumed.
+        :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
+        """
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+
         # Return list sorted by reverse priority order
         import bb.cache
         def sortkey(x):
             vfn, _ = x
-            realfn, _, mc = bb.cache.virtualfn2realfn(vfn)
-            return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn)
+            realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
+            return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
 
-        skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey))
+        skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
         return list(skipdict.items())
     getSkippedRecipes.readonly = True
 
@@ -582,6 +598,13 @@ class CommandsSync:
         return DataStoreConnectionHandle(idx)
     parseRecipeFile.readonly = True
 
+    def finalizeData(self, command, params):
+        newdata = command.cooker.data.createCopy()
+        bb.data.expandKeys(newdata)
+        bb.parse.ast.runAnonFuncs(newdata)
+        idx = command.remotedatastores.store(newdata)
+        return DataStoreConnectionHandle(idx)
+
 class CommandsAsync:
     """
     A class of asynchronous commands
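A client-side sketch of the documented entry point (bb.tinfoil.Tinfoil.get_skipped_recipes, named in the docstring above); the exact return shape is left flexible here, so the pairs are normalized through dict():

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()
        # dict() accepts either the raw (virtualfn, SkippedPackage) pair list
        # returned by the command or a mapping built from it by tinfoil.
        skipped = dict(tinfoil.get_skipped_recipes())
        for vfn, info in skipped.items():
            print(vfn, getattr(info, "skipreason", ""))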
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
index 88b0989322..2a64681c86 100644
--- a/bitbake/lib/bb/compress/lz4.py
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -13,7 +13,7 @@ def open(*args, **kwargs):
 
 class LZ4File(bb.compress._pipecompress.PipeFile):
     def get_compress(self):
-        return ["lz4c", "-z", "-c"]
+        return ["lz4", "-z", "-c"]
 
     def get_decompress(self):
-        return ["lz4c", "-d", "-c"]
+        return ["lz4", "-d", "-c"]
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..03f262ac16 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,16 +8,16 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
-
+import enum
 import sys, os, glob, os.path, re, time
 import itertools
 import logging
-import multiprocessing
+from bb import multiprocessing
 import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
@@ -26,6 +26,7 @@ import json
 import pickle
 import codecs
 import hashserv
+import ctypes
 
 logger = logging.getLogger("BitBake")
 collectlog = logging.getLogger("BitBake.Collection")
@@ -48,16 +49,15 @@ class CollectionError(bb.BBHandledException):
     Exception raised when layer configuration is incorrect
     """
 
-class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
-    @classmethod
-    def get_name(cls, code):
-        for name in dir(cls):
-            value = getattr(cls, name)
-            if type(value) == type(cls.initial) and value == code:
-                return name
-        raise ValueError("Invalid status code: %s" % code)
+class State(enum.Enum):
+    INITIAL = 0,
+    PARSING = 1,
+    RUNNING = 2,
+    SHUTDOWN = 3,
+    FORCE_SHUTDOWN = 4,
+    STOPPED = 5,
+    ERROR = 6
 
 
 class SkippedPackage:
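One detail of the replacement worth noting before the call sites below: enum members compare by identity, so the membership and inequality checks used throughout this commit are unaffected by the trailing commas (which make each value a one-element tuple). A reduced sketch (two members only, not from the patch):

    import enum

    class State(enum.Enum):
        INITIAL = 0,
        RUNNING = 2,

    s = State.INITIAL
    print(s in [State.INITIAL, State.RUNNING])   # True
    print(s is State.RUNNING)                    # False
    print(s.name, s.value)                       # INITIAL (0,)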
@@ -134,7 +134,8 @@ class BBCooker:
         self.baseconfig_valid = False
         self.parsecache_valid = False
         self.eventlog = None
-        self.skiplist = {}
+        # The skiplists, one per multiconfig
+        self.skiplist_by_mc = defaultdict(dict)
         self.featureset = CookerFeatures()
         if featureSet:
             for f in featureSet:
@@ -180,7 +181,7 @@ class BBCooker:
             pass
 
         self.command = bb.command.Command(self, self.process_server)
-        self.state = state.initial
+        self.state = State.INITIAL
 
         self.parser = None
 
@@ -226,23 +227,22 @@ class BBCooker:
             bb.warn("Cooker received SIGTERM, shutting down...")
         elif signum == signal.SIGHUP:
             bb.warn("Cooker received SIGHUP, shutting down...")
-        self.state = state.forceshutdown
+        self.state = State.FORCE_SHUTDOWN
         bb.event._should_exit.set()
 
     def setFeatures(self, features):
         # we only accept a new feature set if we're in state initial, so we can reset without problems
-        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+        if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
             raise Exception("Illegal state for feature set change")
         original_featureset = list(self.featureset)
         for feature in features:
             self.featureset.setFeature(feature)
         bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
-        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
+        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
             self.reset()
 
     def initConfigurationData(self):
-
-        self.state = state.initial
+        self.state = State.INITIAL
         self.caches_array = []
 
         sys.path = self.orig_syspath.copy()
@@ -281,7 +281,6 @@ class BBCooker:
         self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
-        self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}
 
         eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
@@ -315,13 +314,22 @@ class BBCooker:
             dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
             upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
             if upstream:
-                import socket
                 try:
-                    sock = socket.create_connection(upstream.split(":"), 5)
-                    sock.close()
-                except socket.error as e:
-                    bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+                    with hashserv.create_client(upstream) as client:
+                        client.ping()
+                except ImportError as e:
+                    bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
+%s
+Please install the needed module on the build host, or use an environment containing it:
+ - if you are using bitbake-setup, run 'bitbake-setup install-buildtools'
+ - openembedded-core layer contains 'scripts/install-buildtools' that can also be used
+ - or set up pip venv
+You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
+                            % (upstream, repr(e)))
+                except ConnectionError as e:
+                    bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
                             % (upstream, repr(e)))
+                    upstream = None
 
             self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
             self.hashserv = hashserv.create_server(
@@ -370,6 +378,11 @@ class BBCooker:
         if not clean:
             bb.parse.BBHandler.cached_statements = {}
 
+        # If writes were made to any of the data stores, we need to recalculate the data
+        # store cache
+        if hasattr(self, "databuilder"):
+            self.databuilder.calc_datastore_hashes()
+
     def parseConfiguration(self):
         self.updateCacheSync()
 
@@ -612,8 +625,8 @@ class BBCooker:
         localdata = {}
 
         for mc in self.multiconfigs:
-            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
-            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
+            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -680,14 +693,14 @@ class BBCooker:
680 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) 693 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
681 return taskdata, runlist 694 return taskdata, runlist
682 695
683 def prepareTreeData(self, pkgs_to_build, task): 696 def prepareTreeData(self, pkgs_to_build, task, halt=False):
684 """ 697 """
685 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 698 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
686 """ 699 """
687 700
688 # We set halt to False here to prevent unbuildable targets raising 701 # We set halt to False here to prevent unbuildable targets raising
689 # an exception when we're just generating data 702 # an exception when we're just generating data
690 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) 703 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
691 704
692 return runlist, taskdata 705 return runlist, taskdata
693 706
@@ -701,7 +714,7 @@ class BBCooker:
701 if not task.startswith("do_"): 714 if not task.startswith("do_"):
702 task = "do_%s" % task 715 task = "do_%s" % task
703 716
704 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) 717 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
705 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 718 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
706 rq.rqdata.prepare() 719 rq.rqdata.prepare()
707 return self.buildDependTree(rq, taskdata) 720 return self.buildDependTree(rq, taskdata)
@@ -896,10 +909,11 @@ class BBCooker:
896 909
897 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) 910 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
898 911
899 with open('pn-buildlist', 'w') as f: 912 pns = depgraph["pn"].keys()
900 for pn in depgraph["pn"]: 913 if pns:
901 f.write(pn + "\n") 914 with open('pn-buildlist', 'w') as f:
902 logger.info("PN build list saved to 'pn-buildlist'") 915 f.write("%s\n" % "\n".join(sorted(pns)))
916 logger.info("PN build list saved to 'pn-buildlist'")
903 917
904 # Remove old format output files to ensure no confusion with stale data 918 # Remove old format output files to ensure no confusion with stale data
905 try: 919 try:
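The pn-buildlist rework above writes the whole sorted name list in one call and skips creating the file when the dependency graph produced no recipes. A small sketch of the new behaviour (write_build_list is a hypothetical name):

    def write_build_list(depgraph, path="pn-buildlist"):
        # Only create the file when there is something to record, and sort
        # the names so the output is stable between runs.
        pns = depgraph["pn"].keys()
        if pns:
            with open(path, "w") as f:
                f.write("%s\n" % "\n".join(sorted(pns)))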
@@ -933,7 +947,7 @@ class BBCooker:
933 for mc in self.multiconfigs: 947 for mc in self.multiconfigs:
934 # First get list of recipes, including skipped 948 # First get list of recipes, including skipped
935 recipefns = list(self.recipecaches[mc].pkg_fn.keys()) 949 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
936 recipefns.extend(self.skiplist.keys()) 950 recipefns.extend(self.skiplist_by_mc[mc].keys())
937 951
938 # Work out list of bbappends that have been applied 952 # Work out list of bbappends that have been applied
939 applied_appends = [] 953 applied_appends = []
@@ -952,13 +966,7 @@ class BBCooker:
952 '\n '.join(appends_without_recipes[mc]))) 966 '\n '.join(appends_without_recipes[mc])))
953 967
954 if msgs: 968 if msgs:
955 msg = "\n".join(msgs) 969 bb.fatal("\n".join(msgs))
956 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
957 False) or "no"
958 if warn_only.lower() in ("1", "yes", "true"):
959 bb.warn(msg)
960 else:
961 bb.fatal(msg)
962 970
963 def handlePrefProviders(self): 971 def handlePrefProviders(self):
964 972
@@ -1338,7 +1346,7 @@ class BBCooker:
1338 self.buildSetVars() 1346 self.buildSetVars()
1339 self.reset_mtime_caches() 1347 self.reset_mtime_caches()
1340 1348
1341 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 1349 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1342 1350
1343 layername = self.collections[mc].calc_bbfile_priority(fn)[2] 1351 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1344 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) 1352 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
@@ -1399,11 +1407,11 @@ class BBCooker:
1399 1407
1400 msg = None 1408 msg = None
1401 interrupted = 0 1409 interrupted = 0
1402 if halt or self.state == state.forceshutdown: 1410 if halt or self.state == State.FORCE_SHUTDOWN:
1403 rq.finish_runqueue(True) 1411 rq.finish_runqueue(True)
1404 msg = "Forced shutdown" 1412 msg = "Forced shutdown"
1405 interrupted = 2 1413 interrupted = 2
1406 elif self.state == state.shutdown: 1414 elif self.state == State.SHUTDOWN:
1407 rq.finish_runqueue(False) 1415 rq.finish_runqueue(False)
1408 msg = "Stopped build" 1416 msg = "Stopped build"
1409 interrupted = 1 1417 interrupted = 1
@@ -1429,8 +1437,7 @@ class BBCooker:
1429 if quietlog: 1437 if quietlog:
1430 bb.runqueue.logger.setLevel(rqloglevel) 1438 bb.runqueue.logger.setLevel(rqloglevel)
1431 return bb.server.process.idleFinish(msg) 1439 return bb.server.process.idleFinish(msg)
1432 if retval is True: 1440
1433 return True
1434 return retval 1441 return retval
1435 1442
1436 self.idleCallBackRegister(buildFileIdle, rq) 1443 self.idleCallBackRegister(buildFileIdle, rq)
@@ -1459,7 +1466,6 @@ class BBCooker:
1459 1466
1460 if t in task or getAllTaskSignatures: 1467 if t in task or getAllTaskSignatures:
1461 try: 1468 try:
1462 rq.rqdata.prepare_task_hash(tid)
1463 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) 1469 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1464 except KeyError: 1470 except KeyError:
1465 sig.append(self.getTaskSignatures(target, [t])[0]) 1471 sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1474,12 +1480,12 @@ class BBCooker:
1474 def buildTargetsIdle(server, rq, halt): 1480 def buildTargetsIdle(server, rq, halt):
1475 msg = None 1481 msg = None
1476 interrupted = 0 1482 interrupted = 0
1477 if halt or self.state == state.forceshutdown: 1483 if halt or self.state == State.FORCE_SHUTDOWN:
1478 bb.event._should_exit.set() 1484 bb.event._should_exit.set()
1479 rq.finish_runqueue(True) 1485 rq.finish_runqueue(True)
1480 msg = "Forced shutdown" 1486 msg = "Forced shutdown"
1481 interrupted = 2 1487 interrupted = 2
1482 elif self.state == state.shutdown: 1488 elif self.state == State.SHUTDOWN:
1483 rq.finish_runqueue(False) 1489 rq.finish_runqueue(False)
1484 msg = "Stopped build" 1490 msg = "Stopped build"
1485 interrupted = 1 1491 interrupted = 1
@@ -1500,8 +1506,6 @@ class BBCooker:
1500 bb.event.disable_heartbeat() 1506 bb.event.disable_heartbeat()
1501 return bb.server.process.idleFinish(msg) 1507 return bb.server.process.idleFinish(msg)
1502 1508
1503 if retval is True:
1504 return True
1505 return retval 1509 return retval
1506 1510
1507 self.reset_mtime_caches() 1511 self.reset_mtime_caches()
@@ -1574,7 +1578,7 @@ class BBCooker:
1574 1578
1575 1579
1576 def updateCacheSync(self): 1580 def updateCacheSync(self):
1577 if self.state == state.running: 1581 if self.state == State.RUNNING:
1578 return 1582 return
1579 1583
1580 if not self.baseconfig_valid: 1584 if not self.baseconfig_valid:
@@ -1584,19 +1588,19 @@ class BBCooker:
1584 1588
1585 # This is called for all async commands when self.state != running 1589 # This is called for all async commands when self.state != running
1586 def updateCache(self): 1590 def updateCache(self):
1587 if self.state == state.running: 1591 if self.state == State.RUNNING:
1588 return 1592 return
1589 1593
1590 if self.state in (state.shutdown, state.forceshutdown, state.error): 1594 if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1591 if hasattr(self.parser, 'shutdown'): 1595 if hasattr(self.parser, 'shutdown'):
1592 self.parser.shutdown(clean=False) 1596 self.parser.shutdown(clean=False)
1593 self.parser.final_cleanup() 1597 self.parser.final_cleanup()
1594 raise bb.BBHandledException() 1598 raise bb.BBHandledException()
1595 1599
1596 if self.state != state.parsing: 1600 if self.state != State.PARSING:
1597 self.updateCacheSync() 1601 self.updateCacheSync()
1598 1602
1599 if self.state != state.parsing and not self.parsecache_valid: 1603 if self.state != State.PARSING and not self.parsecache_valid:
1600 bb.server.process.serverlog("Parsing started") 1604 bb.server.process.serverlog("Parsing started")
1601 self.parsewatched = {} 1605 self.parsewatched = {}
1602 1606
@@ -1630,9 +1634,10 @@ class BBCooker:
1630 self.parser = CookerParser(self, mcfilelist, total_masked) 1634 self.parser = CookerParser(self, mcfilelist, total_masked)
1631 self._parsecache_set(True) 1635 self._parsecache_set(True)
1632 1636
1633 self.state = state.parsing 1637 self.state = State.PARSING
1634 1638
1635 if not self.parser.parse_next(): 1639 if not self.parser.parse_next():
1640 bb.server.process.serverlog("Parsing completed")
1636 collectlog.debug("parsing complete") 1641 collectlog.debug("parsing complete")
1637 if self.parser.error: 1642 if self.parser.error:
1638 raise bb.BBHandledException() 1643 raise bb.BBHandledException()
@@ -1640,7 +1645,7 @@ class BBCooker:
1640 self.handlePrefProviders() 1645 self.handlePrefProviders()
1641 for mc in self.multiconfigs: 1646 for mc in self.multiconfigs:
1642 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) 1647 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1643 self.state = state.running 1648 self.state = State.RUNNING
1644 1649
1645 # Send an event listing all stamps reachable after parsing 1650 # Send an event listing all stamps reachable after parsing
1646 # which the metadata may use to clean up stale data 1651 # which the metadata may use to clean up stale data
@@ -1713,10 +1718,10 @@ class BBCooker:
1713 1718
1714 def shutdown(self, force=False): 1719 def shutdown(self, force=False):
1715 if force: 1720 if force:
1716 self.state = state.forceshutdown 1721 self.state = State.FORCE_SHUTDOWN
1717 bb.event._should_exit.set() 1722 bb.event._should_exit.set()
1718 else: 1723 else:
1719 self.state = state.shutdown 1724 self.state = State.SHUTDOWN
1720 1725
1721 if self.parser: 1726 if self.parser:
1722 self.parser.shutdown(clean=False) 1727 self.parser.shutdown(clean=False)
@@ -1726,7 +1731,7 @@ class BBCooker:
1726 if hasattr(self.parser, 'shutdown'): 1731 if hasattr(self.parser, 'shutdown'):
1727 self.parser.shutdown(clean=False) 1732 self.parser.shutdown(clean=False)
1728 self.parser.final_cleanup() 1733 self.parser.final_cleanup()
1729 self.state = state.initial 1734 self.state = State.INITIAL
1730 bb.event._should_exit.clear() 1735 bb.event._should_exit.clear()
1731 1736
1732 def reset(self): 1737 def reset(self):
@@ -1813,8 +1818,8 @@ class CookerCollectFiles(object):
1813 bb.event.fire(CookerExit(), eventdata) 1818 bb.event.fire(CookerExit(), eventdata)
1814 1819
1815 # We need to track where we look so that we can know when the cache is invalid. There 1820 # We need to track where we look so that we can know when the cache is invalid. There
1816 # is no nice way to do this, this is horrid. We intercept the os.listdir() 1821 # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
1817 # (or os.scandir() for python 3.6+) calls while we run glob(). 1822 # calls while we run glob().
1818 origlistdir = os.listdir 1823 origlistdir = os.listdir
1819 if hasattr(os, 'scandir'): 1824 if hasattr(os, 'scandir'):
1820 origscandir = os.scandir 1825 origscandir = os.scandir
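The comment above describes the trick used to learn which directories a BBFILES glob() consults: os.listdir() and os.scandir() are temporarily wrapped so each directory access can be recorded for later cache invalidation. A self-contained sketch of that interception, with 'watcher' standing in for the cooker's file-watch callback:

    import glob
    import os

    def glob_with_watch(pattern, watcher):
        # Wrap os.listdir/os.scandir while glob() runs so every directory
        # it touches is reported to 'watcher', then restore the originals.
        origlistdir, origscandir = os.listdir, os.scandir

        def ourlistdir(d="."):
            watcher(d)
            return origlistdir(d)

        def ourscandir(d="."):
            watcher(d)
            return origscandir(d)

        os.listdir, os.scandir = ourlistdir, ourscandir
        try:
            return glob.glob(pattern)
        finally:
            os.listdir, os.scandir = origlistdir, origscandir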
@@ -1994,8 +1999,9 @@ class ParsingFailure(Exception):
1994 Exception.__init__(self, realexception, recipe) 1999 Exception.__init__(self, realexception, recipe)
1995 2000
1996class Parser(multiprocessing.Process): 2001class Parser(multiprocessing.Process):
1997 def __init__(self, jobs, results, quit, profile): 2002 def __init__(self, jobs, next_job_id, results, quit, profile):
1998 self.jobs = jobs 2003 self.jobs = jobs
2004 self.next_job_id = next_job_id
1999 self.results = results 2005 self.results = results
2000 self.quit = quit 2006 self.quit = quit
2001 multiprocessing.Process.__init__(self) 2007 multiprocessing.Process.__init__(self)
@@ -2005,6 +2011,7 @@ class Parser(multiprocessing.Process):
2005 self.queue_signals = False 2011 self.queue_signals = False
2006 self.signal_received = [] 2012 self.signal_received = []
2007 self.signal_threadlock = threading.Lock() 2013 self.signal_threadlock = threading.Lock()
2014 self.exit = False
2008 2015
2009 def catch_sig(self, signum, frame): 2016 def catch_sig(self, signum, frame):
2010 if self.queue_signals: 2017 if self.queue_signals:
@@ -2017,24 +2024,10 @@ class Parser(multiprocessing.Process):
2017 signal.signal(signal.SIGTERM, signal.SIG_DFL) 2024 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2018 os.kill(os.getpid(), signal.SIGTERM) 2025 os.kill(os.getpid(), signal.SIGTERM)
2019 elif signum == signal.SIGINT: 2026 elif signum == signal.SIGINT:
2020 signal.default_int_handler(signum, frame) 2027 self.exit = True
2021 2028
2022 def run(self): 2029 def run(self):
2023 2030 bb.utils.profile_function("parsing" in self.profile, self.realrun, "profile-parse-%s.log" % multiprocessing.current_process().name, process=False)
2024 if not self.profile:
2025 self.realrun()
2026 return
2027
2028 try:
2029 import cProfile as profile
2030 except:
2031 import profile
2032 prof = profile.Profile()
2033 try:
2034 profile.Profile.runcall(prof, self.realrun)
2035 finally:
2036 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2037 prof.dump_stats(logfile)
2038 2031
2039 def realrun(self): 2032 def realrun(self):
2040 # Signal handling here is hard. We must not terminate any process or thread holding the write 2033 # Signal handling here is hard. We must not terminate any process or thread holding the write
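The deleted cProfile boilerplate moves into bb.utils.profile_function(), which the diff calls with an enable flag, the function to run and a log file name. A sketch of what such a helper can look like, reconstructed from the removed lines (the real signature in bb/utils.py may differ; the 'process' argument is ignored here):

    def profile_function(profile, function, logfile, process=True):
        # Run 'function' under cProfile when profiling is requested,
        # dumping the statistics to 'logfile'; otherwise call it directly.
        if not profile:
            return function()
        import cProfile
        prof = cProfile.Profile()
        try:
            return prof.runcall(function)
        finally:
            prof.dump_stats(logfile)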
@@ -2055,15 +2048,19 @@ class Parser(multiprocessing.Process):
2055 pending = [] 2048 pending = []
2056 havejobs = True 2049 havejobs = True
2057 try: 2050 try:
2058 while havejobs or pending: 2051 while (havejobs or pending) and not self.exit:
2059 if self.quit.is_set(): 2052 if self.quit.is_set():
2060 break 2053 break
2061 2054
2062 job = None 2055 job = None
2063 try: 2056 if havejobs:
2064 job = self.jobs.pop() 2057 with self.next_job_id.get_lock():
2065 except IndexError: 2058 if self.next_job_id.value < len(self.jobs):
2066 havejobs = False 2059 job = self.jobs[self.next_job_id.value]
2060 self.next_job_id.value += 1
2061 else:
2062 havejobs = False
2063
2067 if job: 2064 if job:
2068 result = self.parse(*job) 2065 result = self.parse(*job)
2069 # Clear the siggen cache after parsing to control memory usage, it's huge 2066 # Clear the siggen cache after parsing to control memory usage, it's huge
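Instead of pre-chunking the job list per worker, every Parser now receives the full willparse list at fork time and claims the next index from a shared lock-protected counter, so fast workers are never starved while slow ones hold a large chunk. A standalone sketch of that hand-out scheme; worker() is a hypothetical stand-in for Parser.realrun:

    import ctypes
    import multiprocessing

    def worker(jobs, next_job_id, results):
        while True:
            # Claim the next unprocessed index atomically so each job is
            # handled exactly once across all workers.
            with next_job_id.get_lock():
                if next_job_id.value >= len(jobs):
                    return
                job = jobs[next_job_id.value]
                next_job_id.value += 1
            results.put(job * job)  # stand-in for self.parse(*job)

    if __name__ == "__main__":
        jobs = list(range(20))
        next_job_id = multiprocessing.Value(ctypes.c_int, 0)
        results = multiprocessing.Queue()
        procs = [multiprocessing.Process(target=worker,
                                         args=(jobs, next_job_id, results))
                 for _ in range(4)]
        for p in procs:
            p.start()
        print(sorted(results.get() for _ in jobs))
        for p in procs:
            p.join()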
@@ -2098,7 +2095,6 @@ class Parser(multiprocessing.Process):
2098 except Exception as exc: 2095 except Exception as exc:
2099 tb = sys.exc_info()[2] 2096 tb = sys.exc_info()[2]
2100 exc.recipe = filename 2097 exc.recipe = filename
2101 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2102 return True, None, exc 2098 return True, None, exc
2103 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2099 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2104 # and for example a worker thread doesn't just exit on its own in response to 2100 # and for example a worker thread doesn't just exit on its own in response to
@@ -2113,7 +2109,7 @@ class CookerParser(object):
2113 self.mcfilelist = mcfilelist 2109 self.mcfilelist = mcfilelist
2114 self.cooker = cooker 2110 self.cooker = cooker
2115 self.cfgdata = cooker.data 2111 self.cfgdata = cooker.data
2116 self.cfghash = cooker.data_hash 2112 self.cfghash = cooker.databuilder.data_hash
2117 self.cfgbuilder = cooker.databuilder 2113 self.cfgbuilder = cooker.databuilder
2118 2114
2119 # Accounting statistics 2115 # Accounting statistics
@@ -2130,13 +2126,13 @@ class CookerParser(object):
2130 2126
2131 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array) 2127 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2132 self.fromcache = set() 2128 self.fromcache = set()
2133 self.willparse = set() 2129 self.willparse = []
2134 for mc in self.cooker.multiconfigs: 2130 for mc in self.cooker.multiconfigs:
2135 for filename in self.mcfilelist[mc]: 2131 for filename in self.mcfilelist[mc]:
2136 appends = self.cooker.collections[mc].get_file_appends(filename) 2132 appends = self.cooker.collections[mc].get_file_appends(filename)
2137 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2] 2133 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2138 if not self.bb_caches[mc].cacheValid(filename, appends): 2134 if not self.bb_caches[mc].cacheValid(filename, appends):
2139 self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername)) 2135 self.willparse.append((mc, self.bb_caches[mc], filename, appends, layername))
2140 else: 2136 else:
2141 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername)) 2137 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2142 2138
@@ -2155,18 +2151,18 @@ class CookerParser(object):
2155 def start(self): 2151 def start(self):
2156 self.results = self.load_cached() 2152 self.results = self.load_cached()
2157 self.processes = [] 2153 self.processes = []
2154
2158 if self.toparse: 2155 if self.toparse:
2159 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) 2156 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2160 2157
2158 next_job_id = multiprocessing.Value(ctypes.c_int, 0)
2161 self.parser_quit = multiprocessing.Event() 2159 self.parser_quit = multiprocessing.Event()
2162 self.result_queue = multiprocessing.Queue() 2160 self.result_queue = multiprocessing.Queue()
2163 2161
2164 def chunkify(lst,n): 2162 # Have to pass in willparse at fork time so all parsing processes have the unpickleable data,
2165 return [lst[i::n] for i in range(n)] 2163 # then access it by index from the parse queue.
2166 self.jobs = chunkify(list(self.willparse), self.num_processes)
2167
2168 for i in range(0, self.num_processes): 2164 for i in range(0, self.num_processes):
2169 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile) 2165 parser = Parser(self.willparse, next_job_id, self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2170 parser.start() 2166 parser.start()
2171 self.process_names.append(parser.name) 2167 self.process_names.append(parser.name)
2172 self.processes.append(parser) 2168 self.processes.append(parser)
@@ -2193,11 +2189,17 @@ class CookerParser(object):
2193 2189
2194 # Cleanup the queue before call process.join(), otherwise there might be 2190 # Cleanup the queue before call process.join(), otherwise there might be
2195 # deadlocks. 2191 # deadlocks.
2196 while True: 2192 def read_results():
2197 try: 2193 while True:
2198 self.result_queue.get(timeout=0.25) 2194 try:
2199 except queue.Empty: 2195 self.result_queue.get(timeout=0.25)
2200 break 2196 except queue.Empty:
2197 break
2198 except KeyError:
2199 # The state restore from SiggenRecipeInfo in cache.py can
2200 # fail here if this is an unclean shutdown since the state may have been
2201 # reset. Ignore key errors for that reason, we don't care.
2202 pass
2201 2203
2202 def sync_caches(): 2204 def sync_caches():
2203 for c in self.bb_caches.values(): 2205 for c in self.bb_caches.values():
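The draining loop gains a read_results() name because it must now run twice, before and after the join/kill attempts below: a multiprocessing.Queue whose feeder thread still holds buffered items can deadlock process.join(). The pattern in isolation:

    import queue

    def drain(result_queue):
        # Empty the queue so child feeder threads can flush and exit;
        # joining a process with a non-empty queue risks a deadlock.
        while True:
            try:
                result_queue.get(timeout=0.25)
            except queue.Empty:
                break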
@@ -2209,15 +2211,19 @@ class CookerParser(object):
2209 2211
2210 self.parser_quit.set() 2212 self.parser_quit.set()
2211 2213
2214 read_results()
2215
2212 for process in self.processes: 2216 for process in self.processes:
2213 process.join(0.5) 2217 process.join(2)
2214 2218
2215 for process in self.processes: 2219 for process in self.processes:
2216 if process.exitcode is None: 2220 if process.exitcode is None:
2217 os.kill(process.pid, signal.SIGINT) 2221 os.kill(process.pid, signal.SIGINT)
2218 2222
2223 read_results()
2224
2219 for process in self.processes: 2225 for process in self.processes:
2220 process.join(0.5) 2226 process.join(2)
2221 2227
2222 for process in self.processes: 2228 for process in self.processes:
2223 if process.exitcode is None: 2229 if process.exitcode is None:
@@ -2225,9 +2231,8 @@ class CookerParser(object):
2225 2231
2226 for process in self.processes: 2232 for process in self.processes:
2227 process.join() 2233 process.join()
2228 # Added in 3.7, cleans up zombies 2234 # clean up zombies
2229 if hasattr(process, "close"): 2235 process.close()
2230 process.close()
2231 2236
2232 bb.codeparser.parser_cache_save() 2237 bb.codeparser.parser_cache_save()
2233 bb.codeparser.parser_cache_savemerge() 2238 bb.codeparser.parser_cache_savemerge()
@@ -2237,12 +2242,13 @@ class CookerParser(object):
2237 profiles = [] 2242 profiles = []
2238 for i in self.process_names: 2243 for i in self.process_names:
2239 logfile = "profile-parse-%s.log" % i 2244 logfile = "profile-parse-%s.log" % i
2240 if os.path.exists(logfile): 2245 if os.path.exists(logfile) and os.path.getsize(logfile):
2241 profiles.append(logfile) 2246 profiles.append(logfile)
2242 2247
2243 pout = "profile-parse.log.processed" 2248 if profiles:
2244 bb.utils.process_profilelog(profiles, pout = pout) 2249 fn_out = "profile-parse.log.report"
2245 print("Processed parsing statistics saved to %s" % (pout)) 2250 bb.utils.process_profilelog(profiles, fn_out=fn_out)
2251 print("Processed parsing statistics saved to %s" % (fn_out))
2246 2252
2247 def final_cleanup(self): 2253 def final_cleanup(self):
2248 if self.syncthread: 2254 if self.syncthread:
@@ -2274,7 +2280,7 @@ class CookerParser(object):
2274 yield result 2280 yield result
2275 2281
2276 if not (self.parsed >= self.toparse): 2282 if not (self.parsed >= self.toparse):
2277 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None) 2283 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? (%s %s of %s) Exiting." % (len(self.processes), self.parsed, self.toparse), None)
2278 2284
2279 2285
2280 def parse_next(self): 2286 def parse_next(self):
@@ -2299,8 +2305,12 @@ class CookerParser(object):
2299 return False 2305 return False
2300 except ParsingFailure as exc: 2306 except ParsingFailure as exc:
2301 self.error += 1 2307 self.error += 1
2302 logger.error('Unable to parse %s: %s' % 2308
2303 (exc.recipe, bb.exceptions.to_string(exc.realexception))) 2309 exc_desc = str(exc)
2310 if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
2311 exc_desc = 'Exited with "%d"' % exc.code
2312
2313 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2304 self.shutdown(clean=False) 2314 self.shutdown(clean=False)
2305 return False 2315 return False
2306 except bb.parse.ParseError as exc: 2316 except bb.parse.ParseError as exc:
@@ -2309,20 +2319,33 @@ class CookerParser(object):
2309 self.shutdown(clean=False, eventmsg=str(exc)) 2319 self.shutdown(clean=False, eventmsg=str(exc))
2310 return False 2320 return False
2311 except bb.data_smart.ExpansionError as exc: 2321 except bb.data_smart.ExpansionError as exc:
2322 def skip_frames(f, fn_prefix):
2323 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2324 f = f.tb_next
2325 return f
2326
2312 self.error += 1 2327 self.error += 1
2313 bbdir = os.path.dirname(__file__) + os.sep 2328 bbdir = os.path.dirname(__file__) + os.sep
2314 etype, value, _ = sys.exc_info() 2329 etype, value, tb = sys.exc_info()
2315 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) 2330
2331 # Remove any frames where the code comes from bitbake. This
2332 # prevents deep (and pretty useless) backtraces for expansion error
2333 tb = skip_frames(tb, bbdir)
2334 cur = tb
2335 while cur:
2336 cur.tb_next = skip_frames(cur.tb_next, bbdir)
2337 cur = cur.tb_next
2338
2316 logger.error('ExpansionError during parsing %s', value.recipe, 2339 logger.error('ExpansionError during parsing %s', value.recipe,
2317 exc_info=(etype, value, tb)) 2340 exc_info=(etype, value, tb))
2318 self.shutdown(clean=False) 2341 self.shutdown(clean=False)
2319 return False 2342 return False
2320 except Exception as exc: 2343 except Exception as exc:
2321 self.error += 1 2344 self.error += 1
2322 etype, value, tb = sys.exc_info() 2345 _, value, _ = sys.exc_info()
2323 if hasattr(value, "recipe"): 2346 if hasattr(value, "recipe"):
2324 logger.error('Unable to parse %s' % value.recipe, 2347 logger.error('Unable to parse %s' % value.recipe,
2325 exc_info=(etype, value, exc.traceback)) 2348 exc_info=sys.exc_info())
2326 else: 2349 else:
2327 # Most likely, an exception occurred during raising an exception 2350 # Most likely, an exception occurred during raising an exception
2328 import traceback 2351 import traceback
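With bb.exceptions gone, the parser prunes noise from the live traceback object instead: skip_frames() drops leading frames whose source sits under the bitbake directory, and the relinking loop removes interior bitbake frames as well (tb_next has been writable since Python 3.7). The same idea as a standalone helper:

    def strip_frames(tb, fn_prefix):
        # Remove traceback frames whose code lives under fn_prefix,
        # both at the head of the chain and in the middle of it.
        def skip(f):
            while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
                f = f.tb_next
            return f
        tb = skip(tb)
        cur = tb
        while cur:
            cur.tb_next = skip(cur.tb_next)
            cur = cur.tb_next
        return tb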
@@ -2343,7 +2366,7 @@ class CookerParser(object):
2343 for virtualfn, info_array in result: 2366 for virtualfn, info_array in result:
2344 if info_array[0].skipped: 2367 if info_array[0].skipped:
2345 self.skipped += 1 2368 self.skipped += 1
2346 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) 2369 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2347 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2370 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2348 parsed=parsed, watcher = self.cooker.add_filewatch) 2371 parsed=parsed, watcher = self.cooker.add_filewatch)
2349 return True 2372 return True
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 0649e40995..65c153a5bb 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -1,3 +1,4 @@
1
1# 2#
2# Copyright (C) 2003, 2004 Chris Larson 3# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell 4# Copyright (C) 2003, 2004 Phil Blundell
@@ -254,14 +255,21 @@ class CookerDataBuilder(object):
254 self.data = self.basedata 255 self.data = self.basedata
255 self.mcdata = {} 256 self.mcdata = {}
256 257
258 def calc_datastore_hashes(self):
259 data_hash = hashlib.sha256()
260 data_hash.update(self.data.get_hash().encode('utf-8'))
261 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
262 for config in multiconfig:
263 data_hash.update(self.mcdata[config].get_hash().encode('utf-8'))
264 self.data_hash = data_hash.hexdigest()
265
257 def parseBaseConfiguration(self, worker=False): 266 def parseBaseConfiguration(self, worker=False):
258 mcdata = {} 267 mcdata = {}
259 data_hash = hashlib.sha256()
260 try: 268 try:
261 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) 269 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
262 270
263 if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker: 271 servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker
264 bb.fetch.fetcher_init(self.data) 272 bb.fetch.fetcher_init(self.data, servercontext)
265 bb.parse.init_parser(self.data) 273 bb.parse.init_parser(self.data)
266 274
267 bb.event.fire(bb.event.ConfigParsed(), self.data) 275 bb.event.fire(bb.event.ConfigParsed(), self.data)
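calc_datastore_hashes() replaces the digest previously accumulated inline during parseBaseConfiguration(): the combined sha256 covers the base datastore and every BBMULTICONFIG datastore, and the cooker recomputes it whenever a datastore may have been written to. Its logic as a free function, assuming DataSmart-style objects with get_hash():

    import hashlib

    def combined_datastore_hash(data, mcdata):
        # Any configuration write changes the digest, invalidating the
        # caches keyed on it.
        h = hashlib.sha256()
        h.update(data.get_hash().encode('utf-8'))
        for config in (data.getVar("BBMULTICONFIG") or "").split():
            h.update(mcdata[config].get_hash().encode('utf-8'))
        return h.hexdigest()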
@@ -279,7 +287,6 @@ class CookerDataBuilder(object):
279 bb.event.fire(bb.event.ConfigParsed(), self.data) 287 bb.event.fire(bb.event.ConfigParsed(), self.data)
280 288
281 bb.parse.init_parser(self.data) 289 bb.parse.init_parser(self.data)
282 data_hash.update(self.data.get_hash().encode('utf-8'))
283 mcdata[''] = self.data 290 mcdata[''] = self.data
284 291
285 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() 292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
@@ -289,11 +296,9 @@ class CookerDataBuilder(object):
289 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) 296 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
290 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata) 297 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
291 mcdata[config] = parsed_mcdata 298 mcdata[config] = parsed_mcdata
292 data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
293 if multiconfig: 299 if multiconfig:
294 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data) 300 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
295 301
296 self.data_hash = data_hash.hexdigest()
297 except bb.data_smart.ExpansionError as e: 302 except bb.data_smart.ExpansionError as e:
298 logger.error(str(e)) 303 logger.error(str(e))
299 raise bb.BBHandledException() 304 raise bb.BBHandledException()
@@ -328,6 +333,7 @@ class CookerDataBuilder(object):
328 for mc in mcdata: 333 for mc in mcdata:
329 self.mcdata[mc] = bb.data.createCopy(mcdata[mc]) 334 self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
330 self.data = self.mcdata[''] 335 self.data = self.mcdata['']
336 self.calc_datastore_hashes()
331 337
332 def reset(self): 338 def reset(self):
333 # We may not have run parseBaseConfiguration() yet 339 # We may not have run parseBaseConfiguration() yet
@@ -340,7 +346,7 @@ class CookerDataBuilder(object):
340 def _findLayerConf(self, data): 346 def _findLayerConf(self, data):
341 return findConfigFile("bblayers.conf", data) 347 return findConfigFile("bblayers.conf", data)
342 348
343 def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"): 349 def parseConfigurationFiles(self, prefiles, postfiles, mc = ""):
344 data = bb.data.createCopy(self.basedata) 350 data = bb.data.createCopy(self.basedata)
345 data.setVar("BB_CURRENT_MC", mc) 351 data.setVar("BB_CURRENT_MC", mc)
346 352
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 505f42950f..f672a84451 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -293,7 +293,7 @@ def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_va
293 if key in mod_funcs: 293 if key in mod_funcs:
294 exclusions = set() 294 exclusions = set()
295 moddep = bb.codeparser.modulecode_deps[key] 295 moddep = bb.codeparser.modulecode_deps[key]
296 value = handle_contains("", moddep[3], exclusions, d) 296 value = handle_contains(moddep[4], moddep[3], exclusions, d)
297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value 297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
298 298
299 if key[-1] == ']': 299 if key[-1] == ']':
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 0128a5bb17..2e0d308588 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -24,6 +24,7 @@ from collections.abc import MutableMapping
24import logging 24import logging
25import hashlib 25import hashlib
26import bb, bb.codeparser 26import bb, bb.codeparser
27import bb.filter
27from bb import utils 28from bb import utils
28from bb.COW import COWDictBase 29from bb.COW import COWDictBase
29 30
@@ -31,7 +32,7 @@ logger = logging.getLogger("BitBake.Data")
31 32
32__setvar_keyword__ = [":append", ":prepend", ":remove"] 33__setvar_keyword__ = [":append", ":prepend", ":remove"]
33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$') 34__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}") 35__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}") 36__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
36__whitespace_split__ = re.compile(r'(\s)') 37__whitespace_split__ = re.compile(r'(\s)')
37__override_regexp__ = re.compile(r'[a-z0-9]+') 38__override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -106,52 +107,52 @@ class VariableParse:
106 self.contains = {} 107 self.contains = {}
107 108
108 def var_sub(self, match): 109 def var_sub(self, match):
109 key = match.group()[2:-1] 110 key = match.group()[2:-1]
110 if self.varname and key: 111 if self.varname and key:
111 if self.varname == key: 112 if self.varname == key:
112 raise Exception("variable %s references itself!" % self.varname) 113 raise Exception("variable %s references itself!" % self.varname)
113 var = self.d.getVarFlag(key, "_content") 114 var = self.d.getVarFlag(key, "_content")
114 self.references.add(key) 115 self.references.add(key)
115 if var is not None: 116 if var is not None:
116 return var 117 return var
117 else: 118 else:
118 return match.group() 119 return match.group()
119 120
120 def python_sub(self, match): 121 def python_sub(self, match):
121 if isinstance(match, str): 122 if isinstance(match, str):
122 code = match 123 code = match
123 else: 124 else:
124 code = match.group()[3:-1] 125 code = match.group()[3:-1]
125 126
126 # Do not run code that contains one or more unexpanded variables 127 # Do not run code that contains one or more unexpanded variables
127 # instead return the code with the characters we removed put back 128 # instead return the code with the characters we removed put back
128 if __expand_var_regexp__.findall(code): 129 if __expand_var_regexp__.findall(code):
129 return "${@" + code + "}" 130 return "${@" + code + "}"
130 131
131 if self.varname: 132 if self.varname:
132 varname = 'Var <%s>' % self.varname 133 varname = 'Var <%s>' % self.varname
133 else: 134 else:
134 varname = '<expansion>' 135 varname = '<expansion>'
135 codeobj = compile(code.strip(), varname, "eval") 136 codeobj = compile(code.strip(), varname, "eval")
136 137
137 parser = bb.codeparser.PythonParser(self.varname, logger) 138 parser = bb.codeparser.PythonParser(self.varname, logger)
138 parser.parse_python(code) 139 parser.parse_python(code)
139 if self.varname: 140 if self.varname:
140 vardeps = self.d.getVarFlag(self.varname, "vardeps") 141 vardeps = self.d.getVarFlag(self.varname, "vardeps")
141 if vardeps is None: 142 if vardeps is None:
142 parser.log.flush()
143 else:
144 parser.log.flush() 143 parser.log.flush()
145 self.references |= parser.references 144 else:
146 self.execs |= parser.execs 145 parser.log.flush()
146 self.references |= parser.references
147 self.execs |= parser.execs
147 148
148 for k in parser.contains: 149 for k in parser.contains:
149 if k not in self.contains: 150 if k not in self.contains:
150 self.contains[k] = parser.contains[k].copy() 151 self.contains[k] = parser.contains[k].copy()
151 else: 152 else:
152 self.contains[k].update(parser.contains[k]) 153 self.contains[k].update(parser.contains[k])
153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) 154 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
154 return str(value) 155 return str(value)
155 156
156class DataContext(dict): 157class DataContext(dict):
157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe']) 158 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
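For context on the python_sub() hunk above: ${@...} code that still contains unexpanded ${VAR} references is not executed; the original text is reassembled and returned so expansion can succeed on a later pass. A toy version of that guard using the module's variable regexp (eval() here is purely illustrative; bitbake uses compile plus better_eval):

    import re

    expand_var_re = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")

    def maybe_eval(code, env):
        # Defer evaluation while the snippet still references ${VARS}.
        if expand_var_re.findall(code):
            return "${@" + code + "}"
        return str(eval(code, {}, env))

    print(maybe_eval("x + 1", {"x": 2}))           # "3"
    print(maybe_eval("'${PN}' + '-native'", {}))   # returned unevaluated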
@@ -272,12 +273,9 @@ class VariableHistory(object):
272 return 273 return
273 if 'op' not in loginfo or not loginfo['op']: 274 if 'op' not in loginfo or not loginfo['op']:
274 loginfo['op'] = 'set' 275 loginfo['op'] = 'set'
275 if 'detail' in loginfo:
276 loginfo['detail'] = str(loginfo['detail'])
277 if 'variable' not in loginfo or 'file' not in loginfo: 276 if 'variable' not in loginfo or 'file' not in loginfo:
278 raise ValueError("record() missing variable or file.") 277 raise ValueError("record() missing variable or file.")
279 var = loginfo['variable'] 278 var = loginfo['variable']
280
281 if var not in self.variables: 279 if var not in self.variables:
282 self.variables[var] = [] 280 self.variables[var] = []
283 if not isinstance(self.variables[var], list): 281 if not isinstance(self.variables[var], list):
@@ -336,7 +334,8 @@ class VariableHistory(object):
336 flag = '[%s] ' % (event['flag']) 334 flag = '[%s] ' % (event['flag'])
337 else: 335 else:
338 flag = '' 336 flag = ''
339 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) 337 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \
338 (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail']))))
340 if len(history) > 1: 339 if len(history) > 1:
341 o.write("# pre-expansion value:\n") 340 o.write("# pre-expansion value:\n")
342 o.write('# "%s"\n' % (commentVal)) 341 o.write('# "%s"\n' % (commentVal))
@@ -390,7 +389,7 @@ class VariableHistory(object):
390 if isset and event['op'] == 'set?': 389 if isset and event['op'] == 'set?':
391 continue 390 continue
392 isset = True 391 isset = True
393 items = d.expand(event['detail']).split() 392 items = d.expand(str(event['detail'])).split()
394 for item in items: 393 for item in items:
395 # This is a little crude but is belt-and-braces to avoid us 394 # This is a little crude but is belt-and-braces to avoid us
396 # having to handle every possible operation type specifically 395 # having to handle every possible operation type specifically
@@ -429,6 +428,7 @@ class DataSmart(MutableMapping):
429 428
430 self.inchistory = IncludeHistory() 429 self.inchistory = IncludeHistory()
431 self.varhistory = VariableHistory(self) 430 self.varhistory = VariableHistory(self)
431 self.filters = {}
432 self._tracking = False 432 self._tracking = False
433 self._var_renames = {} 433 self._var_renames = {}
434 self._var_renames.update(bitbake_renamed_vars) 434 self._var_renames.update(bitbake_renamed_vars)
@@ -582,12 +582,10 @@ class DataSmart(MutableMapping):
582 else: 582 else:
583 loginfo['op'] = keyword 583 loginfo['op'] = keyword
584 self.varhistory.record(**loginfo) 584 self.varhistory.record(**loginfo)
585 # todo make sure keyword is not __doc__ or __module__
586 # pay the cookie monster 585 # pay the cookie monster
587 586
588 # more cookies for the cookie monster 587 # more cookies for the cookie monster
589 if ':' in var: 588 self._setvar_update_overrides(base, **loginfo)
590 self._setvar_update_overrides(base, **loginfo)
591 589
592 if base in self.overridevars: 590 if base in self.overridevars:
593 self._setvar_update_overridevars(var, value) 591 self._setvar_update_overridevars(var, value)
@@ -640,6 +638,7 @@ class DataSmart(MutableMapping):
640 nextnew.update(vardata.contains.keys()) 638 nextnew.update(vardata.contains.keys())
641 new = nextnew 639 new = nextnew
642 self.overrides = None 640 self.overrides = None
641 self.expand_cache = {}
643 642
644 def _setvar_update_overrides(self, var, **loginfo): 643 def _setvar_update_overrides(self, var, **loginfo):
645 # aka pay the cookie monster 644 # aka pay the cookie monster
@@ -681,6 +680,7 @@ class DataSmart(MutableMapping):
681 680
682 srcflags = self.getVarFlags(key, False, True) or {} 681 srcflags = self.getVarFlags(key, False, True) or {}
683 for i in srcflags: 682 for i in srcflags:
683
684 if i not in (__setvar_keyword__): 684 if i not in (__setvar_keyword__):
685 continue 685 continue
686 src = srcflags[i] 686 src = srcflags[i]
@@ -829,6 +829,8 @@ class DataSmart(MutableMapping):
829 value = copy.copy(local_var[flag]) 829 value = copy.copy(local_var[flag])
830 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: 830 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
831 value = copy.copy(local_var["_defaultval"]) 831 value = copy.copy(local_var["_defaultval"])
832 elif "_defaultval_flag_"+flag in local_var and not noweakdefault:
833 value = copy.copy(local_var["_defaultval_flag_"+flag])
832 834
833 835
834 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing: 836 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
@@ -896,6 +898,12 @@ class DataSmart(MutableMapping):
896 if expand: 898 if expand:
897 value = parser.value 899 value = parser.value
898 900
901 if value and expand and flag == "_content":
902 basevar = var.split(":")[0]
903 if basevar in self.filters:
904 value = bb.filter.apply_filters(value, [self.filters[basevar],])
905 parser.value = value
906
899 if parser: 907 if parser:
900 self.expand_cache[cachename] = parser 908 self.expand_cache[cachename] = parser
901 909
@@ -920,6 +928,8 @@ class DataSmart(MutableMapping):
920 self.varhistory.record(**loginfo) 928 self.varhistory.record(**loginfo)
921 929
922 del self.dict[var][flag] 930 del self.dict[var][flag]
931 if ("_defaultval_flag_" + flag) in self.dict[var]:
932 del self.dict[var]["_defaultval_flag_" + flag]
923 933
924 def appendVarFlag(self, var, flag, value, **loginfo): 934 def appendVarFlag(self, var, flag, value, **loginfo):
925 loginfo['op'] = 'append' 935 loginfo['op'] = 'append'
@@ -954,17 +964,22 @@ class DataSmart(MutableMapping):
954 flags = {} 964 flags = {}
955 965
956 if local_var: 966 if local_var:
957 for i in local_var: 967 for i, val in local_var.items():
958 if i.startswith(("_", ":")) and not internalflags: 968 if i.startswith("_defaultval_flag_") and not internalflags:
969 i = i[len("_defaultval_flag_"):]
970 if i not in local_var:
971 flags[i] = val
972 elif i.startswith(("_", ":")) and not internalflags:
959 continue 973 continue
960 flags[i] = local_var[i] 974 else:
975 flags[i] = val
976
961 if expand and i in expand: 977 if expand and i in expand:
962 flags[i] = self.expand(flags[i], var + "[" + i + "]") 978 flags[i] = self.expand(flags[i], var + "[" + i + "]")
963 if len(flags) == 0: 979 if len(flags) == 0:
964 return None 980 return None
965 return flags 981 return flags
966 982
967
968 def delVarFlags(self, var, **loginfo): 983 def delVarFlags(self, var, **loginfo):
969 self.expand_cache = {} 984 self.expand_cache = {}
970 if not var in self.dict: 985 if not var in self.dict:
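The hunks above introduce weak flag defaults, stored beside the real flags under _defaultval_flag_<name> keys: getVarFlag falls back to the default only when no explicit flag exists, delVarFlag clears the shadow entry too, and getVarFlags exposes a default only if it is not overridden. A toy model of the lookup order:

    PREFIX = "_defaultval_flag_"

    def get_flag(var_dict, flag):
        # An explicitly set flag always wins over its weak default.
        if flag in var_dict:
            return var_dict[flag]
        return var_dict.get(PREFIX + flag)

    flags = {"doc": "explicit", PREFIX + "doc": "default",
             PREFIX + "type": "string"}
    print(get_flag(flags, "doc"))    # explicit
    print(get_flag(flags, "type"))   # string (weak default applies)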
@@ -994,6 +1009,7 @@ class DataSmart(MutableMapping):
994 data.varhistory = self.varhistory.copy() 1009 data.varhistory = self.varhistory.copy()
995 data.varhistory.dataroot = data 1010 data.varhistory.dataroot = data
996 data.inchistory = self.inchistory.copy() 1011 data.inchistory = self.inchistory.copy()
1012 data.filters = self.filters.copy()
997 1013
998 data._tracking = self._tracking 1014 data._tracking = self._tracking
999 data._var_renames = self._var_renames 1015 data._var_renames = self._var_renames
@@ -1022,6 +1038,15 @@ class DataSmart(MutableMapping):
1022 if referrervalue and isinstance(referrervalue, str) and ref in referrervalue: 1038 if referrervalue and isinstance(referrervalue, str) and ref in referrervalue:
1023 self.setVar(key, referrervalue.replace(ref, value)) 1039 self.setVar(key, referrervalue.replace(ref, value))
1024 1040
1041 def setVarFilter(self, var, filter):
1042 if filter:
1043 self.filters[var] = filter
1044 else:
1045 try:
1046 del self.filters[var]
1047 except KeyError:
1048 pass
1049
1025 def localkeys(self): 1050 def localkeys(self):
1026 for key in self.dict: 1051 for key in self.dict:
1027 if key not in ['_data']: 1052 if key not in ['_data']:
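setVarFilter() registers a per-variable filter that getVarFlag applies to the expanded _content of the base variable, and createCopy() propagates the filters dict to datastore copies. bb/filter.py is new in this series and its filter syntax is not shown here, so this sketch uses plain callables as stand-ins for bb.filter.apply_filters():

    class MiniData:
        def __init__(self):
            self.values = {}
            self.filters = {}

        def setVarFilter(self, var, flt):
            # A false filter value unregisters, as in the diff above.
            if flt:
                self.filters[var] = flt
            else:
                self.filters.pop(var, None)

        def getVar(self, var):
            value = self.values.get(var)
            basevar = var.split(":")[0]
            if value is not None and basevar in self.filters:
                value = self.filters[basevar](value)
            return value

    d = MiniData()
    d.values["PV"] = "1.2.3+git"
    d.setVarFilter("PV", lambda v: v.split("+")[0])
    print(d.getVar("PV"))  # 1.2.3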
@@ -1114,5 +1139,10 @@ class DataSmart(MutableMapping):
1114 value = d.getVar(i, False) or "" 1139 value = d.getVar(i, False) or ""
1115 data.update({i:value}) 1140 data.update({i:value})
1116 1141
1142 moddeps = bb.codeparser.modulecode_deps
1143 for dep in sorted(moddeps):
1144 # Ignore visitor code, sort sets
1145 data.update({'moddep[%s]' % dep : [sorted(moddeps[dep][0]), sorted(moddeps[dep][1]), sorted(moddeps[dep][2]), sorted(moddeps[dep][3]), moddeps[dep][4]]})
1146
1117 data_str = str([(k, data[k]) for k in sorted(data.keys())]) 1147 data_str = str([(k, data[k]) for k in sorted(data.keys())])
1118 return hashlib.sha256(data_str.encode("utf-8")).hexdigest() 1148 return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 4761c86880..ddf1006c29 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
19import threading 19import threading
20import traceback 20import traceback
21 21
22import bb.exceptions
23import bb.utils 22import bb.utils
24 23
25# This is the pid for which we should generate the event. This is set when 24# This is the pid for which we should generate the event. This is set when
@@ -91,7 +90,7 @@ def disable_heartbeat():
91# In long running code, this function should be called periodically 90# In long running code, this function should be called periodically
92# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI) 91# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
93# 92#
94def check_for_interrupts(d): 93def check_for_interrupts():
95 global _should_exit 94 global _should_exit
96 if _should_exit.is_set(): 95 if _should_exit.is_set():
97 bb.warn("Exiting due to interrupt.") 96 bb.warn("Exiting due to interrupt.")
@@ -195,7 +194,12 @@ def fire_ui_handlers(event, d):
195 ui_queue.append(event) 194 ui_queue.append(event)
196 return 195 return
197 196
198 with bb.utils.lock_timeout(_thread_lock): 197 with bb.utils.lock_timeout_nocheck(_thread_lock) as lock:
198 if not lock:
199 # If we can't get the lock, we may be recursively called, queue and return
200 ui_queue.append(event)
201 return
202
199 errors = [] 203 errors = []
200 for h in _ui_handlers: 204 for h in _ui_handlers:
201 #print "Sending event %s" % event 205 #print "Sending event %s" % event
@@ -214,6 +218,9 @@ def fire_ui_handlers(event, d):
214 for h in errors: 218 for h in errors:
215 del _ui_handlers[h] 219 del _ui_handlers[h]
216 220
221 while ui_queue:
222 fire_ui_handlers(ui_queue.pop(), d)
223
217def fire(event, d): 224def fire(event, d):
218 """Fire off an Event""" 225 """Fire off an Event"""
219 226
@@ -229,9 +236,12 @@ def fire(event, d):
229 # If messages have been queued up, clear the queue 236 # If messages have been queued up, clear the queue
230 global _uiready, ui_queue 237 global _uiready, ui_queue
231 if _uiready and ui_queue: 238 if _uiready and ui_queue:
232 for queue_event in ui_queue: 239 with bb.utils.lock_timeout_nocheck(_thread_lock):
240 queue = ui_queue
241 ui_queue = []
242 for queue_event in queue:
233 fire_ui_handlers(queue_event, d) 243 fire_ui_handlers(queue_event, d)
234 ui_queue = [] 244
235 fire_ui_handlers(event, d) 245 fire_ui_handlers(event, d)
236 246
237def fire_from_worker(event, d): 247def fire_from_worker(event, d):
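The two event.py hunks above make UI dispatch safe against re-entry: fire_ui_handlers() takes the thread lock with a no-wait variant and queues the event if the lock is already held (a handler fired another event), then the lock holder drains ui_queue once its own dispatch finishes. The shape of that pattern with a plain threading.Lock:

    import threading

    _lock = threading.Lock()
    ui_queue = []

    def fire_ui(event, handlers):
        # Re-entrant call (lock already held on this stack)? Queue the
        # event and let the outer call deliver it after its handlers run.
        if not _lock.acquire(blocking=False):
            ui_queue.append(event)
            return
        try:
            for h in handlers:
                h(event)
        finally:
            _lock.release()
        while ui_queue:
            fire_ui(ui_queue.pop(), handlers)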
@@ -424,6 +434,16 @@ class RecipeEvent(Event):
424 self.fn = fn 434 self.fn = fn
425 Event.__init__(self) 435 Event.__init__(self)
426 436
437class RecipePreDeferredInherits(RecipeEvent):
438 """
439 Called before deferred inherits are processed so code can, for example, snoop on class extensions
440 Limitations: It won't see inherits of inherited classes and the data is unexpanded
441 """
442 def __init__(self, fn, inherits):
443 self.fn = fn
444 self.inherits = inherits
445 Event.__init__(self)
446
427class RecipePreFinalise(RecipeEvent): 447class RecipePreFinalise(RecipeEvent):
428 """ Recipe Parsing Complete but not yet finalised""" 448 """ Recipe Parsing Complete but not yet finalised"""
429 449
@@ -759,13 +779,7 @@ class LogHandler(logging.Handler):
759 779
760 def emit(self, record): 780 def emit(self, record):
761 if record.exc_info: 781 if record.exc_info:
762 etype, value, tb = record.exc_info 782 record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
763 if hasattr(tb, 'tb_next'):
764 tb = list(bb.exceptions.extract_traceback(tb, context=3))
765 # Need to turn the value into something the logging system can pickle
766 record.bb_exc_info = (etype, value, tb)
767 record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
768 value = str(value)
769 record.exc_info = None 783 record.exc_info = None
770 fire(record, None) 784 fire(record, None)
771 785
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index 801db9c82f..0000000000
--- a/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,96 +0,0 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import inspect
8import traceback
9import bb.namedtuple_with_abc
10from collections import namedtuple
11
12
13class TracebackEntry(namedtuple.abc):
14 """Pickleable representation of a traceback entry"""
15 _fields = 'filename lineno function args code_context index'
16 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
17
18 def format(self, formatter=None):
19 if not self.code_context:
20 return self._header.format(self) + '\n'
21
22 formatted = [self._header.format(self) + ':\n']
23
24 for lineindex, line in enumerate(self.code_context):
25 if formatter:
26 line = formatter(line)
27
28 if lineindex == self.index:
29 formatted.append(' >%s' % line)
30 else:
31 formatted.append(' %s' % line)
32 return formatted
33
34 def __str__(self):
35 return ''.join(self.format())
36
37def _get_frame_args(frame):
38 """Get the formatted arguments and class (if available) for a frame"""
39 arginfo = inspect.getargvalues(frame)
40
41 try:
42 if not arginfo.args:
43 return '', None
44 # There have been reports from the field of python 2.6 which doesn't
45 # return a namedtuple here but simply a tuple so fallback gracefully if
46 # args isn't present.
47 except AttributeError:
48 return '', None
49
50 firstarg = arginfo.args[0]
51 if firstarg == 'self':
52 self = arginfo.locals['self']
53 cls = self.__class__.__name__
54
55 arginfo.args.pop(0)
56 del arginfo.locals['self']
57 else:
58 cls = None
59
60 formatted = inspect.formatargvalues(*arginfo)
61 return formatted, cls
62
63def extract_traceback(tb, context=1):
64 frames = inspect.getinnerframes(tb, context)
65 for frame, filename, lineno, function, code_context, index in frames:
66 formatted_args, cls = _get_frame_args(frame)
67 if cls:
68 function = '%s.%s' % (cls, function)
69 yield TracebackEntry(filename, lineno, function, formatted_args,
70 code_context, index)
71
72def format_extracted(extracted, formatter=None, limit=None):
73 if limit:
74 extracted = extracted[-limit:]
75
76 formatted = []
77 for tracebackinfo in extracted:
78 formatted.extend(tracebackinfo.format(formatter))
79 return formatted
80
81
82def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
83 formatted = ['Traceback (most recent call last):\n']
84
85 if hasattr(tb, 'tb_next'):
86 tb = extract_traceback(tb, context)
87
88 formatted.extend(format_extracted(tb, formatter, limit))
89 formatted.extend(traceback.format_exception_only(etype, value))
90 return formatted
91
92def to_string(exc):
93 if isinstance(exc, SystemExit):
94 if not isinstance(exc.code, str):
95 return 'Exited with "%d"' % exc.code
96 return str(exc)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections
23import subprocess 23import subprocess
24import pickle 24import pickle
25import errno 25import errno
26import bb.persist_data, bb.utils 26import bb.utils
27import bb.checksum 27import bb.checksum
28import bb.process 28import bb.process
29import bb.event 29import bb.event
30 30
31__version__ = "2" 31__version__ = "2"
32_checksum_cache = bb.checksum.FileChecksumCache() 32_checksum_cache = bb.checksum.FileChecksumCache()
33_revisions_cache = bb.checksum.RevisionsCache()
33 34
34logger = logging.getLogger("BitBake.Fetcher") 35logger = logging.getLogger("BitBake.Fetcher")
35 36
36CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] 37CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
37SHOWN_CHECKSUM_LIST = ["sha256"] 38SHOWN_CHECKSUM_LIST = ["sha256"]
38 39
39class BBFetchException(Exception): 40class BBFetchException(Exception):
@@ -237,7 +238,7 @@ class URI(object):
237 # to RFC compliant URL format. E.g.: 238 # to RFC compliant URL format. E.g.:
238 # file://foo.diff -> file:foo.diff 239 # file://foo.diff -> file:foo.diff
239 if urlp.scheme in self._netloc_forbidden: 240 if urlp.scheme in self._netloc_forbidden:
240 uri = re.sub("(?<=:)//(?!/)", "", uri, 1) 241 uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
241 reparse = 1 242 reparse = 1
242 243
243 if reparse: 244 if reparse:
@@ -352,6 +353,14 @@ def decodeurl(url):
352 user, password, parameters). 353 user, password, parameters).
353 """ 354 """
354 355
356 uri = URI(url)
357 path = uri.path if uri.path else "/"
358 return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
359
360def decodemirrorurl(url):
361 """Decodes a mirror URL into the tokens (scheme, network location, path,
362 user, password, parameters).
363 """
355 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) 364 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
356 if not m: 365 if not m:
357 raise MalformedUrl(url) 366 raise MalformedUrl(url)
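decodeurl() is now a thin wrapper over the URI class, while the old regex parser lives on as decodemirrorurl() for mirror specifications that may not be RFC-clean. Expected token layout, assuming bitbake's lib directory is on sys.path:

    from bb.fetch2 import decodeurl

    scheme, host, path, user, pswd, params = decodeurl(
        "git://git.example.com/repo.git;protocol=https;branch=main")
    # scheme == "git", host == "git.example.com", path == "/repo.git"
    # params == {"protocol": "https", "branch": "main"}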
@@ -370,6 +379,9 @@ def decodeurl(url):
370 elif type.lower() == 'file': 379 elif type.lower() == 'file':
371 host = "" 380 host = ""
372 path = location 381 path = location
382 if user:
383 path = user + '@' + path
384 user = ""
373 else: 385 else:
374 host = location 386 host = location
375 path = "/" 387 path = "/"
@@ -402,32 +414,34 @@ def encodeurl(decoded):
402 414
403 if not type: 415 if not type:
404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) 416 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
405 url = ['%s://' % type] 417 uri = URI()
418 uri.scheme = type
406 if user and type != "file": 419 if user and type != "file":
407 url.append("%s" % user) 420 uri.username = user
408 if pswd: 421 if pswd:
409 url.append(":%s" % pswd) 422 uri.password = pswd
410 url.append("@")
411 if host and type != "file": 423 if host and type != "file":
412 url.append("%s" % host) 424 uri.hostname = host
413 if path: 425 if path:
414 # Standardise path to ensure comparisons work 426 # Standardise path to ensure comparisons work
415 while '//' in path: 427 while '//' in path:
416 path = path.replace("//", "/") 428 path = path.replace("//", "/")
417 url.append("%s" % urllib.parse.quote(path)) 429 uri.path = path
430 if type == "file":
431 # Use old not IETF compliant style
432 uri.relative = False
418 if p: 433 if p:
419 for parm in p: 434 uri.params = p
420 url.append(";%s=%s" % (parm, p[parm]))
421 435
422 return "".join(url) 436 return str(uri)
423 437
424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): 438def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
425 if not ud.url or not uri_find or not uri_replace: 439 if not ud.url or not uri_find or not uri_replace:
426 logger.error("uri_replace: passed an undefined value, not replacing") 440 logger.error("uri_replace: passed an undefined value, not replacing")
427 return None 441 return None
428 uri_decoded = list(decodeurl(ud.url)) 442 uri_decoded = list(decodemirrorurl(ud.url))
429 uri_find_decoded = list(decodeurl(uri_find)) 443 uri_find_decoded = list(decodemirrorurl(uri_find))
430 uri_replace_decoded = list(decodeurl(uri_replace)) 444 uri_replace_decoded = list(decodemirrorurl(uri_replace))
431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) 445 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
432 result_decoded = ['', '', '', '', '', {}] 446 result_decoded = ['', '', '', '', '', {}]
433 # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params 447 # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params
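encodeurl() is rebuilt on the same URI class (relative=False keeps the legacy non-IETF file:// form), so decode/encode round-trips stay stable, which the uri_replace() comparison logic relies on. A quick check, hedged since parameter ordering and quoting could differ for unusual URLs:

    from bb.fetch2 import decodeurl, encodeurl

    url = "git://git.example.com/repo.git;protocol=https;branch=main"
    print(encodeurl(decodeurl(url)))  # expected to reproduce 'url'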
@@ -460,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
460 for k in replacements: 474 for k in replacements:
461 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) 475 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
462 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) 476 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
463 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) 477 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
464 if loc == 2: 478 if loc == 2:
465 # Handle path manipulations 479 # Handle path manipulations
466 basename = None 480 basename = None
@@ -493,18 +507,23 @@ methods = []
493urldata_cache = {} 507urldata_cache = {}
494saved_headrevs = {} 508saved_headrevs = {}
495 509
496def fetcher_init(d): 510def fetcher_init(d, servercontext=True):
497 """ 511 """
498 Called to initialize the fetchers once the configuration data is known. 512 Called to initialize the fetchers once the configuration data is known.
499 Calls before this must not hit the cache. 513 Calls before this must not hit the cache.
500 """ 514 """
501 515
502 revs = bb.persist_data.persist('BB_URI_HEADREVS', d) 516 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
517 _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
518
519 if not servercontext:
520 return
521
503 try: 522 try:
504 # fetcher_init is called multiple times, so make sure we only save the 523 # fetcher_init is called multiple times, so make sure we only save the
505 # revs the first time it is called. 524 # revs the first time it is called.
506 if not bb.fetch2.saved_headrevs: 525 if not bb.fetch2.saved_headrevs:
507 bb.fetch2.saved_headrevs = dict(revs) 526 bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
508 except: 527 except:
509 pass 528 pass
510 529
@@ -514,11 +533,10 @@ def fetcher_init(d):
514 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) 533 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
515 elif srcrev_policy == "clear": 534 elif srcrev_policy == "clear":
516 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) 535 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
517 revs.clear() 536 _revisions_cache.clear_cache()
518 else: 537 else:
519 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) 538 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
520 539
521 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
522 540
523 for m in methods: 541 for m in methods:
524 if hasattr(m, "init"): 542 if hasattr(m, "init"):
@@ -526,9 +544,11 @@ def fetcher_init(d):
526 544
527def fetcher_parse_save(): 545def fetcher_parse_save():
528 _checksum_cache.save_extras() 546 _checksum_cache.save_extras()
547 _revisions_cache.save_extras()
529 548
530def fetcher_parse_done(): 549def fetcher_parse_done():
531 _checksum_cache.save_merge() 550 _checksum_cache.save_merge()
551 _revisions_cache.save_merge()
532 552
533def fetcher_compare_revisions(d): 553def fetcher_compare_revisions(d):
534 """ 554 """
@@ -536,7 +556,7 @@ def fetcher_compare_revisions(d):
536 when bitbake was started and return true if they have changed. 556 when bitbake was started and return true if they have changed.
537 """ 557 """
538 558
539 headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) 559 headrevs = _revisions_cache.get_revs()
540 return headrevs != bb.fetch2.saved_headrevs 560 return headrevs != bb.fetch2.saved_headrevs
541 561
542def mirror_from_string(data): 562def mirror_from_string(data):
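
The check in fetcher_compare_revisions above is a plain dict equality test between the startup snapshot and the live cache; a sketch with hypothetical keys:

    saved_headrevs = {"git:example.com.repo": "2ae0307"}  # snapshot at startup
    headrevs = {"git:example.com.repo": "9f1b2c3"}        # current cache state
    changed = headrevs != saved_headrevs                  # True: a head moved
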
@@ -786,8 +806,8 @@ def _get_srcrev(d, method_name='sortable_revision'):
786 return "", revs 806 return "", revs
787 807
788 808
789 if len(scms) == 1 and len(urldata[scms[0]].names) == 1: 809 if len(scms) == 1:
790 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) 810 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
791 revs.append(rev) 811 revs.append(rev)
792 if len(rev) > 10: 812 if len(rev) > 10:
793 rev = rev[:10] 813 rev = rev[:10]
@@ -808,13 +828,12 @@ def _get_srcrev(d, method_name='sortable_revision'):
808 seenautoinc = False 828 seenautoinc = False
809 for scm in scms: 829 for scm in scms:
810 ud = urldata[scm] 830 ud = urldata[scm]
811 for name in ud.names: 831 autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
812 autoinc, rev = getattr(ud.method, method_name)(ud, d, name) 832 revs.append(rev)
813 revs.append(rev) 833 seenautoinc = seenautoinc or autoinc
814 seenautoinc = seenautoinc or autoinc 834 if len(rev) > 10:
815 if len(rev) > 10: 835 rev = rev[:10]
816 rev = rev[:10] 836 name_to_rev[ud.name] = rev
817 name_to_rev[name] = rev
818 # Replace names by revisions in the SRCREV_FORMAT string. The approach used 837 # Replace names by revisions in the SRCREV_FORMAT string. The approach used
819 # here can handle names being prefixes of other names and names appearing 838 # here can handle names being prefixes of other names and names appearing
820 # as substrings in revisions (in which case the name should not be 839 # as substrings in revisions (in which case the name should not be
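
One way to make that substitution robust (a sketch, not necessarily the exact implementation): substitute longer names first via unique placeholder tokens, so a shorter name can never match inside an already-inserted revision:

    name_to_rev = {"mach": "0123abc", "machine": "4567def"}
    format_str = "mach_machine"  # hypothetical SRCREV_FORMAT value
    tokens = {}
    for i, name in enumerate(sorted(name_to_rev, key=len, reverse=True)):
        token = "\x00%d\x00" % i  # cannot occur in names or revisions
        tokens[token] = name_to_rev[name]
        format_str = format_str.replace(name, token)
    for token, rev in tokens.items():
        format_str = format_str.replace(token, rev)
    # -> "0123abc_4567def"
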
@@ -878,6 +897,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
878 'AWS_SESSION_TOKEN', 897 'AWS_SESSION_TOKEN',
879 'GIT_CACHE_PATH', 898 'GIT_CACHE_PATH',
880 'REMOTE_CONTAINERS_IPC', 899 'REMOTE_CONTAINERS_IPC',
900 'GITHUB_TOKEN',
881 'SSL_CERT_DIR'] 901 'SSL_CERT_DIR']
882 902
883def get_fetcher_environment(d): 903def get_fetcher_environment(d):
@@ -1072,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1072 # If that tarball is a local file:// we need to provide a symlink to it 1092 # If that tarball is a local file:// we need to provide a symlink to it
1073 dldir = ld.getVar("DL_DIR") 1093 dldir = ld.getVar("DL_DIR")
1074 1094
1095 if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
1096 ld = ld.createCopy()
1097 ld.setVar("BB_NO_NETWORK", "1")
1098
1075 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): 1099 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
1076 # Create donestamp in old format to avoid triggering a re-download 1100 # Create donestamp in old format to avoid triggering a re-download
1077 if ud.donestamp: 1101 if ud.donestamp:
@@ -1093,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1093 origud.method.build_mirror_data(origud, ld) 1117 origud.method.build_mirror_data(origud, ld)
1094 return origud.localpath 1118 return origud.localpath
1095 # Otherwise the result is a local file:// and we symlink to it 1119 # Otherwise the result is a local file:// and we symlink to it
1096 ensure_symlink(ud.localpath, origud.localpath) 1120 # This may also be a link to a shallow archive
1121 # When using shallow mode, add a symlink to the original fullshallow
1122 # path to ensure a valid symlink even in the `PREMIRRORS` case
1123 origud.method.update_mirror_links(ud, origud)
1097 update_stamp(origud, ld) 1124 update_stamp(origud, ld)
1098 return ud.localpath 1125 return ud.localpath
1099 1126
@@ -1127,25 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1127 if ud.lockfile and ud.lockfile != origud.lockfile: 1154 if ud.lockfile and ud.lockfile != origud.lockfile:
1128 bb.utils.unlockfile(lf) 1155 bb.utils.unlockfile(lf)
1129 1156
1130
1131def ensure_symlink(target, link_name):
1132 if not os.path.exists(link_name):
1133 dirname = os.path.dirname(link_name)
1134 bb.utils.mkdirhier(dirname)
1135 if os.path.islink(link_name):
1136 # Broken symbolic link
1137 os.unlink(link_name)
1138
1139 # In case this is executing without any file locks held (as is
1140 # the case for file:// URLs), two tasks may end up here at the
1141 # same time, in which case we do not want the second task to
1142 # fail when the link has already been created by the first task.
1143 try:
1144 os.symlink(target, link_name)
1145 except FileExistsError:
1146 pass
1147
1148
1149def try_mirrors(fetch, d, origud, mirrors, check = False): 1157def try_mirrors(fetch, d, origud, mirrors, check = False):
1150 """ 1158 """
1151 Try to use a mirrored version of the sources. 1159 Try to use a mirrored version of the sources.
@@ -1174,7 +1182,7 @@ def trusted_network(d, url):
1174 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): 1182 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
1175 return True 1183 return True
1176 1184
1177 pkgname = d.expand(d.getVar('PN', False)) 1185 pkgname = d.getVar('PN')
1178 trusted_hosts = None 1186 trusted_hosts = None
1179 if pkgname: 1187 if pkgname:
1180 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) 1188 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
@@ -1227,20 +1235,17 @@ def srcrev_internal_helper(ud, d, name):
1227 if srcrev and srcrev != "INVALID": 1235 if srcrev and srcrev != "INVALID":
1228 break 1236 break
1229 1237
1230 if 'rev' in ud.parm and 'tag' in ud.parm: 1238 if 'rev' in ud.parm:
1231 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) 1239 parmrev = ud.parm['rev']
1232
1233 if 'rev' in ud.parm or 'tag' in ud.parm:
1234 if 'rev' in ud.parm:
1235 parmrev = ud.parm['rev']
1236 else:
1237 parmrev = ud.parm['tag']
1238 if srcrev == "INVALID" or not srcrev: 1240 if srcrev == "INVALID" or not srcrev:
1239 return parmrev 1241 return parmrev
1240 if srcrev != parmrev: 1242 if srcrev != parmrev:
1241 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) 1243 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
1242 return parmrev 1244 return parmrev
1243 1245
1246 if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
1247 return ud.parm['tag']
1248
1244 if srcrev == "INVALID" or not srcrev: 1249 if srcrev == "INVALID" or not srcrev:
1245 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) 1250 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
1246 if srcrev == "AUTOINC": 1251 if srcrev == "AUTOINC":
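
The net effect of this rework, as read from the hunk: ;rev= always defines the revision and may only coexist with a SRCREV that agrees with it, while ;tag= is now merely a fallback when no usable SRCREV is set. A condensed sketch of the decision order:

    if 'rev' in ud.parm:
        rev = ud.parm['rev']      # wins; a different non-empty SRCREV is an error
    elif 'tag' in ud.parm and srcrev in (None, "", "INVALID"):
        rev = ud.parm['tag']      # tag only fills in for a missing SRCREV
    elif srcrev and srcrev != "INVALID":
        rev = srcrev
    else:
        raise FetchError("Please set a valid SRCREV for url ...")
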
@@ -1263,7 +1268,7 @@ def get_checksum_file_list(d):
1263 found = False 1268 found = False
1264 paths = ud.method.localfile_searchpaths(ud, d) 1269 paths = ud.method.localfile_searchpaths(ud, d)
1265 for f in paths: 1270 for f in paths:
1266 pth = ud.decodedurl 1271 pth = ud.path
1267 if os.path.exists(f): 1272 if os.path.exists(f):
1268 found = True 1273 found = True
1269 filelist.append(f + ":" + str(os.path.exists(f))) 1274 filelist.append(f + ":" + str(os.path.exists(f)))
@@ -1308,23 +1313,28 @@ class FetchData(object):
1308 self.setup = False 1313 self.setup = False
1309 1314
1310 def configure_checksum(checksum_id): 1315 def configure_checksum(checksum_id):
1316 checksum_plain_name = "%ssum" % checksum_id
1311 if "name" in self.parm: 1317 if "name" in self.parm:
1312 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) 1318 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
1313 else: 1319 else:
1314 checksum_name = "%ssum" % checksum_id 1320 checksum_name = checksum_plain_name
1315
1316 setattr(self, "%s_name" % checksum_id, checksum_name)
1317 1321
1318 if checksum_name in self.parm: 1322 if checksum_name in self.parm:
1319 checksum_expected = self.parm[checksum_name] 1323 checksum_expected = self.parm[checksum_name]
1320 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]: 1324 elif checksum_plain_name in self.parm:
1325 checksum_expected = self.parm[checksum_plain_name]
1326 checksum_name = checksum_plain_name
1327 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
1321 checksum_expected = None 1328 checksum_expected = None
1322 else: 1329 else:
1323 checksum_expected = d.getVarFlag("SRC_URI", checksum_name) 1330 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
1324 1331
1332 setattr(self, "%s_name" % checksum_id, checksum_name)
1325 setattr(self, "%s_expected" % checksum_id, checksum_expected) 1333 setattr(self, "%s_expected" % checksum_id, checksum_expected)
1326 1334
1327 self.names = self.parm.get("name",'default').split(',') 1335 self.name = self.parm.get("name",'default')
1336 if "," in self.name:
1337 raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
1328 1338
1329 self.method = None 1339 self.method = None
1330 for m in methods: 1340 for m in methods:
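
After this change a checksum can be supplied three ways; for a sha256 checksum on a URL with name=tool the lookup order is (a sketch with hypothetical values):

    # 1. name-qualified URL parameter (preferred):
    #      SRC_URI = "https://example.com/f.tar.gz;name=tool;tool.sha256sum=<hex>"
    # 2. plain URL parameter (the new fallback in this hunk):
    #      SRC_URI = "https://example.com/f.tar.gz;name=tool;sha256sum=<hex>"
    # 3. varflag on SRC_URI (network URL types only; others default to None):
    #      SRC_URI[tool.sha256sum] = "<hex>"
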
@@ -1376,13 +1386,7 @@ class FetchData(object):
1376 self.lockfile = basepath + '.lock' 1386 self.lockfile = basepath + '.lock'
1377 1387
1378 def setup_revisions(self, d): 1388 def setup_revisions(self, d):
1379 self.revisions = {} 1389 self.revision = srcrev_internal_helper(self, d, self.name)
1380 for name in self.names:
1381 self.revisions[name] = srcrev_internal_helper(self, d, name)
1382
1383 # add compatibility code for non name specified case
1384 if len(self.names) == 1:
1385 self.revision = self.revisions[self.names[0]]
1386 1390
1387 def setup_localpath(self, d): 1391 def setup_localpath(self, d):
1388 if not self.localpath: 1392 if not self.localpath:
@@ -1510,7 +1514,7 @@ class FetchMethod(object):
1510 (file, urldata.parm.get('unpack'))) 1514 (file, urldata.parm.get('unpack')))
1511 1515
1512 base, ext = os.path.splitext(file) 1516 base, ext = os.path.splitext(file)
1513 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: 1517 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
1514 efile = os.path.join(rootdir, os.path.basename(base)) 1518 efile = os.path.join(rootdir, os.path.basename(base))
1515 else: 1519 else:
1516 efile = file 1520 efile = file
@@ -1569,11 +1573,11 @@ class FetchMethod(object):
1569 datafile = None 1573 datafile = None
1570 if output: 1574 if output:
1571 for line in output.decode().splitlines(): 1575 for line in output.decode().splitlines():
1572 if line.startswith('data.tar.'): 1576 if line.startswith('data.tar.') or line == 'data.tar':
1573 datafile = line 1577 datafile = line
1574 break 1578 break
1575 else: 1579 else:
1576 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) 1580 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
1577 else: 1581 else:
1578 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) 1582 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1579 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) 1583 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
@@ -1606,7 +1610,7 @@ class FetchMethod(object):
1606 if urlpath.find("/") != -1: 1610 if urlpath.find("/") != -1:
1607 destdir = urlpath.rsplit("/", 1)[0] + '/' 1611 destdir = urlpath.rsplit("/", 1)[0] + '/'
1608 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) 1612 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
1609 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) 1613 cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
1610 else: 1614 else:
1611 urldata.unpack_tracer.unpack("archive-extract", unpackdir) 1615 urldata.unpack_tracer.unpack("archive-extract", unpackdir)
1612 1616
@@ -1635,6 +1639,28 @@ class FetchMethod(object):
1635 """ 1639 """
1636 bb.utils.remove(urldata.localpath) 1640 bb.utils.remove(urldata.localpath)
1637 1641
1642 def ensure_symlink(self, target, link_name):
1643 if not os.path.exists(link_name):
1644 dirname = os.path.dirname(link_name)
1645 bb.utils.mkdirhier(dirname)
1646 if os.path.islink(link_name):
1647 # Broken symbolic link
1648 os.unlink(link_name)
1649
1650 # In case this is executing without any file locks held (as is
1651 # the case for file:// URLs), two tasks may end up here at the
1652 # same time, in which case we do not want the second task to
1653 # fail when the link has already been created by the first task.
1654 try:
1655 os.symlink(target, link_name)
1656 except FileExistsError:
1657 pass
1658
1659 def update_mirror_links(self, ud, origud):
1660 # For local file:// results, create a symlink to them
1661 # This may also be a link to a shallow archive
1662 self.ensure_symlink(ud.localpath, origud.localpath)
1663
1638 def try_premirror(self, urldata, d): 1664 def try_premirror(self, urldata, d):
1639 """ 1665 """
1640 Should premirrors be used? 1666 Should premirrors be used?
@@ -1662,13 +1688,13 @@ class FetchMethod(object):
1662 if not hasattr(self, "_latest_revision"): 1688 if not hasattr(self, "_latest_revision"):
1663 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) 1689 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1664 1690
1665 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1666 key = self.generate_revision_key(ud, d, name) 1691 key = self.generate_revision_key(ud, d, name)
1667 try: 1692
1668 return revs[key] 1693 rev = _revisions_cache.get_rev(key)
1669 except KeyError: 1694 if rev is None:
1670 revs[key] = rev = self._latest_revision(ud, d, name) 1695 rev = self._latest_revision(ud, d, name)
1671 return rev 1696 _revisions_cache.set_rev(key, rev)
1697 return rev
1672 1698
1673 def sortable_revision(self, ud, d, name): 1699 def sortable_revision(self, ud, d, name):
1674 latest_rev = self._build_revision(ud, d, name) 1700 latest_rev = self._build_revision(ud, d, name)
@@ -1806,7 +1832,7 @@ class Fetch(object):
1806 self.ud[url] = FetchData(url, self.d) 1832 self.ud[url] = FetchData(url, self.d)
1807 1833
1808 self.ud[url].setup_localpath(self.d) 1834 self.ud[url].setup_localpath(self.d)
1809 return self.d.expand(self.ud[url].localpath) 1835 return self.ud[url].localpath
1810 1836
1811 def localpaths(self): 1837 def localpaths(self):
1812 """ 1838 """
@@ -1859,25 +1885,28 @@ class Fetch(object):
1859 logger.debug(str(e)) 1885 logger.debug(str(e))
1860 done = False 1886 done = False
1861 1887
1888 d = self.d
1862 if premirroronly: 1889 if premirroronly:
1863 self.d.setVar("BB_NO_NETWORK", "1") 1890 # Only disable the network in a copy
1891 d = bb.data.createCopy(self.d)
1892 d.setVar("BB_NO_NETWORK", "1")
1864 1893
1865 firsterr = None 1894 firsterr = None
1866 verified_stamp = False 1895 verified_stamp = False
1867 if done: 1896 if done:
1868 verified_stamp = m.verify_donestamp(ud, self.d) 1897 verified_stamp = m.verify_donestamp(ud, d)
1869 if not done and (not verified_stamp or m.need_update(ud, self.d)): 1898 if not done and (not verified_stamp or m.need_update(ud, d)):
1870 try: 1899 try:
1871 if not trusted_network(self.d, ud.url): 1900 if not trusted_network(d, ud.url):
1872 raise UntrustedUrl(ud.url) 1901 raise UntrustedUrl(ud.url)
1873 logger.debug("Trying Upstream") 1902 logger.debug("Trying Upstream")
1874 m.download(ud, self.d) 1903 m.download(ud, d)
1875 if hasattr(m, "build_mirror_data"): 1904 if hasattr(m, "build_mirror_data"):
1876 m.build_mirror_data(ud, self.d) 1905 m.build_mirror_data(ud, d)
1877 done = True 1906 done = True
1878 # early checksum verify, so that if the checksum mismatches, 1907
1879 # the fetcher still has a chance to fetch from a mirror 1908
1880 m.update_donestamp(ud, self.d) 1909 m.update_donestamp(ud, d)
1881 1910
1882 except bb.fetch2.NetworkAccess: 1911 except bb.fetch2.NetworkAccess:
1883 raise 1912 raise
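
The copy matters because self.d is shared by every URL handled by this Fetch instance: mutating it directly would leak BB_NO_NETWORK into later, non-premirror downloads (my reading of the comment above). The pattern in isolation:

    d = bb.data.createCopy(self.d)  # private copy for this download only
    d.setVar("BB_NO_NETWORK", "1")  # self.d keeps its original setting
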
@@ -1896,17 +1925,17 @@ class Fetch(object):
1896 firsterr = e 1925 firsterr = e
1897 # Remove any incomplete fetch 1926 # Remove any incomplete fetch
1898 if not verified_stamp and m.cleanup_upon_failure(): 1927 if not verified_stamp and m.cleanup_upon_failure():
1899 m.clean(ud, self.d) 1928 m.clean(ud, d)
1900 logger.debug("Trying MIRRORS") 1929 logger.debug("Trying MIRRORS")
1901 mirrors = mirror_from_string(self.d.getVar('MIRRORS')) 1930 mirrors = mirror_from_string(d.getVar('MIRRORS'))
1902 done = m.try_mirrors(self, ud, self.d, mirrors) 1931 done = m.try_mirrors(self, ud, d, mirrors)
1903 1932
1904 if not done or not m.done(ud, self.d): 1933 if not done or not m.done(ud, d):
1905 if firsterr: 1934 if firsterr:
1906 logger.error(str(firsterr)) 1935 logger.error(str(firsterr))
1907 raise FetchError("Unable to fetch URL from any source.", u) 1936 raise FetchError("Unable to fetch URL from any source.", u)
1908 1937
1909 m.update_donestamp(ud, self.d) 1938 m.update_donestamp(ud, d)
1910 1939
1911 except IOError as e: 1940 except IOError as e:
1912 if e.errno in [errno.ESTALE]: 1941 if e.errno in [errno.ESTALE]:
@@ -2088,6 +2117,7 @@ from . import npmsw
2088from . import az 2117from . import az
2089from . import crate 2118from . import crate
2090from . import gcp 2119from . import gcp
2120from . import gomod
2091 2121
2092methods.append(local.Local()) 2122methods.append(local.Local())
2093methods.append(wget.Wget()) 2123methods.append(wget.Wget())
@@ -2110,3 +2140,5 @@ methods.append(npmsw.NpmShrinkWrap())
2110methods.append(az.Az()) 2140methods.append(az.Az())
2111methods.append(crate.Crate()) 2141methods.append(crate.Crate())
2112methods.append(gcp.GCP()) 2142methods.append(gcp.GCP())
2143methods.append(gomod.GoMod())
2144methods.append(gomod.GoModGit())
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
index 3ccc594c22..1d3664f213 100644
--- a/bitbake/lib/bb/fetch2/az.py
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -36,6 +36,8 @@ class Az(Wget):
36 36
37 az_sas = d.getVar('AZ_SAS') 37 az_sas = d.getVar('AZ_SAS')
38 if az_sas and az_sas not in ud.url: 38 if az_sas and az_sas not in ud.url:
39 if not az_sas.startswith('?'):
40 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
39 ud.url += az_sas 41 ud.url += az_sas
40 42
41 return Wget.checkstatus(self, fetch, ud, d, try_again) 43 return Wget.checkstatus(self, fetch, ud, d, try_again)
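
A valid AZ_SAS value therefore carries its own leading '?'; a sketch with a hypothetical token:

    # AZ_SAS = "?sv=2023-11-03&ss=b&sig=<signature>"
    # appended verbatim, giving: https://<host><path>?sv=2023-11-03&ss=b&sig=<signature>
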
@@ -62,15 +64,18 @@ class Az(Wget):
62 az_sas = d.getVar('AZ_SAS') 64 az_sas = d.getVar('AZ_SAS')
63 65
64 if az_sas: 66 if az_sas:
67 if not az_sas.startswith('?'):
68 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
65 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas) 69 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
66 else: 70 else:
67 azuri = '%s%s%s' % ('https://', ud.host, ud.path) 71 azuri = '%s%s%s' % ('https://', ud.host, ud.path)
68 72
73 dldir = d.getVar("DL_DIR")
69 if os.path.exists(ud.localpath): 74 if os.path.exists(ud.localpath):
70 # file exists, but we didn't complete it... trying again. 75
71 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri) 76 fetchcmd += " -c -P %s '%s'" % (dldir, azuri)
72 else: 77 else:
73 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri) 78 fetchcmd += " -P %s '%s'" % (dldir, azuri)
74 79
75 try: 80 try:
76 self._runwget(ud, d, fetchcmd, False) 81 self._runwget(ud, d, fetchcmd, False)
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..17500daf95 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
108 ud.module.replace("/", "."), 108 ud.module.replace("/", "."),
109 ud.label.replace("/", ".")) 109 ud.label.replace("/", "."))
110 110
111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) 111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
112 ud.csname = "%s-config-spec" % (ud.identifier) 112 ud.csname = "%s-config-spec" % (ud.identifier)
113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) 113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) 114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -130,8 +130,6 @@ class ClearCase(FetchMethod):
130 self.debug("configspecfile = %s" % ud.configspecfile) 130 self.debug("configspecfile = %s" % ud.configspecfile)
131 self.debug("localfile = %s" % ud.localfile) 131 self.debug("localfile = %s" % ud.localfile)
132 132
133 ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
134
135 def _build_ccase_command(self, ud, command): 133 def _build_ccase_command(self, ud, command):
136 """ 134 """
137 Build up a commandline based on ud 135 Build up a commandline based on ud
@@ -196,7 +194,7 @@ class ClearCase(FetchMethod):
196 194
197 def need_update(self, ud, d): 195 def need_update(self, ud, d):
198 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec): 196 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
199 ud.identifier += "-%s" % d.getVar("DATETIME",d, True) 197 ud.identifier += "-%s" % d.getVar("DATETIME")
200 return True 198 return True
201 if os.path.exists(ud.localpath): 199 if os.path.exists(ud.localpath):
202 return False 200 return False
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index 01d49435c3..e611736f06 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -70,6 +70,7 @@ class Crate(Wget):
70 host = 'crates.io/api/v1/crates' 70 host = 'crates.io/api/v1/crates'
71 71
72 ud.url = "https://%s/%s/%s/download" % (host, name, version) 72 ud.url = "https://%s/%s/%s/download" % (host, name, version)
73 ud.versionsurl = "https://%s/%s/versions" % (host, name)
73 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) 74 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
74 if 'name' not in ud.parm: 75 if 'name' not in ud.parm:
75 ud.parm['name'] = '%s-%s' % (name, version) 76 ud.parm['name'] = '%s-%s' % (name, version)
@@ -139,3 +140,11 @@ class Crate(Wget):
139 mdpath = os.path.join(bbpath, cratepath, mdfile) 140 mdpath = os.path.join(bbpath, cratepath, mdfile)
140 with open(mdpath, "w") as f: 141 with open(mdpath, "w") as f:
141 json.dump(metadata, f) 142 json.dump(metadata, f)
143
144 def latest_versionstring(self, ud, d):
145 from functools import cmp_to_key
146 json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
147 versions = [(0, i["num"], "") for i in json_data["versions"]]
148 versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
149
150 return (versions[-1][1], "")
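
The (0, i["num"], "") packing above is needed because bb.utils.vercmp compares (epoch, version, revision) tuples rather than bare strings; a quick illustration, assuming that signature:

    from functools import cmp_to_key
    import bb.utils

    versions = [(0, "0.9.1", ""), (0, "0.10.0", "")]
    newest = sorted(versions, key=cmp_to_key(bb.utils.vercmp))[-1]
    # newest[1] == "0.10.0": numeric segments compare as numbers, not text
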
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
index f40ce2eaa5..86546d40bf 100644
--- a/bitbake/lib/bb/fetch2/gcp.py
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -46,8 +46,7 @@ class GCP(FetchMethod):
46 else: 46 else:
47 ud.basename = os.path.basename(ud.path) 47 ud.basename = os.path.basename(ud.path)
48 48
49 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 49 ud.localfile = ud.basename
50 ud.basecmd = "gsutil stat"
51 50
52 def get_gcp_client(self): 51 def get_gcp_client(self):
53 from google.cloud import storage 52 from google.cloud import storage
@@ -58,17 +57,20 @@ class GCP(FetchMethod):
58 Fetch urls using the GCP API. 57 Fetch urls using the GCP API.
59 Assumes localpath was called first. 58 Assumes localpath was called first.
60 """ 59 """
60 from google.api_core.exceptions import NotFound
61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}") 61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
62 if self.gcp_client is None: 62 if self.gcp_client is None:
63 self.get_gcp_client() 63 self.get_gcp_client()
64 64
65 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") 65 bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
66 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
67 66
68 # Path sometimes has leading slash, so strip it 67 # Path sometimes has leading slash, so strip it
69 path = ud.path.lstrip("/") 68 path = ud.path.lstrip("/")
70 blob = self.gcp_client.bucket(ud.host).blob(path) 69 blob = self.gcp_client.bucket(ud.host).blob(path)
71 blob.download_to_filename(ud.localpath) 70 try:
71 blob.download_to_filename(ud.localpath)
72 except NotFound:
73 raise FetchError("The GCP API threw a NotFound exception")
72 74
73 # Additional sanity checks copied from the wget class (although there 75 # Additional sanity checks copied from the wget class (although there
74 # are no known issues which mean these are required, treat the GCP API 76 # are no known issues which mean these are required, treat the GCP API
@@ -90,8 +92,7 @@ class GCP(FetchMethod):
90 if self.gcp_client is None: 92 if self.gcp_client is None:
91 self.get_gcp_client() 93 self.get_gcp_client()
92 94
93 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") 95 bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
94 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
95 96
96 # Path sometimes has leading slash, so strip it 97 # Path sometimes has leading slash, so strip it
97 path = ud.path.lstrip("/") 98 path = ud.path.lstrip("/")
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..52fffe21d7 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -9,15 +9,6 @@ Supported SRC_URI options are:
9- branch 9- branch
10 The git branch to retrieve from. The default is "master" 10 The git branch to retrieve from. The default is "master"
11 11
12 This option also supports multiple branch fetching, with branches
13 separated by commas. In multiple branches case, the name option
14 must have the same number of names to match the branches, which is
15 used to specify the SRC_REV for the branch
16 e.g:
17 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
18 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
19 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
20
21- tag 12- tag
22 The git tag to retrieve. The default is "master" 13 The git tag to retrieve. The default is "master"
23 14
@@ -81,6 +72,7 @@ import shlex
81import shutil 72import shutil
82import subprocess 73import subprocess
83import tempfile 74import tempfile
75import urllib
84import bb 76import bb
85import bb.progress 77import bb.progress
86from contextlib import contextmanager 78from contextlib import contextmanager
@@ -190,14 +182,11 @@ class Git(FetchMethod):
190 ud.bareclone = ud.parm.get("bareclone","0") == "1" 182 ud.bareclone = ud.parm.get("bareclone","0") == "1"
191 if ud.bareclone: 183 if ud.bareclone:
192 ud.nocheckout = 1 184 ud.nocheckout = 1
193 185
194 ud.unresolvedrev = {} 186 ud.unresolvedrev = ""
195 branches = ud.parm.get("branch", "").split(',') 187 ud.branch = ud.parm.get("branch", "")
196 if branches == [""] and not ud.nobranch: 188 if not ud.branch and not ud.nobranch:
197 bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url) 189 raise bb.fetch2.ParameterError("The url does not set any branch parameter or set nobranch=1.", ud.url)
198 branches = ["master"]
199 if len(branches) != len(ud.names):
200 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
201 190
202 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" 191 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"
203 192
@@ -207,8 +196,11 @@ class Git(FetchMethod):
207 if ud.bareclone: 196 if ud.bareclone:
208 ud.cloneflags += " --mirror" 197 ud.cloneflags += " --mirror"
209 198
199 ud.shallow_skip_fast = False
210 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" 200 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
211 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() 201 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()
202 if 'tag' in ud.parm:
203 ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag'])
212 204
213 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") 205 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
214 if depth_default is not None: 206 if depth_default is not None:
@@ -225,32 +217,27 @@ class Git(FetchMethod):
225 217
226 revs_default = d.getVar("BB_GIT_SHALLOW_REVS") 218 revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
227 ud.shallow_revs = [] 219 ud.shallow_revs = []
228 ud.branches = {} 220
229 for pos, name in enumerate(ud.names): 221 ud.unresolvedrev = ud.branch
230 branch = branches[pos] 222
231 ud.branches[name] = branch 223 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name)
232 ud.unresolvedrev[name] = branch 224 if shallow_depth is not None:
233 225 try:
234 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) 226 shallow_depth = int(shallow_depth or 0)
235 if shallow_depth is not None: 227 except ValueError:
236 try: 228 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
237 shallow_depth = int(shallow_depth or 0) 229 else:
238 except ValueError: 230 if shallow_depth < 0:
239 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 231 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
240 else: 232 ud.shallow_depths[ud.name] = shallow_depth
241 if shallow_depth < 0: 233
242 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 234 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name)
243 ud.shallow_depths[name] = shallow_depth 235 if revs is not None:
244 236 ud.shallow_revs.extend(revs.split())
245 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) 237 elif revs_default is not None:
246 if revs is not None: 238 ud.shallow_revs.extend(revs_default.split())
247 ud.shallow_revs.extend(revs.split()) 239
248 elif revs_default is not None: 240 if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0:
249 ud.shallow_revs.extend(revs_default.split())
250
251 if (ud.shallow and
252 not ud.shallow_revs and
253 all(ud.shallow_depths[n] == 0 for n in ud.names)):
254 # Shallow disabled for this URL 241 # Shallow disabled for this URL
255 ud.shallow = False 242 ud.shallow = False
256 243
@@ -259,10 +246,9 @@ class Git(FetchMethod):
259 # rev of this repository. This will get resolved into a revision 246 # rev of this repository. This will get resolved into a revision
260 # later. If an actual revision happens to have also been provided 247 # later. If an actual revision happens to have also been provided
261 # then this setting will be overridden. 248 # then this setting will be overridden.
262 for name in ud.names: 249 ud.unresolvedrev = 'HEAD'
263 ud.unresolvedrev[name] = 'HEAD'
264 250
265 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all" 251 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
266 252
267 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" 253 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
268 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable 254 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -270,12 +256,11 @@ class Git(FetchMethod):
270 256
271 ud.setup_revisions(d) 257 ud.setup_revisions(d)
272 258
273 for name in ud.names: 259 # Ensure any revision that doesn't look like a SHA-1 is translated into one
274 # Ensure any revision that doesn't look like a SHA-1 is translated into one 260 if not sha1_re.match(ud.revision or ''):
275 if not sha1_re.match(ud.revisions[name] or ''): 261 if ud.revision:
276 if ud.revisions[name]: 262 ud.unresolvedrev = ud.revision
277 ud.unresolvedrev[name] = ud.revisions[name] 263 ud.revision = self.latest_revision(ud, d, ud.name)
278 ud.revisions[name] = self.latest_revision(ud, d, name)
279 264
280 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) 265 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
281 if gitsrcname.startswith('.'): 266 if gitsrcname.startswith('.'):
@@ -286,8 +271,7 @@ class Git(FetchMethod):
286 # upstream repo in the future, the mirror will remain intact and still 271 # upstream repo in the future, the mirror will remain intact and still
287 # contain the revision 272 # contain the revision
288 if ud.rebaseable: 273 if ud.rebaseable:
289 for name in ud.names: 274 gitsrcname = gitsrcname + '_' + ud.revision
290 gitsrcname = gitsrcname + '_' + ud.revisions[name]
291 275
292 dl_dir = d.getVar("DL_DIR") 276 dl_dir = d.getVar("DL_DIR")
293 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") 277 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
@@ -305,15 +289,14 @@ class Git(FetchMethod):
305 if ud.shallow_revs: 289 if ud.shallow_revs:
306 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) 290 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))
307 291
308 for name, revision in sorted(ud.revisions.items()): 292 tarballname = "%s_%s" % (tarballname, ud.revision[:7])
309 tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) 293 depth = ud.shallow_depths[ud.name]
310 depth = ud.shallow_depths[name] 294 if depth:
311 if depth: 295 tarballname = "%s-%s" % (tarballname, depth)
312 tarballname = "%s-%s" % (tarballname, depth)
313 296
314 shallow_refs = [] 297 shallow_refs = []
315 if not ud.nobranch: 298 if not ud.nobranch:
316 shallow_refs.extend(ud.branches.values()) 299 shallow_refs.append(ud.branch)
317 if ud.shallow_extra_refs: 300 if ud.shallow_extra_refs:
318 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) 301 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
319 if shallow_refs: 302 if shallow_refs:
@@ -338,18 +321,21 @@ class Git(FetchMethod):
338 return True 321 return True
339 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): 322 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
340 return True 323 return True
341 for name in ud.names: 324 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
342 if not self._contains_ref(ud, d, name, ud.clonedir): 325 return True
343 return True 326 if 'tag' in ud.parm and not self._contains_ref(ud, d, ud.name, ud.clonedir, tag=True):
327 return True
344 return False 328 return False
345 329
346 def lfs_need_update(self, ud, d): 330 def lfs_need_update(self, ud, d):
331 if not self._need_lfs(ud):
332 return False
333
347 if self.clonedir_need_update(ud, d): 334 if self.clonedir_need_update(ud, d):
348 return True 335 return True
349 336
350 for name in ud.names: 337 if not self._lfs_objects_downloaded(ud, d, ud.clonedir):
351 if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir): 338 return True
352 return True
353 return False 339 return False
354 340
355 def clonedir_need_shallow_revs(self, ud, d): 341 def clonedir_need_shallow_revs(self, ud, d):
@@ -366,6 +352,13 @@ class Git(FetchMethod):
366 def tarball_need_update(self, ud): 352 def tarball_need_update(self, ud):
367 return ud.write_tarballs and not os.path.exists(ud.fullmirror) 353 return ud.write_tarballs and not os.path.exists(ud.fullmirror)
368 354
355 def update_mirror_links(self, ud, origud):
356 super().update_mirror_links(ud, origud)
357 # When using shallow mode, add a symlink to the original fullshallow
358 # path to ensure a valid symlink even in the `PREMIRRORS` case
359 if ud.shallow and not os.path.exists(origud.fullshallow):
360 self.ensure_symlink(ud.localpath, origud.fullshallow)
361
369 def try_premirror(self, ud, d): 362 def try_premirror(self, ud, d):
370 # If we don't do this, updating an existing checkout with only premirrors 363 # If we don't do this, updating an existing checkout with only premirrors
371 # is not possible 364 # is not possible
@@ -446,6 +439,24 @@ class Git(FetchMethod):
446 if ud.proto.lower() != 'file': 439 if ud.proto.lower() != 'file':
447 bb.fetch2.check_network_access(d, clone_cmd, ud.url) 440 bb.fetch2.check_network_access(d, clone_cmd, ud.url)
448 progresshandler = GitProgressHandler(d) 441 progresshandler = GitProgressHandler(d)
442
443 # Try creating a fast initial shallow clone
444 # Enabling ud.shallow_skip_fast will skip this
445 # If the Git error "Server does not allow request for unadvertised object"
446 # occurs, shallow_skip_fast is enabled automatically.
447 # This may happen if the Git server does not allow the request
448 # or if the Git client has issues with this functionality.
449 if ud.shallow and not ud.shallow_skip_fast:
450 try:
451 self.clone_shallow_with_tarball(ud, d)
452 # When the shallow clone has succeeded, use the shallow tarball
453 ud.localpath = ud.fullshallow
454 return
455 except:
456 logger.warning("Creating fast initial shallow clone failed, falling back to a regular initial clone.")
457
458 # When the fast initial shallow clone was skipped or failed,
459 # fall back to a regular initial clone
449 runfetchcmd(clone_cmd, d, log=progresshandler) 460 runfetchcmd(clone_cmd, d, log=progresshandler)
450 461
451 # Update the checkout if needed 462 # Update the checkout if needed
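
Taken together, the download path in this hunk is roughly (a control-flow sketch of the code above):

    if ud.shallow and not ud.shallow_skip_fast:
        try:
            self.clone_shallow_with_tarball(ud, d)  # shallow fetch straight from the remote
            ud.localpath = ud.fullshallow           # use the shallow tarball from now on
            return
        except Exception:
            pass                                    # fall through to the full clone
    runfetchcmd(clone_cmd, d, log=progresshandler)  # regular full clone
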
@@ -473,9 +484,8 @@ class Git(FetchMethod):
473 if exc.errno != errno.ENOENT: 484 if exc.errno != errno.ENOENT:
474 raise 485 raise
475 486
476 for name in ud.names: 487 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
477 if not self._contains_ref(ud, d, name, ud.clonedir): 488 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch))
478 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
479 489
480 if ud.shallow and ud.write_shallow_tarballs: 490 if ud.shallow and ud.write_shallow_tarballs:
481 missing_rev = self.clonedir_need_shallow_revs(ud, d) 491 missing_rev = self.clonedir_need_shallow_revs(ud, d)
@@ -483,128 +493,168 @@ class Git(FetchMethod):
483 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) 493 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
484 494
485 if self.lfs_need_update(ud, d): 495 if self.lfs_need_update(ud, d):
486 # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching 496 self.lfs_fetch(ud, d, ud.clonedir, ud.revision)
487 # of all LFS blobs needed at the srcrev.
488 #
489 # It would be nice to just do this inline here by running 'git-lfs fetch'
490 # on the bare clonedir, but that operation requires a working copy on some
491 # releases of Git LFS.
492 with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
493 # Do the checkout. This implicitly involves a Git LFS fetch.
494 Git.unpack(self, ud, tmpdir, d)
495
496 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
497 # the bare clonedir.
498 #
499 # As this procedure is invoked repeatedly on incremental fetches as
500 # a recipe's SRCREV is bumped throughout its lifetime, this will
501 # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
502 # corresponding to all the blobs reachable from the different revs
503 # fetched across time.
504 #
505 # Only do this if the unpack resulted in a .git/lfs directory being
506 # created; this only happens if at least one blob needed to be
507 # downloaded.
508 if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
509 runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
510
511 def build_mirror_data(self, ud, d):
512 497
513 # Create as a temp file and move atomically into position to avoid races 498 def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None):
514 @contextmanager 499 """Helper method for fetching Git LFS data"""
515 def create_atomic(filename): 500 try:
516 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) 501 if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision):
517 try: 502 self._ensure_git_lfs(d, ud)
518 yield tfile 503
519 umask = os.umask(0o666) 504 # Use a worktree at the revision because a .lfsconfig may exist
520 os.umask(umask) 505 worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision)
521 os.chmod(tfile, (0o666 & ~umask)) 506 runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir)
522 os.rename(tfile, filename) 507 lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "")
523 finally: 508 runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt"))
524 os.close(fd) 509 worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd
510 runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir)
511 except:
512 logger.warning("Fetching LFS did not succeed.")
513
514 @contextmanager
515 def create_atomic(self, filename):
516 """Create as a temp file and move atomically into position to avoid races"""
517 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
518 try:
519 yield tfile
520 umask = os.umask(0o666)
521 os.umask(umask)
522 os.chmod(tfile, (0o666 & ~umask))
523 os.rename(tfile, filename)
524 finally:
525 os.close(fd)
525 526
527 def build_mirror_data(self, ud, d):
526 if ud.shallow and ud.write_shallow_tarballs: 528 if ud.shallow and ud.write_shallow_tarballs:
527 if not os.path.exists(ud.fullshallow): 529 if not os.path.exists(ud.fullshallow):
528 if os.path.islink(ud.fullshallow): 530 if os.path.islink(ud.fullshallow):
529 os.unlink(ud.fullshallow) 531 os.unlink(ud.fullshallow)
530 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 532 self.clone_shallow_with_tarball(ud, d)
531 shallowclone = os.path.join(tempdir, 'git')
532 try:
533 self.clone_shallow_local(ud, shallowclone, d)
534
535 logger.info("Creating tarball of git repository")
536 with create_atomic(ud.fullshallow) as tfile:
537 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
538 runfetchcmd("touch %s.done" % ud.fullshallow, d)
539 finally:
540 bb.utils.remove(tempdir, recurse=True)
541 elif ud.write_tarballs and not os.path.exists(ud.fullmirror): 533 elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
542 if os.path.islink(ud.fullmirror): 534 if os.path.islink(ud.fullmirror):
543 os.unlink(ud.fullmirror) 535 os.unlink(ud.fullmirror)
544 536
545 logger.info("Creating tarball of git repository") 537 logger.info("Creating tarball of git repository")
546 with create_atomic(ud.fullmirror) as tfile: 538 with self.create_atomic(ud.fullmirror) as tfile:
547 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, 539 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
548 quiet=True, workdir=ud.clonedir) 540 quiet=True, workdir=ud.clonedir)
549 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." 541 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
550 % (tfile, mtime), d, workdir=ud.clonedir) 542 % (tfile, mtime), d, workdir=ud.clonedir)
551 runfetchcmd("touch %s.done" % ud.fullmirror, d) 543 runfetchcmd("touch %s.done" % ud.fullmirror, d)
552 544
545 def clone_shallow_with_tarball(self, ud, d):
546 ret = False
547 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
548 shallowclone = os.path.join(tempdir, 'git')
549 try:
550 try:
551 self.clone_shallow_local(ud, shallowclone, d)
552 except:
553 logger.warning("Fast shallow clone failed, retrying with fast mode disabled.")
554 bb.utils.remove(tempdir, recurse=True)
555 os.mkdir(tempdir)
556 ud.shallow_skip_fast = True
557 self.clone_shallow_local(ud, shallowclone, d)
558 logger.info("Creating tarball of git repository")
559 with self.create_atomic(ud.fullshallow) as tfile:
560 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
561 runfetchcmd("touch %s.done" % ud.fullshallow, d)
562 ret = True
563 finally:
564 bb.utils.remove(tempdir, recurse=True)
565
566 return ret
567
553 def clone_shallow_local(self, ud, dest, d): 568 def clone_shallow_local(self, ud, dest, d):
554 """Clone the repo and make it shallow. 569 """
570 Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
571 - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
572 - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
573 """
555 574
556 The upstream url of the new clone isn't set at this time, as it'll be 575 progresshandler = GitProgressHandler(d)
557 set correctly when unpacked.""" 576 repourl = self._get_repo_url(ud)
558 runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) 577 bb.utils.mkdirhier(dest)
578 init_cmd = "%s init -q" % ud.basecmd
579 if ud.bareclone:
580 init_cmd += " --bare"
581 runfetchcmd(init_cmd, d, workdir=dest)
582 # Use repourl when creating a fast initial shallow clone
583 # Prefer already existing full bare clones if available
584 if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir):
585 remote = shlex.quote(repourl)
586 else:
587 remote = ud.clonedir
588 runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest)
559 589
560 to_parse, shallow_branches = [], [] 590 # Check the histories which should be excluded
561 for name in ud.names: 591 shallow_exclude = ''
562 revision = ud.revisions[name] 592 for revision in ud.shallow_revs:
563 depth = ud.shallow_depths[name] 593 shallow_exclude += " --shallow-exclude=%s" % revision
564 if depth:
565 to_parse.append('%s~%d^{}' % (revision, depth - 1))
566 594
567 # For nobranch, we need a ref, otherwise the commits will be 595 revision = ud.revision
568 # removed, and for non-nobranch, we truncate the branch to our 596 depth = ud.shallow_depths[ud.name]
569 # srcrev, to avoid keeping unnecessary history beyond that.
570 branch = ud.branches[name]
571 if ud.nobranch:
572 ref = "refs/shallow/%s" % name
573 elif ud.bareclone:
574 ref = "refs/heads/%s" % branch
575 else:
576 ref = "refs/remotes/origin/%s" % branch
577 597
578 shallow_branches.append(ref) 598 # The --depth and --shallow-exclude can't be used together
579 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) 599 if depth and shallow_exclude:
600 raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
580 601
581 # Map srcrev+depths to revisions 602 # For nobranch, we need a ref, otherwise the commits will be
582 parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) 603 # removed, and for non-nobranch, we truncate the branch to our
604 # srcrev, to avoid keeping unnecessary history beyond that.
605 branch = ud.branch
606 if ud.nobranch:
607 ref = "refs/shallow/%s" % ud.name
608 elif ud.bareclone:
609 ref = "refs/heads/%s" % branch
610 else:
611 ref = "refs/remotes/origin/%s" % branch
583 612
584 # Resolve specified revisions 613 fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
585 parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) 614 if depth:
586 shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() 615 fetch_cmd += " --depth %s" % depth
616
617 if shallow_exclude:
618 fetch_cmd += shallow_exclude
619
620 # Advertise the revision for older git versions such as 2.25.1, which
621 # otherwise fail with: error: Server does not allow request for unadvertised object.
622 # The ud.clonedir is a local temporary dir and will be removed when the
623 # fetch is done, so we can do anything on it.
624 adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
625 if ud.shallow_skip_fast:
626 runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
627
628 runfetchcmd(fetch_cmd, d, workdir=dest)
629 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
630 # Fetch Git LFS data
631 self.lfs_fetch(ud, d, dest, ud.revision)
587 632
588 # Apply extra ref wildcards 633 # Apply extra ref wildcards
589 all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, 634 all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
590 d, workdir=dest).splitlines() 635 d, workdir=dest).splitlines()
636 all_refs = []
637 for line in all_refs_remote:
638 all_refs.append(line.split()[-1])
639 extra_refs = []
591 for r in ud.shallow_extra_refs: 640 for r in ud.shallow_extra_refs:
592 if not ud.bareclone: 641 if not ud.bareclone:
593 r = r.replace('refs/heads/', 'refs/remotes/origin/') 642 r = r.replace('refs/heads/', 'refs/remotes/origin/')
594 643
595 if '*' in r: 644 if '*' in r:
596 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) 645 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
597 shallow_branches.extend(matches) 646 extra_refs.extend(matches)
598 else: 647 else:
599 shallow_branches.append(r) 648 extra_refs.append(r)
649
650 for ref in extra_refs:
651 ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '')
652 runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
653 revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
654 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
600 655
601 # Make the repository shallow 656 # The url is the local ud.clonedir; set it to the upstream one
602 shallow_cmd = [self.make_shallow_path, '-s'] 657 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
603 for b in shallow_branches:
604 shallow_cmd.append('-r')
605 shallow_cmd.append(b)
606 shallow_cmd.extend(shallow_revisions)
607 runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
608 658
609 def unpack(self, ud, destdir, d): 659 def unpack(self, ud, destdir, d):
610 """ unpack the downloaded src to destdir""" 660 """ unpack the downloaded src to destdir"""
@@ -612,7 +662,7 @@ class Git(FetchMethod):
612 subdir = ud.parm.get("subdir") 662 subdir = ud.parm.get("subdir")
613 subpath = ud.parm.get("subpath") 663 subpath = ud.parm.get("subpath")
614 readpathspec = "" 664 readpathspec = ""
615 def_destsuffix = "git/" 665 def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/"
616 666
617 if subpath: 667 if subpath:
618 readpathspec = ":%s" % subpath 668 readpathspec = ":%s" % subpath
@@ -664,30 +714,43 @@ class Git(FetchMethod):
664 if not source_found: 714 if not source_found:
665 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) 715 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
666 716
717 # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
718 # matches the revision
719 if 'tag' in ud.parm and sha1_re.match(ud.revision):
720 output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir)
721 output = output.strip()
722 if output != ud.revision:
723 # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list
724 # If it resolves to the same thing there isn't a problem.
725 output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
726 output2 = output2.strip()
727 if output != output2:
728 raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url)
729
667 repourl = self._get_repo_url(ud) 730 repourl = self._get_repo_url(ud)
668 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) 731 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
669 732
670 if self._contains_lfs(ud, d, destdir): 733 if self._contains_lfs(ud, d, destdir):
671 if need_lfs and not self._find_git_lfs(d): 734 if not need_lfs:
672 raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
673 elif not need_lfs:
674 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) 735 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
675 else: 736 else:
737 self._ensure_git_lfs(d, ud)
738
676 runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) 739 runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
677 740
678 if not ud.nocheckout: 741 if not ud.nocheckout:
679 if subpath: 742 if subpath:
680 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, 743 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d,
681 workdir=destdir) 744 workdir=destdir)
682 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) 745 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
683 elif not ud.nobranch: 746 elif not ud.nobranch:
684 branchname = ud.branches[ud.names[0]] 747 branchname = ud.branch
685 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ 748 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
686 ud.revisions[ud.names[0]]), d, workdir=destdir) 749 ud.revision), d, workdir=destdir)
687 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ 750 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
688 branchname), d, workdir=destdir) 751 branchname), d, workdir=destdir)
689 else: 752 else:
690 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir) 753 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
691 754
692 return True 755 return True
693 756
@@ -701,22 +764,29 @@ class Git(FetchMethod):
701 clonedir = os.path.realpath(ud.localpath) 764 clonedir = os.path.realpath(ud.localpath)
702 to_remove.append(clonedir) 765 to_remove.append(clonedir)
703 766
767 # Remove shallow mirror tarball
768 if ud.shallow:
769 to_remove.append(ud.fullshallow)
770 to_remove.append(ud.fullshallow + ".done")
771
704 for r in to_remove: 772 for r in to_remove:
705 if os.path.exists(r): 773 if os.path.exists(r) or os.path.islink(r):
706 bb.note('Removing %s' % r) 774 bb.note('Removing %s' % r)
707 bb.utils.remove(r, True) 775 bb.utils.remove(r, True)
708 776
709 def supports_srcrev(self): 777 def supports_srcrev(self):
710 return True 778 return True
711 779
712 def _contains_ref(self, ud, d, name, wd): 780 def _contains_ref(self, ud, d, name, wd, tag=False):
713 cmd = "" 781 cmd = ""
782 git_ref_name = 'refs/tags/%s' % ud.parm['tag'] if tag else ud.revision
783
714 if ud.nobranch: 784 if ud.nobranch:
715 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( 785 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
716 ud.basecmd, ud.revisions[name]) 786 ud.basecmd, git_ref_name)
717 else: 787 else:
718 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( 788 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
719 ud.basecmd, ud.revisions[name], ud.branches[name]) 789 ud.basecmd, git_ref_name, ud.branch)
720 try: 790 try:
721 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 791 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
722 except bb.fetch2.FetchError: 792 except bb.fetch2.FetchError:
@@ -725,19 +795,21 @@ class Git(FetchMethod):
725 raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) 795 raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
726 return output.split()[0] != "0" 796 return output.split()[0] != "0"
727 797
728 def _lfs_objects_downloaded(self, ud, d, name, wd): 798 def _lfs_objects_downloaded(self, ud, d, wd):
729 """ 799 """
730 Verifies whether the LFS objects for requested revisions have already been downloaded 800 Verifies whether the LFS objects for requested revisions have already been downloaded
731 """ 801 """
732 # Bail out early if this repository doesn't use LFS 802 # Bail out early if this repository doesn't use LFS
733 if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd): 803 if not self._contains_lfs(ud, d, wd):
734 return True 804 return True
735 805
806 self._ensure_git_lfs(d, ud)
807
736 # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file 808 # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
737 # existence. 809 # existence.
738 # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git 810 # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
739 cmd = "%s lfs ls-files -l %s" \ 811 cmd = "%s lfs ls-files -l %s" \
740 % (ud.basecmd, ud.revisions[name]) 812 % (ud.basecmd, ud.revision)
741 output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() 813 output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
742 # Do not do any further matching if no objects are managed by LFS 814 # Do not do any further matching if no objects are managed by LFS
743 if not output: 815 if not output:
@@ -761,18 +833,8 @@ class Git(FetchMethod):
761 """ 833 """
762 Check if the repository has 'lfs' (large file) content 834 Check if the repository has 'lfs' (large file) content
763 """ 835 """
764 836 cmd = "%s grep '^[^#].*lfs' %s:.gitattributes | wc -l" % (
765 if ud.nobranch: 837 ud.basecmd, ud.revision)
766 # If no branch is specified, use the current git commit
767 refname = self._build_revision(ud, d, ud.names[0])
768 elif wd == ud.clonedir:
769 # The bare clonedir doesn't use the remote names; it has the branch immediately.
770 refname = ud.branches[ud.names[0]]
771 else:
772 refname = "origin/%s" % ud.branches[ud.names[0]]
773
774 cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
775 ud.basecmd, refname)
776 838
777 try: 839 try:
778 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 840 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
@@ -782,12 +844,14 @@ class Git(FetchMethod):
782 pass 844 pass
783 return False 845 return False
784 846
785 def _find_git_lfs(self, d): 847 def _ensure_git_lfs(self, d, ud):
786 """ 848 """
787 Return True if git-lfs can be found, False otherwise. 849 Ensures that git-lfs is available, raising a FetchError if it isn't.
788 """ 850 """
789 import shutil 851 if shutil.which("git-lfs", path=d.getVar('PATH')) is None:
790 return shutil.which("git-lfs", path=d.getVar('PATH')) is not None 852 raise bb.fetch2.FetchError(
853 "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 "
854 "to ignore it)" % self._get_repo_url(ud))
791 855
792 def _get_repo_url(self, ud): 856 def _get_repo_url(self, ud):
793 """ 857 """
@@ -795,21 +859,21 @@ class Git(FetchMethod):
795 """ 859 """
796 # Note that we do not support passwords directly in the git urls. There are several 860 # Note that we do not support passwords directly in the git urls. There are several
797 # reasons. SRC_URI can be written out to things like buildhistory and people don't 861 # reasons. SRC_URI can be written out to things like buildhistory and people don't
798 # want to leak passwords like that. It's also all too easy to share metadata without 862 # want to leak passwords like that. It's also all too easy to share metadata without
799 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as 863 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
800 # alternatives so we will not take patches adding password support here. 864 # alternatives so we will not take patches adding password support here.
801 if ud.user: 865 if ud.user:
802 username = ud.user + '@' 866 username = ud.user + '@'
803 else: 867 else:
804 username = "" 868 username = ""
805 return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) 869 return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path))
806 870
807 def _revision_key(self, ud, d, name): 871 def _revision_key(self, ud, d, name):
808 """ 872 """
809 Return a unique key for the url 873 Return a unique key for the url
810 """ 874 """
811 # Collapse adjacent slashes 875 # Collapse adjacent slashes
812 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] 876 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev
813 877
814 def _lsremote(self, ud, d, search): 878 def _lsremote(self, ud, d, search):
815 """ 879 """
@@ -842,26 +906,26 @@ class Git(FetchMethod):
842 Compute the HEAD revision for the url 906 Compute the HEAD revision for the url
843 """ 907 """
844 if not d.getVar("__BBSRCREV_SEEN"): 908 if not d.getVar("__BBSRCREV_SEEN"):
845 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path)) 909 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path))
846 910
847 # Ensure we mark as not cached 911 # Ensure we mark as not cached
848 bb.fetch2.mark_recipe_nocache(d) 912 bb.fetch2.mark_recipe_nocache(d)
849 913
850 output = self._lsremote(ud, d, "") 914 output = self._lsremote(ud, d, "")
851 # Tags of the form ^{} may not work, need to fall back to the other form 915 # Tags of the form ^{} may not work, need to fall back to the other form
852 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: 916 if ud.unresolvedrev[:5] == "refs/" or ud.usehead:
853 head = ud.unresolvedrev[name] 917 head = ud.unresolvedrev
854 tag = ud.unresolvedrev[name] 918 tag = ud.unresolvedrev
855 else: 919 else:
856 head = "refs/heads/%s" % ud.unresolvedrev[name] 920 head = "refs/heads/%s" % ud.unresolvedrev
857 tag = "refs/tags/%s" % ud.unresolvedrev[name] 921 tag = "refs/tags/%s" % ud.unresolvedrev
858 for s in [head, tag + "^{}", tag]: 922 for s in [head, tag + "^{}", tag]:
859 for l in output.strip().split('\n'): 923 for l in output.strip().split('\n'):
860 sha1, ref = l.split() 924 sha1, ref = l.split()
861 if s == ref: 925 if s == ref:
862 return sha1 926 return sha1
863 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ 927 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
864 (ud.unresolvedrev[name], ud.host+ud.path)) 928 (ud.unresolvedrev, ud.host+ud.path))
865 929
866 def latest_versionstring(self, ud, d): 930 def latest_versionstring(self, ud, d):
867 """ 931 """
@@ -912,23 +976,22 @@ class Git(FetchMethod):
912 return pupver 976 return pupver
913 977
914 def _build_revision(self, ud, d, name): 978 def _build_revision(self, ud, d, name):
915 return ud.revisions[name] 979 return ud.revision
916 980
917 def gitpkgv_revision(self, ud, d, name): 981 def gitpkgv_revision(self, ud, d, name):
918 """ 982 """
919 Return a sortable revision number by counting commits in the history 983 Return a sortable revision number by counting commits in the history
920 Based on gitpkgv.bbclass in meta-openembedded 984 Based on gitpkgv.bbclass in meta-openembedded
921 """ 985 """
922 rev = self._build_revision(ud, d, name) 986 rev = ud.revision
923 localpath = ud.localpath 987 localpath = ud.localpath
924 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) 988 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
925 if not os.path.exists(localpath): 989 if not os.path.exists(localpath):
926 commits = None 990 commits = None
927 else: 991 else:
928 if not os.path.exists(rev_file) or not os.path.getsize(rev_file): 992 if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
929 from pipes import quote
930 commits = bb.fetch2.runfetchcmd( 993 commits = bb.fetch2.runfetchcmd(
931 "git rev-list %s -- | wc -l" % quote(rev), 994 "git rev-list %s -- | wc -l" % shlex.quote(rev),
932 d, quiet=True).strip().lstrip('0') 995 d, quiet=True).strip().lstrip('0')
933 if commits: 996 if commits:
934 open(rev_file, "w").write("%d\n" % int(commits)) 997 open(rev_file, "w").write("%d\n" % int(commits))
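The tag verification added to unpack() above hinges on 'git rev-list -n 1' peeling annotated tags down to the commit they point at, so a tag whose own object id differs from SRCREV is only an error if the two resolve to different commits. A minimal standalone sketch of that logic (the function names here are illustrative, not bitbake API):

    import subprocess

    def resolve_commit(repo, ref):
        # 'git rev-list -n 1 <ref>' returns the commit an annotated tag points at
        return subprocess.check_output(
            ["git", "rev-list", "-n", "1", ref], cwd=repo, text=True).strip()

    def check_tag_matches_srcrev(repo, tag, srcrev):
        # Only a real divergence between the tag and the pinned revision is fatal
        if resolve_commit(repo, tag) != resolve_commit(repo, srcrev):
            raise ValueError("tag %s does not resolve to SRCREV %s" % (tag, srcrev))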
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..5869e1b99b 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -62,36 +62,35 @@ class GitSM(Git):
62 return modules 62 return modules
63 63
64 # Collect the defined submodules, and their attributes 64 # Collect the defined submodules, and their attributes
65 for name in ud.names: 65 try:
66 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=workdir)
67 except:
68 # No submodules to update
69 gitmodules = ""
70
71 for m, md in parse_gitmodules(gitmodules).items():
66 try: 72 try:
67 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir) 73 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revision, md['path']), d, quiet=True, workdir=workdir)
68 except: 74 except:
69 # No submodules to update 75 # If the command fails, we don't have a valid file to check. If it doesn't
76 # fail -- it still might be a failure, see next check...
77 module_hash = ""
78
79 if not module_hash:
80 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
70 continue 81 continue
71 82
72 for m, md in parse_gitmodules(gitmodules).items(): 83 submodules.append(m)
73 try: 84 paths[m] = md['path']
74 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir) 85 revision[m] = ud.revision
75 except: 86 uris[m] = md['url']
76 # If the command fails, we don't have a valid file to check. If it doesn't 87 subrevision[m] = module_hash.split()[2]
77 # fail -- it still might be a failure, see next check... 88
78 module_hash = "" 89 # Convert relative to absolute uri based on parent uri
79 90 if uris[m].startswith('..') or uris[m].startswith('./'):
80 if not module_hash: 91 newud = copy.copy(ud)
81 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) 92 newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
82 continue 93 uris[m] = Git._get_repo_url(self, newud)
83
84 submodules.append(m)
85 paths[m] = md['path']
86 revision[m] = ud.revisions[name]
87 uris[m] = md['url']
88 subrevision[m] = module_hash.split()[2]
89
90 # Convert relative to absolute uri based on parent uri
91 if uris[m].startswith('..') or uris[m].startswith('./'):
92 newud = copy.copy(ud)
93 newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
94 uris[m] = Git._get_repo_url(self, newud)
95 94
96 for module in submodules: 95 for module in submodules:
97 # Translate the module url into a SRC_URI 96 # Translate the module url into a SRC_URI
@@ -123,7 +122,7 @@ class GitSM(Git):
123 url += ";name=%s" % module 122 url += ";name=%s" % module
124 url += ";subpath=%s" % module 123 url += ";subpath=%s" % module
125 url += ";nobranch=1" 124 url += ";nobranch=1"
126 url += ";lfs=%s" % self._need_lfs(ud) 125 url += ";lfs=%s" % ("1" if self._need_lfs(ud) else "0")
127 # Note that adding "user=" here to give credentials to the 126 # Note that adding "user=" here to give credentials to the
128 # submodule is not supported. Since using SRC_URI to give git:// 127 # submodule is not supported. Since using SRC_URI to give git://
129 # URL a password is not supported, one has to use one of the 128 # URL a password is not supported, one has to use one of the
@@ -147,6 +146,22 @@ class GitSM(Git):
147 146
148 return submodules != [] 147 return submodules != []
149 148
149 def call_process_submodules(self, ud, d, extra_check, subfunc):
150 # If we're using a shallow mirror tarball it needs to be
151 # unpacked temporarily so that we can examine the .gitmodules file
152 # Unpack even when ud.clonedir is not available,
153 # which may occur during a fast shallow clone
154 unpack = extra_check or not os.path.exists(ud.clonedir)
155 if ud.shallow and os.path.exists(ud.fullshallow) and unpack:
156 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
157 try:
158 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
159 self.process_submodules(ud, tmpdir, subfunc, d)
160 finally:
161 shutil.rmtree(tmpdir)
162 else:
163 self.process_submodules(ud, ud.clonedir, subfunc, d)
164
150 def need_update(self, ud, d): 165 def need_update(self, ud, d):
151 if Git.need_update(self, ud, d): 166 if Git.need_update(self, ud, d):
152 return True 167 return True
@@ -164,15 +179,7 @@ class GitSM(Git):
164 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e))) 179 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
165 need_update_result = True 180 need_update_result = True
166 181
167 # If we're using a shallow mirror tarball it needs to be unpacked 182 self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
168 # temporarily so that we can examine the .gitmodules file
169 if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
170 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
171 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
172 self.process_submodules(ud, tmpdir, need_update_submodule, d)
173 shutil.rmtree(tmpdir)
174 else:
175 self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
176 183
177 if need_update_list: 184 if need_update_list:
178 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) 185 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +202,7 @@ class GitSM(Git):
195 raise 202 raise
196 203
197 Git.download(self, ud, d) 204 Git.download(self, ud, d)
198 205 self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
199 # If we're using a shallow mirror tarball it needs to be unpacked
200 # temporarily so that we can examine the .gitmodules file
201 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
202 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
203 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
204 self.process_submodules(ud, tmpdir, download_submodule, d)
205 shutil.rmtree(tmpdir)
206 else:
207 self.process_submodules(ud, ud.clonedir, download_submodule, d)
208 206
209 def unpack(self, ud, destdir, d): 207 def unpack(self, ud, destdir, d):
210 def unpack_submodules(ud, url, module, modpath, workdir, d): 208 def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -247,15 +245,27 @@ class GitSM(Git):
247 ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) 245 ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
248 246
249 if not ud.bareclone and ret: 247 if not ud.bareclone and ret:
250 # All submodules should already be downloaded and configured in the tree. This simply 248 cmdprefix = ""
251 # sets up the configuration and checks out the files. The main project config should 249 # Avoid LFS smudging (replacing the LFS pointers with the actual content) when LFS shouldn't be used but git-lfs is installed.
252 # remain unmodified, and no download from the internet should occur. As such, lfs smudge 250 if not self._need_lfs(ud):
253 # should also be skipped as these files were already smudged in the fetch stage if lfs 251 cmdprefix = "GIT_LFS_SKIP_SMUDGE=1 "
254 # was enabled. 252 runfetchcmd("%s%s submodule update --recursive --no-fetch" % (cmdprefix, ud.basecmd), d, quiet=True, workdir=ud.destdir)
255 runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir) 253 def clean(self, ud, d):
254 def clean_submodule(ud, url, module, modpath, workdir, d):
255 url += ";bareclone=1;nobranch=1"
256 try:
257 newfetch = Fetch([url], d, cache=False)
258 newfetch.clean()
259 except Exception as e:
260 logger.warning('gitsm: submodule clean failed: %s %s' % (type(e).__name__, str(e)))
261
262 self.call_process_submodules(ud, d, True, clean_submodule)
263
264 # Clean top git dir
265 Git.clean(self, ud, d)
256 266
257 def implicit_urldata(self, ud, d): 267 def implicit_urldata(self, ud, d):
258 import shutil, subprocess, tempfile 268 import subprocess
259 269
260 urldata = [] 270 urldata = []
261 def add_submodule(ud, url, module, modpath, workdir, d): 271 def add_submodule(ud, url, module, modpath, workdir, d):
@@ -263,14 +273,6 @@ class GitSM(Git):
263 newfetch = Fetch([url], d, cache=False) 273 newfetch = Fetch([url], d, cache=False)
264 urldata.extend(newfetch.expanded_urldata()) 274 urldata.extend(newfetch.expanded_urldata())
265 275
266 # If we're using a shallow mirror tarball it needs to be unpacked 276 self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
267 # temporarily so that we can examine the .gitmodules file
268 if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
269 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
270 subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
271 self.process_submodules(ud, tmpdir, add_submodule, d)
272 shutil.rmtree(tmpdir)
273 else:
274 self.process_submodules(ud, ud.clonedir, add_submodule, d)
275 277
276 return urldata 278 return urldata
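The call_process_submodules() helper introduced above centralizes a pattern that was previously copied in need_update(), download() and implicit_urldata(): when only the shallow mirror tarball is available, unpack it to a temporary directory so the .gitmodules file can be examined, then clean up. A rough sketch of the pattern under hypothetical names:

    import os
    import shutil
    import subprocess
    import tempfile

    def with_submodule_workdir(tarball, clonedir, dl_dir, func):
        # Prefer the existing clone; otherwise unpack the shallow mirror
        # tarball into a throwaway directory just long enough to inspect it
        if os.path.exists(clonedir):
            return func(clonedir)
        tmpdir = tempfile.mkdtemp(dir=dl_dir)
        try:
            subprocess.check_call(["tar", "-xzf", tarball], cwd=tmpdir)
            return func(tmpdir)
        finally:
            shutil.rmtree(tmpdir)

Note the try/finally: the pre-refactor copies leaked the temporary directory if the callback raised, which the new helper also fixes.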
diff --git a/bitbake/lib/bb/fetch2/gomod.py b/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..53c1d8d115
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,273 @@
1"""
2BitBake 'Fetch' implementation for Go modules
3
4The gomod/gomodgit fetchers are used to download Go modules to the module cache
5from a module proxy or directly from a version control repository.
6
7Example SRC_URI:
8
9SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
10SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
11
12Required SRC_URI parameters:
13
14- version
15 The version of the module.
16
17Optional SRC_URI parameters:
18
19- mod
20 Fetch and unpack the go.mod file only instead of the complete module.
21 The go command may need to download go.mod files for many different modules
22 when computing the build list, and go.mod files are much smaller than
23 module zip files.
24 The default is "0", set mod=1 for the go.mod file only.
25
26- sha256sum
27 The checksum of the module zip file, or the go.mod file in case of fetching
28 only the go.mod file. Alternatively, set the SRC_URI variable flag for
29 "module@version.sha256sum".
30
31- protocol
32 The method used when fetching directly from a version control repository.
33 The default is "https" for git.
34
35- repo
36 The URL when fetching directly from a version control repository. Required
37 when the URL is different from the module path.
38
39- srcrev
40 The revision identifier used when fetching directly from a version control
41 repository. Alternatively, set the SRCREV variable for "module@version".
42
43- subdir
44 The module subdirectory when fetching directly from a version control
45 repository. Required when the module is not located in the root of the
46 repository.
47
48Related variables:
49
50- GO_MOD_PROXY
51 The module proxy used by the fetcher.
52
53- GO_MOD_CACHE_DIR
54 The directory where the module cache is located.
55 This must match the exported GOMODCACHE variable for the go command to find
56 the downloaded modules.
57
58See the Go modules reference, https://go.dev/ref/mod, for more information
59about the module cache, module proxies and version control systems.
60"""
61
62import hashlib
63import os
64import re
65import shutil
66import subprocess
67import zipfile
68
69import bb
70from bb.fetch2 import FetchError
71from bb.fetch2 import MissingParameterError
72from bb.fetch2 import runfetchcmd
73from bb.fetch2 import subprocess_setup
74from bb.fetch2.git import Git
75from bb.fetch2.wget import Wget
76
77
78def escape(path):
79 """Escape capital letters using exclamation points."""
80 return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
81
82
83class GoMod(Wget):
84 """Class to fetch Go modules from a Go module proxy via wget"""
85
86 def supports(self, ud, d):
87 """Check to see if a given URL is for this fetcher."""
88 return ud.type == 'gomod'
89
90 def urldata_init(self, ud, d):
91 """Set up to download the module from the module proxy.
92
93 Set up to download the module zip file to the module cache directory
94 and unpack the go.mod file (unless downloading only the go.mod file):
95
96 cache/download/<module>/@v/<version>.zip: The module zip file.
97 cache/download/<module>/@v/<version>.mod: The go.mod file.
98 """
99
100 proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
101 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
102
103 if 'version' not in ud.parm:
104 raise MissingParameterError('version', ud.url)
105
106 module = ud.host
107 if ud.path != '/':
108 module += ud.path
109 ud.parm['module'] = module
110 version = ud.parm['version']
111
112 # Set URL and filename for wget download
113 if ud.parm.get('mod', '0') == '1':
114 ext = '.mod'
115 else:
116 ext = '.zip'
117 path = escape(f"{module}/@v/{version}{ext}")
118 ud.url = bb.fetch2.encodeurl(
119 ('https', proxy, '/' + path, None, None, None))
120 ud.parm['downloadfilename'] = f"{module.replace('/', '.')}@{version}{ext}"
121
122 # Set name for checksum verification
123 ud.parm['name'] = f"{module}@{version}"
124
125 # Set path for unpack
126 ud.parm['unpackpath'] = os.path.join(moddir, 'cache/download', path)
127
128 super().urldata_init(ud, d)
129
130 def unpack(self, ud, rootdir, d):
131 """Unpack the module in the module cache."""
132
133 # Unpack the module zip file or go.mod file
134 unpackpath = os.path.join(rootdir, ud.parm['unpackpath'])
135 unpackdir = os.path.dirname(unpackpath)
136 bb.utils.mkdirhier(unpackdir)
137 ud.unpack_tracer.unpack("file-copy", unpackdir)
138 cmd = f"cp {ud.localpath} {unpackpath}"
139 path = d.getVar('PATH')
140 if path:
141 cmd = f"PATH={path} {cmd}"
142 name = os.path.basename(unpackpath)
143 bb.note(f"Unpacking {name} to {unpackdir}/")
144 subprocess.check_call(cmd, shell=True, preexec_fn=subprocess_setup)
145
146 if name.endswith('.zip'):
147 # Unpack the go.mod file from the zip file
148 module = ud.parm['module']
149 name = name.rsplit('.', 1)[0] + '.mod'
150 bb.note(f"Unpacking {name} to {unpackdir}/")
151 with zipfile.ZipFile(ud.localpath) as zf:
152 with open(os.path.join(unpackdir, name), mode='wb') as mf:
153 try:
154 f = module + '@' + ud.parm['version'] + '/go.mod'
155 shutil.copyfileobj(zf.open(f), mf)
156 except KeyError:
157 # If the module does not have a go.mod file, synthesize
158 # one containing only a module statement.
159 mf.write(f'module {module}\n'.encode())
160
161
162class GoModGit(Git):
163 """Class to fetch Go modules directly from a git repository"""
164
165 def supports(self, ud, d):
166 """Check to see if a given URL is for this fetcher."""
167 return ud.type == 'gomodgit'
168
169 def urldata_init(self, ud, d):
170 """Set up to download the module from the git repository.
171
172 Set up to download the git repository to the module cache directory and
173 unpack the module zip file and the go.mod file:
174
175 cache/vcs/<hash>: The bare git repository.
176 cache/download/<module>/@v/<version>.zip: The module zip file.
177 cache/download/<module>/@v/<version>.mod: The go.mod file.
178 """
179
180 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
181
182 if 'version' not in ud.parm:
183 raise MissingParameterError('version', ud.url)
184
185 module = ud.host
186 if ud.path != '/':
187 module += ud.path
188 ud.parm['module'] = module
189
190 # Set host, path and srcrev for git download
191 if 'repo' in ud.parm:
192 repo = ud.parm['repo']
193 idx = repo.find('/')
194 if idx != -1:
195 ud.host = repo[:idx]
196 ud.path = repo[idx:]
197 else:
198 ud.host = repo
199 ud.path = ''
200 if 'protocol' not in ud.parm:
201 ud.parm['protocol'] = 'https'
202 ud.name = f"{module}@{ud.parm['version']}"
203 srcrev = d.getVar('SRCREV_' + ud.name)
204 if srcrev:
205 if 'srcrev' not in ud.parm:
206 ud.parm['srcrev'] = srcrev
207 else:
208 if 'srcrev' in ud.parm:
209 d.setVar('SRCREV_' + ud.name, ud.parm['srcrev'])
210 if 'branch' not in ud.parm:
211 ud.parm['nobranch'] = '1'
212
213 # Set subpath, subdir and bareclone for git unpack
214 if 'subdir' in ud.parm:
215 ud.parm['subpath'] = ud.parm['subdir']
216 key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
217 ud.parm['key'] = key
218 ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
219 hashlib.sha256(key).hexdigest())
220 ud.parm['bareclone'] = '1'
221
222 super().urldata_init(ud, d)
223
224 def unpack(self, ud, rootdir, d):
225 """Unpack the module in the module cache."""
226
227 # Unpack the bare git repository
228 super().unpack(ud, rootdir, d)
229
230 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
231
232 # Create the info file
233 module = ud.parm['module']
234 repodir = os.path.join(rootdir, ud.parm['subdir'])
235 with open(repodir + '.info', 'wb') as f:
236 f.write(ud.parm['key'])
237
238 # Unpack the go.mod file from the repository
239 unpackdir = os.path.join(rootdir, moddir, 'cache/download',
240 escape(module), '@v')
241 bb.utils.mkdirhier(unpackdir)
242 srcrev = ud.parm['srcrev']
243 version = ud.parm['version']
244 escaped_version = escape(version)
245 cmd = f"git ls-tree -r --name-only '{srcrev}'"
246 if 'subpath' in ud.parm:
247 cmd += f" '{ud.parm['subpath']}'"
248 files = runfetchcmd(cmd, d, workdir=repodir).split()
249 name = escaped_version + '.mod'
250 bb.note(f"Unpacking {name} to {unpackdir}/")
251 with open(os.path.join(unpackdir, name), mode='wb') as mf:
252 f = 'go.mod'
253 if 'subpath' in ud.parm:
254 f = os.path.join(ud.parm['subpath'], f)
255 if f in files:
256 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
257 subprocess.check_call(cmd, stdout=mf, cwd=repodir,
258 preexec_fn=subprocess_setup)
259 else:
260 # If the module does not have a go.mod file, synthesize one
261 # containing only a module statement.
262 mf.write(f'module {module}\n'.encode())
263
264 # Synthesize the module zip file from the repository
265 name = escaped_version + '.zip'
266 bb.note(f"Unpacking {name} to {unpackdir}/")
267 with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
268 prefix = module + '@' + version + '/'
269 for f in files:
270 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
271 data = subprocess.check_output(cmd, cwd=repodir,
272 preexec_fn=subprocess_setup)
273 zf.writestr(prefix + f, data)
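The escape() helper in the new fetcher implements the case-encoding described in the Go module reference (https://go.dev/ref/mod): each capital letter is replaced by '!' plus its lowercase form, keeping module cache paths unambiguous on case-insensitive filesystems. For example:

    import re

    def escape(path):
        # '!' case-encoding per the Go module proxy protocol
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    assert escape("github.com/Azure/azure-sdk") == "github.com/!azure/azure-sdk"
    assert escape("golang.org/x/net") == "golang.org/x/net"  # all-lowercase paths pass through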
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 7d7668110e..fda56a564e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -29,11 +29,10 @@ class Local(FetchMethod):
29 29
30 def urldata_init(self, ud, d): 30 def urldata_init(self, ud, d):
31 # We don't set localfile as for this fetcher the file is already local! 31 # We don't set localfile as for this fetcher the file is already local!
32 ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0]) 32 ud.basename = os.path.basename(ud.path)
33 ud.basename = os.path.basename(ud.decodedurl) 33 ud.basepath = ud.path
34 ud.basepath = ud.decodedurl
35 ud.needdonestamp = False 34 ud.needdonestamp = False
36 if "*" in ud.decodedurl: 35 if "*" in ud.path:
37 raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url) 36 raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
38 return 37 return
39 38
@@ -48,7 +47,7 @@ class Local(FetchMethod):
48 Return the local filename of a given url assuming a successful fetch. 47 Return the local filename of a given url assuming a successful fetch.
49 """ 48 """
50 searched = [] 49 searched = []
51 path = urldata.decodedurl 50 path = urldata.path
52 newpath = path 51 newpath = path
53 if path[0] == "/": 52 if path[0] == "/":
54 logger.debug2("Using absolute %s" % (path)) 53 logger.debug2("Using absolute %s" % (path))
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 15f3f19bc8..e469d66768 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -42,11 +42,12 @@ from bb.utils import is_semver
42 42
43def npm_package(package): 43def npm_package(package):
44 """Convert the npm package name to remove unsupported character""" 44 """Convert the npm package name to remove unsupported character"""
45 # Scoped package names (with the @) use the same naming convention 45 # For scoped package names ('@user/package') the '/' is replaced by a '-'.
46 # as the 'npm pack' command. 46 # This is similar to what 'npm pack' does, but 'npm pack' also strips the
47 # leading '@', which can lead to ambiguous package names.
47 name = re.sub("/", "-", package) 48 name = re.sub("/", "-", package)
48 name = name.lower() 49 name = name.lower()
49 name = re.sub(r"[^\-a-z0-9]", "", name) 50 name = re.sub(r"[^\-a-z0-9@]", "", name)
50 name = name.strip("-") 51 name = name.strip("-")
51 return name 52 return name
52 53
@@ -90,6 +91,12 @@ class NpmEnvironment(object):
90 self.d = d 91 self.d = d
91 92
92 self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) 93 self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
94
95 hn = self._home_npmrc(d)
96 if hn is not None:
97 with open(hn, 'r') as hnf:
98 self.user_config.write(hnf.read())
99
93 for key, value in configs: 100 for key, value in configs:
94 self.user_config.write("%s=%s\n" % (key, value)) 101 self.user_config.write("%s=%s\n" % (key, value))
95 102
@@ -102,6 +109,15 @@ class NpmEnvironment(object):
102 if self.user_config: 109 if self.user_config:
103 self.user_config.close() 110 self.user_config.close()
104 111
112 def _home_npmrc(self, d):
113 """Function to return user's HOME .npmrc file (or None if it doesn't exist)"""
114 home_npmrc_file = os.path.join(os.environ.get("HOME"), ".npmrc")
115 if d.getVar("BB_USE_HOME_NPMRC") == "1" and os.path.exists(home_npmrc_file):
116 bb.warn(f"BB_USE_HOME_NPMRC flag set and valid .npmrc detected - "\
117 f"npm fetcher will use {home_npmrc_file}")
118 return home_npmrc_file
119 return None
120
105 def run(self, cmd, args=None, configs=None, workdir=None): 121 def run(self, cmd, args=None, configs=None, workdir=None):
106 """Run npm command in a controlled environment""" 122 """Run npm command in a controlled environment"""
107 with tempfile.TemporaryDirectory() as tmpdir: 123 with tempfile.TemporaryDirectory() as tmpdir:
@@ -165,7 +181,7 @@ class Npm(FetchMethod):
165 # Using the 'downloadfilename' parameter as local filename 181 # Using the 'downloadfilename' parameter as local filename
166 # or the npm package name. 182 # or the npm package name.
167 if "downloadfilename" in ud.parm: 183 if "downloadfilename" in ud.parm:
168 ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"])) 184 ud.localfile = npm_localfile(ud.parm["downloadfilename"])
169 else: 185 else:
170 ud.localfile = npm_localfile(ud.package, ud.version) 186 ud.localfile = npm_localfile(ud.package, ud.version)
171 187
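With the revised npm_package() above, the '/' in a scoped name becomes '-', the result is lowercased, and '@' is now preserved so scoped and unscoped packages cannot collide. A self-contained copy with illustrative sample names:

    import re

    def npm_package(package):
        name = re.sub("/", "-", package)
        name = name.lower()
        name = re.sub(r"[^\-a-z0-9@]", "", name)  # '@' is now kept
        return name.strip("-")

    assert npm_package("@Types/Node") == "@types-node"
    assert npm_package("lodash") == "lodash"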
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index ff5f8dc755..2f9599ee9e 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -37,38 +37,26 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
37 """ 37 """
38 Run a callback for each dependency of a shrinkwrap file. 38 Run a callback for each dependency of a shrinkwrap file.
39 The callback is using the format: 39 The callback is using the format:
40 callback(name, params, deptree) 40 callback(name, data, location)
41 with: 41 with:
42 name = the package name (string) 42 name = the package name (string)
43 params = the package parameters (dictionary) 43 data = the package data (dictionary)
44 destdir = the destination of the package (string) 44 location = the location of the package (string)
45 """ 45 """
46 # For handling old style dependency entries in shrinkwrap files 46 packages = shrinkwrap.get("packages")
47 def _walk_deps(deps, deptree): 47 if not packages:
48 for name in deps: 48 raise FetchError("Invalid shrinkwrap file format")
49 subtree = [*deptree, name] 49
50 _walk_deps(deps[name].get("dependencies", {}), subtree) 50 for location, data in packages.items():
51 if callback is not None: 51 # Skip empty main and local link target packages
52 if deps[name].get("dev", False) and not dev: 52 if not location.startswith('node_modules/'):
53 continue 53 continue
54 elif deps[name].get("bundled", False): 54 elif not dev and data.get("dev", False):
55 continue 55 continue
56 destsubdirs = [os.path.join("node_modules", dep) for dep in subtree] 56 elif data.get("inBundle", False):
57 destsuffix = os.path.join(*destsubdirs) 57 continue
58 callback(name, deps[name], destsuffix) 58 name = location.split('node_modules/')[-1]
59 59 callback(name, data, location)
60 # packages entry means new style shrinkwrap file, else use dependencies
61 packages = shrinkwrap.get("packages", None)
62 if packages is not None:
63 for package in packages:
64 if package != "":
65 name = package.split('node_modules/')[-1]
66 package_infos = packages.get(package, {})
67 if dev == False and package_infos.get("dev", False):
68 continue
69 callback(name, package_infos, package)
70 else:
71 _walk_deps(shrinkwrap.get("dependencies", {}), [])
72 60
73class NpmShrinkWrap(FetchMethod): 61class NpmShrinkWrap(FetchMethod):
74 """Class to fetch all package from a shrinkwrap file""" 62 """Class to fetch all package from a shrinkwrap file"""
@@ -95,12 +83,18 @@ class NpmShrinkWrap(FetchMethod):
95 extrapaths = [] 83 extrapaths = []
96 unpack = True 84 unpack = True
97 85
98 integrity = params.get("integrity", None) 86 integrity = params.get("integrity")
99 resolved = params.get("resolved", None) 87 resolved = params.get("resolved")
100 version = params.get("version", None) 88 version = params.get("version")
89 link = params.get("link", False)
90
91 # Handle link sources
92 if link:
93 localpath = resolved
94 unpack = False
101 95
102 # Handle registry sources 96 # Handle registry sources
103 if is_semver(version) and integrity: 97 elif version and is_semver(version) and integrity:
104 # Handle duplicate dependencies without url 98 # Handle duplicate dependencies without url
105 if not resolved: 99 if not resolved:
106 return 100 return
@@ -128,10 +122,10 @@ class NpmShrinkWrap(FetchMethod):
128 extrapaths.append(resolvefile) 122 extrapaths.append(resolvefile)
129 123
130 # Handle http tarball sources 124 # Handle http tarball sources
131 elif version.startswith("http") and integrity: 125 elif resolved.startswith("http") and integrity:
132 localfile = npm_localfile(os.path.basename(version)) 126 localfile = npm_localfile(os.path.basename(resolved))
133 127
134 uri = URI(version) 128 uri = URI(resolved)
135 uri.params["downloadfilename"] = localfile 129 uri.params["downloadfilename"] = localfile
136 130
137 checksum_name, checksum_expected = npm_integrity(integrity) 131 checksum_name, checksum_expected = npm_integrity(integrity)
@@ -141,28 +135,12 @@ class NpmShrinkWrap(FetchMethod):
141 135
142 localpath = os.path.join(d.getVar("DL_DIR"), localfile) 136 localpath = os.path.join(d.getVar("DL_DIR"), localfile)
143 137
144 # Handle local tarball and link sources 138 # Handle local tarball sources
145 elif version.startswith("file"): 139 elif resolved.startswith("file"):
146 localpath = version[5:] 140 localpath = resolved[5:]
147 if not version.endswith(".tgz"):
148 unpack = False
149 141
150 # Handle git sources 142 # Handle git sources
151 elif version.startswith(("git", "bitbucket","gist")) or ( 143 elif resolved.startswith("git"):
152 not version.endswith((".tgz", ".tar", ".tar.gz"))
153 and not version.startswith((".", "@", "/"))
154 and "/" in version
155 ):
156 if version.startswith("github:"):
157 version = "git+https://github.com/" + version[len("github:"):]
158 elif version.startswith("gist:"):
159 version = "git+https://gist.github.com/" + version[len("gist:"):]
160 elif version.startswith("bitbucket:"):
161 version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
162 elif version.startswith("gitlab:"):
163 version = "git+https://gitlab.com/" + version[len("gitlab:"):]
164 elif not version.startswith(("git+","git:")):
165 version = "git+https://github.com/" + version
166 regex = re.compile(r""" 144 regex = re.compile(r"""
167 ^ 145 ^
168 git\+ 146 git\+
@@ -174,16 +152,16 @@ class NpmShrinkWrap(FetchMethod):
174 $ 152 $
175 """, re.VERBOSE) 153 """, re.VERBOSE)
176 154
177 match = regex.match(version) 155 match = regex.match(resolved)
178
179 if not match: 156 if not match:
180 raise ParameterError("Invalid git url: %s" % version, ud.url) 157 raise ParameterError("Invalid git url: %s" % resolved, ud.url)
181 158
182 groups = match.groupdict() 159 groups = match.groupdict()
183 160
184 uri = URI("git://" + str(groups["url"])) 161 uri = URI("git://" + str(groups["url"]))
185 uri.params["protocol"] = str(groups["protocol"]) 162 uri.params["protocol"] = str(groups["protocol"])
186 uri.params["rev"] = str(groups["rev"]) 163 uri.params["rev"] = str(groups["rev"])
164 uri.params["nobranch"] = "1"
187 uri.params["destsuffix"] = destsuffix 165 uri.params["destsuffix"] = destsuffix
188 166
189 url = str(uri) 167 url = str(uri)
@@ -268,7 +246,7 @@ class NpmShrinkWrap(FetchMethod):
268 246
269 def unpack(self, ud, rootdir, d): 247 def unpack(self, ud, rootdir, d):
270 """Unpack the downloaded dependencies""" 248 """Unpack the downloaded dependencies"""
271 destdir = d.getVar("S") 249 destdir = rootdir
272 destsuffix = ud.parm.get("destsuffix") 250 destsuffix = ud.parm.get("destsuffix")
273 if destsuffix: 251 if destsuffix:
274 destdir = os.path.join(rootdir, destsuffix) 252 destdir = os.path.join(rootdir, destsuffix)
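The rewritten foreach_dependencies() above only understands the new-style shrinkwrap layout: a flat "packages" map keyed by install location, where the root entry (empty key) and link targets fall outside node_modules/ and are skipped, along with dev and bundled dependencies. A minimal sketch of the walk over a toy shrinkwrap dictionary:

    def foreach_dependencies(shrinkwrap, callback, dev=False):
        packages = shrinkwrap.get("packages")
        if not packages:
            raise ValueError("Invalid shrinkwrap file format")
        for location, data in packages.items():
            # Only entries under node_modules/ are real dependencies
            if not location.startswith("node_modules/"):
                continue
            if (not dev and data.get("dev", False)) or data.get("inBundle", False):
                continue
            callback(location.split("node_modules/")[-1], data, location)

    shrinkwrap = {"packages": {
        "": {"name": "app"},
        "node_modules/semver": {"version": "7.6.0",
            "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz"},
    }}
    foreach_dependencies(shrinkwrap, lambda name, data, loc: print(name, loc))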
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
index 6b8ffd5359..22c0538139 100644
--- a/bitbake/lib/bb/fetch2/s3.py
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -77,7 +77,7 @@ class S3(FetchMethod):
77 else: 77 else:
78 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
79 79
80 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
81 81
82 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" 82 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
83 83
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index 7884cce949..bee71a0d0d 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -77,7 +77,7 @@ class SFTP(FetchMethod):
77 else: 77 else:
78 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
79 79
80 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
81 81
82 def download(self, ud, d): 82 def download(self, ud, d):
83 """Fetch urls""" 83 """Fetch urls"""
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 0cbb2a6f25..2a0f2cb44b 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -73,8 +73,7 @@ class SSH(FetchMethod):
73 path = m.group('path') 73 path = m.group('path')
74 path = urllib.parse.unquote(path) 74 path = urllib.parse.unquote(path)
75 host = m.group('host') 75 host = m.group('host')
76 urldata.localpath = os.path.join(d.getVar('DL_DIR'), 76 urldata.localfile = os.path.basename(os.path.normpath(path))
77 os.path.basename(os.path.normpath(path)))
78 77
79 def download(self, urldata, d): 78 def download(self, urldata, d):
80 dldir = d.getVar('DL_DIR') 79 dldir = d.getVar('DL_DIR')
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index d40e4d2909..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -210,3 +210,6 @@ class Svn(FetchMethod):
210 210
211 def _build_revision(self, ud, d): 211 def _build_revision(self, ud, d):
212 return ud.revision 212 return ud.revision
213
214 def supports_checksum(self, urldata):
215 return False
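The new supports_checksum() override opts svn checkouts out of checksum verification, since a working copy has no stable archive to hash. Any fetcher can do the same; a hypothetical minimal example, assuming the base class generally enables checksums for regular files:

    from bb.fetch2 import FetchMethod

    class MyVcsFetcher(FetchMethod):
        def supports_checksum(self, urldata):
            # VCS checkouts produce directories, not fixed artifacts
            return False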
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index fbfa6938ac..4d19e2134b 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -53,11 +53,6 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
53class Wget(FetchMethod): 53class Wget(FetchMethod):
54 """Class to fetch urls via 'wget'""" 54 """Class to fetch urls via 'wget'"""
55 55
56 # CDNs like CloudFlare may do a 'browser integrity test' which can fail
57 # with the standard wget/urllib User-Agent, so pretend to be a modern
58 # browser.
59 user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
60
61 def check_certs(self, d): 56 def check_certs(self, d):
62 """ 57 """
63 Should certificates be checked? 58 Should certificates be checked?
@@ -83,11 +78,11 @@ class Wget(FetchMethod):
83 else: 78 else:
84 ud.basename = os.path.basename(ud.path) 79 ud.basename = os.path.basename(ud.path)
85 80
86 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 81 ud.localfile = ud.basename
87 if not ud.localfile: 82 if not ud.localfile:
88 ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) 83 ud.localfile = ud.host + ud.path.replace("/", ".")
89 84
90 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30" 85 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget --tries=2 --timeout=100"
91 86
92 if ud.type == 'ftp' or ud.type == 'ftps': 87 if ud.type == 'ftp' or ud.type == 'ftps':
93 self.basecmd += " --passive-ftp" 88 self.basecmd += " --passive-ftp"
@@ -101,16 +96,17 @@ class Wget(FetchMethod):
101 96
102 logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) 97 logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
103 bb.fetch2.check_network_access(d, command, ud.url) 98 bb.fetch2.check_network_access(d, command, ud.url)
104 runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) 99 runfetchcmd(command + ' --progress=dot --verbose', d, quiet, log=progresshandler, workdir=workdir)
105 100
106 def download(self, ud, d): 101 def download(self, ud, d):
107 """Fetch urls""" 102 """Fetch urls"""
108 103
109 fetchcmd = self.basecmd 104 fetchcmd = self.basecmd
110 105
111 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp" 106 dldir = os.path.realpath(d.getVar("DL_DIR"))
107 localpath = os.path.join(dldir, ud.localfile) + ".tmp"
112 bb.utils.mkdirhier(os.path.dirname(localpath)) 108 bb.utils.mkdirhier(os.path.dirname(localpath))
113 fetchcmd += " -O %s" % shlex.quote(localpath) 109 fetchcmd += " --output-document=%s" % shlex.quote(localpath)
114 110
115 if ud.user and ud.pswd: 111 if ud.user and ud.pswd:
116 fetchcmd += " --auth-no-challenge" 112 fetchcmd += " --auth-no-challenge"
@@ -126,14 +122,18 @@ class Wget(FetchMethod):
126 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd) 122 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
127 123
128 uri = ud.url.split(";")[0] 124 uri = ud.url.split(";")[0]
129 if os.path.exists(ud.localpath): 125 fetchcmd += " --continue --directory-prefix=%s '%s'" % (dldir, uri)
130 # file exists, but we didnt complete it.. trying again..
131 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
132 else:
133 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
134
135 self._runwget(ud, d, fetchcmd, False) 126 self._runwget(ud, d, fetchcmd, False)
136 127
128 # Sanity check since wget can pretend it succeeded when it didn't
129 # Also, this used to happen if sourceforge sent us to the mirror page
130 if not os.path.exists(localpath):
131 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
132
133 if os.path.getsize(localpath) == 0:
134 os.remove(localpath)
135 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
136
137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the 137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
138 # original file, which might be a race (imagine two recipes referencing the same 138 # original file, which might be a race (imagine two recipes referencing the same
139 # source, one with an incorrect checksum) 139 # source, one with an incorrect checksum)
@@ -143,15 +143,6 @@ class Wget(FetchMethod):
143 # Our lock prevents multiple writers but mirroring code may grab incomplete files 143 # Our lock prevents multiple writers but mirroring code may grab incomplete files
144 os.rename(localpath, localpath[:-4]) 144 os.rename(localpath, localpath[:-4])
145 145
146 # Sanity check since wget can pretend it succeeded when it didn't
147 # Also, this used to happen if sourceforge sent us to the mirror page
148 if not os.path.exists(ud.localpath):
149 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
150
151 if os.path.getsize(ud.localpath) == 0:
152 os.remove(ud.localpath)
153 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
154
155 return True 146 return True
156 147
157 def checkstatus(self, fetch, ud, d, try_again=True): 148 def checkstatus(self, fetch, ud, d, try_again=True):
@@ -243,7 +234,12 @@ class Wget(FetchMethod):
243 fetch.connection_cache.remove_connection(h.host, h.port) 234 fetch.connection_cache.remove_connection(h.host, h.port)
244 raise urllib.error.URLError(err) 235 raise urllib.error.URLError(err)
245 else: 236 else:
246 r = h.getresponse() 237 try:
238 r = h.getresponse()
239 except TimeoutError as e:
240 if fetch.connection_cache:
241 fetch.connection_cache.remove_connection(h.host, h.port)
242 raise TimeoutError(e)
247 243
248 # Pick apart the HTTPResponse object to get the addinfourl 244 # Pick apart the HTTPResponse object to get the addinfourl
249 # object initialized properly. 245 # object initialized properly.
@@ -304,13 +300,45 @@ class Wget(FetchMethod):
304 300
305 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): 301 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
306 """ 302 """
307 urllib2.HTTPRedirectHandler resets the method to GET on redirect, 303 urllib2.HTTPRedirectHandler before 3.13 has two flaws:
308 when we want to follow redirects using the original method. 304
305 It resets the method to GET on redirect when we want to follow
306 redirects using the original method (typically HEAD). This was fixed
307 in 759e8e7.
308
309 It also doesn't handle 308 (Permanent Redirect). This was fixed in
310 c379bc5.
311
312 Until we depend on Python 3.13 onwards, copy the redirect_request
313 method to fix these issues.
309 """ 314 """
310 def redirect_request(self, req, fp, code, msg, headers, newurl): 315 def redirect_request(self, req, fp, code, msg, headers, newurl):
311 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) 316 m = req.get_method()
312 newreq.get_method = req.get_method 317 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
313 return newreq 318 or code in (301, 302, 303) and m == "POST")):
319 raise urllib.error.HTTPError(req.full_url, code, msg, headers, fp)
320
321 # Strictly (according to RFC 2616), 301 or 302 in response to
322 # a POST MUST NOT cause a redirection without confirmation
323 # from the user (of urllib.request, in this case). In practice,
324 # essentially all clients do redirect in this case, so we do
325 # the same.
326
327 # Be conciliant with URIs containing a space. This is mainly
328 # redundant with the more complete encoding done in http_error_302(),
329 # but it is kept for compatibility with other callers.
330 newurl = newurl.replace(' ', '%20')
331
332 CONTENT_HEADERS = ("content-length", "content-type")
333 newheaders = {k: v for k, v in req.headers.items()
334 if k.lower() not in CONTENT_HEADERS}
335 return urllib.request.Request(newurl,
336 method="HEAD" if m == "HEAD" else "GET",
337 headers=newheaders,
338 origin_req_host=req.origin_req_host,
339 unverifiable=True)
340
341 http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302
314 342
315 # We need to update the environment here as both the proxy and HTTPS 343 # We need to update the environment here as both the proxy and HTTPS
316 # handlers need variables set. The proxy needs http_proxy and friends to 344 # handlers need variables set. The proxy needs http_proxy and friends to
@@ -343,14 +371,17 @@ class Wget(FetchMethod):
343 opener = urllib.request.build_opener(*handlers) 371 opener = urllib.request.build_opener(*handlers)
344 372
345 try: 373 try:
346 uri_base = ud.url.split(";")[0] 374 parts = urllib.parse.urlparse(ud.url.split(";")[0])
347 uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path) 375 if parts.query:
376 uri = "{}://{}{}?{}".format(parts.scheme, parts.netloc, parts.path, parts.query)
377 else:
378 uri = "{}://{}{}".format(parts.scheme, parts.netloc, parts.path)
348 r = urllib.request.Request(uri) 379 r = urllib.request.Request(uri)
349 r.get_method = lambda: "HEAD" 380 r.get_method = lambda: "HEAD"
350 # Some servers (FusionForge, as used on Alioth) require that the 381 # Some servers (FusionForge, as used on Alioth) require that the
351 # optional Accept header is set. 382 # optional Accept header is set.
352 r.add_header("Accept", "*/*") 383 r.add_header("Accept", "*/*")
353 r.add_header("User-Agent", self.user_agent) 384 r.add_header("User-Agent", "bitbake/{}".format(bb.__version__))
354 def add_basic_auth(login_str, request): 385 def add_basic_auth(login_str, request):
355 '''Adds Basic auth to http request, pass in login:password as string''' 386 '''Adds Basic auth to http request, pass in login:password as string'''
356 import base64 387 import base64
@@ -370,7 +401,7 @@ class Wget(FetchMethod):
370 except (FileNotFoundError, netrc.NetrcParseError): 401 except (FileNotFoundError, netrc.NetrcParseError):
371 pass 402 pass
372 403
373 with opener.open(r, timeout=30) as response: 404 with opener.open(r, timeout=100) as response:
374 pass 405 pass
375 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e: 406 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
376 if try_again: 407 if try_again:
@@ -457,7 +488,7 @@ class Wget(FetchMethod):
457 f = tempfile.NamedTemporaryFile() 488 f = tempfile.NamedTemporaryFile()
458 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: 489 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
459 fetchcmd = self.basecmd 490 fetchcmd = self.basecmd
460 fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" 491 fetchcmd += " --output-document=%s '%s'" % (f.name, uri)
461 try: 492 try:
462 self._runwget(ud, d, fetchcmd, True, workdir=workdir) 493 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
463 fetchresult = f.read() 494 fetchresult = f.read()
@@ -617,13 +648,17 @@ class Wget(FetchMethod):
617 648
618 sanity check to ensure same name and type. 649 sanity check to ensure same name and type.
619 """ 650 """
620 package = ud.path.split("/")[-1] 651 if 'downloadfilename' in ud.parm:
652 package = ud.parm['downloadfilename']
653 else:
654 package = ud.path.split("/")[-1]
621 current_version = ['', d.getVar('PV'), ''] 655 current_version = ['', d.getVar('PV'), '']
622 656
623 """possible to have no version in pkg name, such as spectrum-fw""" 657 """possible to have no version in pkg name, such as spectrum-fw"""
624 if not re.search(r"\d+", package): 658 if not re.search(r"\d+", package):
625 current_version[1] = re.sub('_', '.', current_version[1]) 659 current_version[1] = re.sub('_', '.', current_version[1])
626 current_version[1] = re.sub('-', '.', current_version[1]) 660 current_version[1] = re.sub('-', '.', current_version[1])
661 bb.debug(3, "latest_versionstring: no version found in %s" % package)
627 return (current_version[1], '') 662 return (current_version[1], '')
628 663
629 package_regex = self._init_regexes(package, ud, d) 664 package_regex = self._init_regexes(package, ud, d)
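checkstatus() above probes a URL with a HEAD request through a urllib opener rather than spawning wget. A reduced sketch of the same probe, using the new bitbake User-Agent and the longer 100 second timeout (the version string below is illustrative):

    import urllib.error
    import urllib.request

    def url_exists(uri, timeout=100):
        req = urllib.request.Request(uri, method="HEAD")
        req.add_header("Accept", "*/*")
        req.add_header("User-Agent", "bitbake/2.15.0")
        try:
            with urllib.request.urlopen(req, timeout=timeout):
                return True
        except (urllib.error.URLError, ConnectionResetError, TimeoutError):
            return False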
diff --git a/bitbake/lib/bb/filter.py b/bitbake/lib/bb/filter.py
new file mode 100644
index 0000000000..0b5b5d92ca
--- /dev/null
+++ b/bitbake/lib/bb/filter.py
@@ -0,0 +1,142 @@
1#
2# Copyright (C) 2025 Garmin Ltd. or its subsidiaries
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import builtins
8
9# Purposely blank out __builtins__ which prevents users from
10# calling any normal builtin python functions
11FILTERS = {
12 "__builtins__": {},
13}
14
15CACHE = {}
16
17
18def apply_filters(val, expressions):
19 g = FILTERS.copy()
20
21 for e in expressions:
22 e = e.strip()
23 if not e:
24 continue
25
26 k = (val, e)
27 if k not in CACHE:
28 # Set val as a local so it can be cleared out while keeping the
29 # globals
30 l = {"val": val}
31
32 CACHE[k] = eval(e, g, l)
33
34 val = CACHE[k]
35
36 return val
37
38
39class Namespace(object):
40 """
41 Helper class to simulate a python namespace. The object properties can be
42 set as if it were a dictionary. Properties cannot be changed or deleted
43 through the object interface
44 """
45
46 def __getitem__(self, name):
47 return self.__dict__[name]
48
49 def __setitem__(self, name, value):
50 self.__dict__[name] = value
51
52 def __contains__(self, name):
53 return name in self.__dict__
54
55 def __setattr__(self, name, value):
56 raise AttributeError(f"Attribute {name!r} cannot be changed")
57
58 def __delattr__(self, name):
59 raise AttributeError(f"Attribute {name!r} cannot be deleted")
60
61
62def filter_proc(*, name=None):
63 """
64 Decorator to mark a function that can be called in `apply_filters`, either
65 directly in a filter expression, or indirectly. The `name` argument can be
66 used to specify an alternate name for the function if the actual name is
67 not desired. The `name` can be a fully qualified namespace if desired.
68
69 All functions must be "pure" in that they do not depend on global state and
70 have no global side effects (e.g. the output only depends on the input
71 arguments); the results of filter expressions are cached to optimize
72 repeated calls.
73 """
74
75 def inner(func):
76 global FILTERS
77 nonlocal name
78
79 if name is None:
80 name = func.__name__
81
82 ns = name.split(".")
83 o = FILTERS
84 for n in ns[:-1]:
85 if not n in o:
86 o[n] = Namespace()
87 o = o[n]
88
89 o[ns[-1]] = func
90
91 return func
92
93 return inner
94
95
96# A select set of builtins that are supported in filter expressions
 97filter_proc()(abs)
98filter_proc()(all)
99filter_proc()(any)
100filter_proc()(bin)
101filter_proc()(bool)
102filter_proc()(chr)
103filter_proc()(enumerate)
104filter_proc()(float)
105filter_proc()(format)
106filter_proc()(hex)
107filter_proc()(int)
108filter_proc()(len)
109filter_proc()(map)
110filter_proc()(max)
111filter_proc()(min)
112filter_proc()(oct)
113filter_proc()(ord)
114filter_proc()(pow)
115filter_proc()(str)
116filter_proc()(sum)
117
118
119@filter_proc()
120def suffix(val, suffix):
121 return " ".join(v + suffix for v in val.split())
122
123
124@filter_proc()
125def prefix(val, prefix):
126 return " ".join(prefix + v for v in val.split())
127
128
129@filter_proc()
130def sort(val):
131 return " ".join(sorted(val.split()))
132
133
134@filter_proc()
135def remove(val, remove, sep=None):
136 if isinstance(remove, str):
137 remove = remove.split(sep)
138 new = [i for i in val.split(sep) if not i in remove]
139
140 if not sep:
141 return " ".join(new)
142 return sep.join(new)
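Note: a minimal usage sketch for the new filter module, assuming it is importable as bb.filter; only the procs registered above are visible to the expressions:

    import bb.filter

    # Each expression is evaluated with the current value bound to 'val';
    # results are cached per (value, expression) pair across calls.
    val = bb.filter.apply_filters(
        "bash gawk sed",
        ['prefix(val, "nativesdk-")', 'sort(val)'],
    )
    # val == "nativesdk-bash nativesdk-gawk nativesdk-sed"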
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
index bca8ebfa09..597cb27846 100755
--- a/bitbake/lib/bb/main.py
+++ b/bitbake/lib/bb/main.py
@@ -208,8 +208,10 @@ def create_bitbake_parser():
208 "failed and anything depending on it cannot be built, as much as " 208 "failed and anything depending on it cannot be built, as much as "
209 "possible will be built before stopping.") 209 "possible will be built before stopping.")
210 210
211 exec_group.add_argument("-P", "--profile", action="store_true", 211 exec_group.add_argument("-P", "--profile", action="append",
212 help="Profile the command and save reports.") 212 default=[],
213 help="Profile the command and save reports. Specify 'main', 'idle' or 'parsing' "
214 "to indicate which bitbake code to profile.")
213 215
214 exec_group.add_argument("-S", "--dump-signatures", action="append", 216 exec_group.add_argument("-S", "--dump-signatures", action="append",
215 default=[], metavar="SIGNATURE_HANDLER", 217 default=[], metavar="SIGNATURE_HANDLER",
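Note: since --profile is now an append action, several areas can be profiled in one invocation, e.g. "bitbake -P main -P parsing <target>" (an illustrative command line based on the help text above); each requested area produces its own saved report.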
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 3e18596faa..4f616ff42e 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -89,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
89 msg = logging.Formatter.format(self, record) 89 msg = logging.Formatter.format(self, record)
90 if hasattr(record, 'bb_exc_formatted'): 90 if hasattr(record, 'bb_exc_formatted'):
91 msg += '\n' + ''.join(record.bb_exc_formatted) 91 msg += '\n' + ''.join(record.bb_exc_formatted)
92 elif hasattr(record, 'bb_exc_info'):
93 etype, value, tb = record.bb_exc_info
94 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
95 msg += '\n' + ''.join(formatted)
96 return msg 92 return msg
97 93
98 def colorize(self, record): 94 def colorize(self, record):
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index a4358f1374..d428d8a4b4 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
49__mtime_cache = {} 49__mtime_cache = {}
50def cached_mtime(f): 50def cached_mtime(f):
51 if f not in __mtime_cache: 51 if f not in __mtime_cache:
52 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 52 res = os.stat(f)
53 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
53 return __mtime_cache[f] 54 return __mtime_cache[f]
54 55
55def cached_mtime_noerror(f): 56def cached_mtime_noerror(f):
56 if f not in __mtime_cache: 57 if f not in __mtime_cache:
57 try: 58 try:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 59 res = os.stat(f)
60 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
59 except OSError: 61 except OSError:
60 return 0 62 return 0
61 return __mtime_cache[f] 63 return __mtime_cache[f]
62 64
63def check_mtime(f, mtime): 65def check_mtime(f, mtime):
64 try: 66 try:
65 current_mtime = os.stat(f)[stat.ST_MTIME] 67 res = os.stat(f)
68 current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
66 __mtime_cache[f] = current_mtime 69 __mtime_cache[f] = current_mtime
67 except OSError: 70 except OSError:
68 current_mtime = 0 71 current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
70 73
71def update_mtime(f): 74def update_mtime(f):
72 try: 75 try:
73 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 76 res = os.stat(f)
77 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
74 except OSError: 78 except OSError:
75 if f in __mtime_cache: 79 if f in __mtime_cache:
76 del __mtime_cache[f] 80 del __mtime_cache[f]
@@ -172,4 +176,41 @@ def get_file_depends(d):
172 dep_files.append(os.path.abspath(fn)) 176 dep_files.append(os.path.abspath(fn))
173 return " ".join(dep_files) 177 return " ".join(dep_files)
174 178
179def vardeps(*varnames):
180 """
181 Function decorator that can be used to instruct the bitbake dependency
 182 parsing to add a dependency on the specified variable names
183
184 Example:
185
186 @bb.parse.vardeps("FOO", "BAR")
187 def my_function():
188 ...
189
190 """
191 def inner(f):
192 if not hasattr(f, "bb_vardeps"):
193 f.bb_vardeps = set()
194 f.bb_vardeps |= set(varnames)
195 return f
196 return inner
197
198def vardepsexclude(*varnames):
199 """
200 Function decorator that can be used to instruct the bitbake dependency
201 parsing to ignore dependencies on the specified variable names in the code
202
203 Example:
204
205 @bb.parse.vardepsexclude("FOO", "BAR")
206 def my_function():
207 ...
208 """
209 def inner(f):
210 if not hasattr(f, "bb_vardepsexclude"):
211 f.bb_vardepsexclude = set()
212 f.bb_vardepsexclude |= set(varnames)
213 return f
214 return inner
215
175from bb.parse.parse_py import __version__, ConfHandler, BBHandler 216from bb.parse.parse_py import __version__, ConfHandler, BBHandler
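Note: the mtime cache entries above move from a bare st_mtime to a (st_mtime_ns, st_size, st_ino) tuple, so same-second rewrites, truncations and replace-by-rename are all detected. A minimal sketch of the new signature using the same stdlib calls:

    import os

    def file_signature(path):
        # Nanosecond mtime alone can miss a rewrite landing on the same
        # timestamp; size and inode also catch truncation and rename-over.
        st = os.stat(path)
        return (st.st_mtime_ns, st.st_size, st.st_ino)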
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 7581d003fd..cfead466e1 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -43,6 +43,21 @@ class IncludeNode(AstNode):
43 else: 43 else:
44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) 44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
45 45
46class IncludeAllNode(AstNode):
47 def __init__(self, filename, lineno, what_file):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50
51 def eval(self, data):
52 """
53 Include the file and evaluate the statements
54 """
55 s = data.expand(self.what_file)
56 logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
57
58 for path in data.getVar("BBPATH").split(":"):
59 bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False)
60
46class ExportNode(AstNode): 61class ExportNode(AstNode):
47 def __init__(self, filename, lineno, var): 62 def __init__(self, filename, lineno, var):
48 AstNode.__init__(self, filename, lineno) 63 AstNode.__init__(self, filename, lineno)
@@ -137,7 +152,10 @@ class DataNode(AstNode):
137 152
138 flag = None 153 flag = None
139 if 'flag' in groupd and groupd['flag'] is not None: 154 if 'flag' in groupd and groupd['flag'] is not None:
140 flag = groupd['flag'] 155 if groupd["lazyques"]:
156 flag = "_defaultval_flag_"+groupd['flag']
157 else:
158 flag = groupd['flag']
141 elif groupd["lazyques"]: 159 elif groupd["lazyques"]:
142 flag = "_defaultval" 160 flag = "_defaultval"
143 161
@@ -240,14 +258,16 @@ class ExportFuncsNode(AstNode):
240 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True) 258 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
241 259
242class AddTaskNode(AstNode): 260class AddTaskNode(AstNode):
243 def __init__(self, filename, lineno, func, before, after): 261 def __init__(self, filename, lineno, tasks, before, after):
244 AstNode.__init__(self, filename, lineno) 262 AstNode.__init__(self, filename, lineno)
245 self.func = func 263 self.tasks = tasks
246 self.before = before 264 self.before = before
247 self.after = after 265 self.after = after
248 266
249 def eval(self, data): 267 def eval(self, data):
250 bb.build.addtask(self.func, self.before, self.after, data) 268 tasks = self.tasks.split()
269 for task in tasks:
270 bb.build.addtask(task, self.before, self.after, data)
251 271
252class DelTaskNode(AstNode): 272class DelTaskNode(AstNode):
253 def __init__(self, filename, lineno, tasks): 273 def __init__(self, filename, lineno, tasks):
@@ -320,13 +340,84 @@ class InheritDeferredNode(AstNode):
320 self.inherit = (classes, filename, lineno) 340 self.inherit = (classes, filename, lineno)
321 341
322 def eval(self, data): 342 def eval(self, data):
323 inherits = data.getVar('__BBDEFINHERITS', False) or [] 343 bb.parse.BBHandler.inherit_defer(*self.inherit, data)
324 inherits.append(self.inherit) 344
325 data.setVar('__BBDEFINHERITS', inherits) 345class AddFragmentsNode(AstNode):
346 def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable):
347 AstNode.__init__(self, filename, lineno)
348 self.fragments_path_prefix = fragments_path_prefix
349 self.fragments_variable = fragments_variable
350 self.flagged_variables_list_variable = flagged_variables_list_variable
351 self.builtin_fragments_variable = builtin_fragments_variable
352
353 def eval(self, data):
354 # No need to use mark_dependency since we would only match a fragment
355 # from a specific layer and there can only be a single layer with a
356 # given namespace.
357 def find_fragment(layers, layerid, full_fragment_name):
358 for layerpath in layers.split():
359 candidate_fragment_path = os.path.join(layerpath, full_fragment_name)
360 if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid:
361 return candidate_fragment_path
362 return None
363
364 def check_and_set_builtin_fragment(fragment, data, builtin_fragments):
365 prefix, value = fragment.split('/', 1)
366 if prefix in builtin_fragments.keys():
367 # parsing=True since we want to emulate X=Y and allow X:override=Z to continue to exist
368 data.setVar(builtin_fragments[prefix], value, parsing=True)
369 return True
370 return False
371
372 fragments = data.getVar(self.fragments_variable)
373 layers = data.getVar('BBLAYERS')
374 flagged_variables = data.getVar(self.flagged_variables_list_variable).split()
375 builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] }
376
377 if not fragments:
378 return
379
380 # Check for multiple builtin fragments setting the same variable
381 for builtin_fragment_key in builtin_fragments.keys():
382 builtin_fragments_list = list(
383 filter(
384 lambda f: f.startswith(builtin_fragment_key + "/"),
385 fragments.split(),
386 )
387 )
388 if len(builtin_fragments_list) > 1:
389 bb.warn(
390 ("Multiple builtin fragments are enabled for %s via variable %s: %s. "
391 "This likely points to a mis-configuration in the metadata, as only "
392 "one of them should be set. The build will use the last value.")
393 % (
394 builtin_fragment_key,
395 self.fragments_variable,
396 " ".join(builtin_fragments_list),
397 )
398 )
399
400 for f in fragments.split():
401 if check_and_set_builtin_fragment(f, data, builtin_fragments):
402 continue
403 layerid, fragment_name = f.split('/', 1)
404 full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name))
405 fragment_path = find_fragment(layers, layerid, full_fragment_name)
406 if fragment_path:
407 bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment")
408 for flagged_var in flagged_variables:
409 val = data.getVar(flagged_var)
410 data.setVarFlag(flagged_var, f, val)
411 data.setVar(flagged_var, None)
412 else:
413 bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers))
326 414
327def handleInclude(statements, filename, lineno, m, force): 415def handleInclude(statements, filename, lineno, m, force):
328 statements.append(IncludeNode(filename, lineno, m.group(1), force)) 416 statements.append(IncludeNode(filename, lineno, m.group(1), force))
329 417
418def handleIncludeAll(statements, filename, lineno, m):
419 statements.append(IncludeAllNode(filename, lineno, m.group(1)))
420
330def handleExport(statements, filename, lineno, m): 421def handleExport(statements, filename, lineno, m):
331 statements.append(ExportNode(filename, lineno, m.group(1))) 422 statements.append(ExportNode(filename, lineno, m.group(1)))
332 423
@@ -348,21 +439,11 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body)
348def handleExportFuncs(statements, filename, lineno, m, classname): 439def handleExportFuncs(statements, filename, lineno, m, classname):
349 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) 440 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
350 441
351def handleAddTask(statements, filename, lineno, m): 442def handleAddTask(statements, filename, lineno, tasks, before, after):
352 func = m.group("func") 443 statements.append(AddTaskNode(filename, lineno, tasks, before, after))
353 before = m.group("before")
354 after = m.group("after")
355 if func is None:
356 return
357
358 statements.append(AddTaskNode(filename, lineno, func, before, after))
359 444
360def handleDelTask(statements, filename, lineno, m): 445def handleDelTask(statements, filename, lineno, tasks):
361 func = m.group(1) 446 statements.append(DelTaskNode(filename, lineno, tasks))
362 if func is None:
363 return
364
365 statements.append(DelTaskNode(filename, lineno, func))
366 447
367def handleBBHandlers(statements, filename, lineno, m): 448def handleBBHandlers(statements, filename, lineno, m):
368 statements.append(BBHandlerNode(filename, lineno, m.group(1))) 449 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
@@ -378,12 +459,43 @@ def handleInheritDeferred(statements, filename, lineno, m):
378 classes = m.group(1) 459 classes = m.group(1)
379 statements.append(InheritDeferredNode(filename, lineno, classes)) 460 statements.append(InheritDeferredNode(filename, lineno, classes))
380 461
462def handleAddFragments(statements, filename, lineno, m):
463 fragments_path_prefix = m.group(1)
464 fragments_variable = m.group(2)
465 flagged_variables_list_variable = m.group(3)
466 builtin_fragments_variable = m.group(4)
467 statements.append(AddFragmentsNode(filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable))
468
381def runAnonFuncs(d): 469def runAnonFuncs(d):
382 code = [] 470 code = []
383 for funcname in d.getVar("__BBANONFUNCS", False) or []: 471 for funcname in d.getVar("__BBANONFUNCS", False) or []:
384 code.append("%s(d)" % funcname) 472 code.append("%s(d)" % funcname)
385 bb.utils.better_exec("\n".join(code), {"d": d}) 473 bb.utils.better_exec("\n".join(code), {"d": d})
386 474
475# Handle recipe level PREFERRED_PROVIDERs
476def handleVirtRecipeProviders(tasklist, d):
477 depends = (d.getVar("DEPENDS") or "").split()
478 virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split()
479 newdeps = []
480 for dep in depends:
481 if dep in virtprovs:
482 newdep = d.getVar("PREFERRED_PROVIDER_" + dep)
483 if not newdep:
484 bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep)
485 newdeps.append(newdep)
486 else:
487 newdeps.append(dep)
488 d.setVar("DEPENDS", " ".join(newdeps))
489 for task in tasklist:
490 taskdeps = (d.getVarFlag(task, "depends") or "").split()
491 remapped = []
492 for entry in taskdeps:
493 r, t = entry.split(":")
494 if r in virtprovs:
495 r = d.getVar("PREFERRED_PROVIDER_" + r)
496 remapped.append("%s:%s" % (r, t))
497 d.setVarFlag(task, "depends", " ".join(remapped))
498
387def finalize(fn, d, variant = None): 499def finalize(fn, d, variant = None):
388 saved_handlers = bb.event.get_handlers().copy() 500 saved_handlers = bb.event.get_handlers().copy()
389 try: 501 try:
@@ -391,6 +503,17 @@ def finalize(fn, d, variant = None):
391 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True: 503 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
392 raise bb.BBHandledException() 504 raise bb.BBHandledException()
393 505
506 inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])]
507 bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d)
508
509 while True:
510 inherits = d.getVar('__BBDEFINHERITS', False) or []
511 if not inherits:
512 break
513 inherit, filename, lineno = inherits.pop(0)
514 d.setVar('__BBDEFINHERITS', inherits)
515 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
516
394 for var in d.getVar('__BBHANDLERS', False) or []: 517 for var in d.getVar('__BBHANDLERS', False) or []:
395 # try to add the handler 518 # try to add the handler
396 handlerfn = d.getVarFlag(var, "filename", False) 519 handlerfn = d.getVarFlag(var, "filename", False)
@@ -409,6 +532,7 @@ def finalize(fn, d, variant = None):
409 532
410 tasklist = d.getVar('__BBTASKS', False) or [] 533 tasklist = d.getVar('__BBTASKS', False) or []
411 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) 534 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
535 handleVirtRecipeProviders(tasklist, d)
412 bb.build.add_tasks(tasklist, d) 536 bb.build.add_tasks(tasklist, d)
413 537
414 bb.parse.siggen.finalise(fn, d, variant) 538 bb.parse.siggen.finalise(fn, d, variant)
@@ -444,14 +568,6 @@ def multi_finalize(fn, d):
444 logger.debug("Appending .bbappend file %s to %s", append, fn) 568 logger.debug("Appending .bbappend file %s to %s", append, fn)
445 bb.parse.BBHandler.handle(append, d, True) 569 bb.parse.BBHandler.handle(append, d, True)
446 570
447 while True:
448 inherits = d.getVar('__BBDEFINHERITS', False) or []
449 if not inherits:
450 break
451 inherit, filename, lineno = inherits.pop(0)
452 d.setVar('__BBDEFINHERITS', inherits)
453 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
454
455 onlyfinalise = d.getVar("__ONLYFINALISE", False) 571 onlyfinalise = d.getVar("__ONLYFINALISE", False)
456 572
457 safe_d = d 573 safe_d = d
@@ -487,7 +603,7 @@ def multi_finalize(fn, d):
487 d.setVar("BBEXTENDVARIANT", variantmap[name]) 603 d.setVar("BBEXTENDVARIANT", variantmap[name])
488 else: 604 else:
489 d.setVar("PN", "%s-%s" % (pn, name)) 605 d.setVar("PN", "%s-%s" % (pn, name))
490 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d) 606 bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d)
491 607
492 safe_d.setVar("BBCLASSEXTEND", extended) 608 safe_d.setVar("BBCLASSEXTEND", extended)
493 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) 609 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
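Note: for illustration of AddFragmentsNode, a hypothetical fragments variable of "corelayer/debug machine/qemux86-64" (names invented here, not from this patch) would be processed as follows: "machine/qemux86-64" matches a builtin fragment prefix and simply sets the mapped variable, while "corelayer/debug" is resolved to <fragments path prefix>/debug.conf inside the layer whose id is "corelayer", included as configuration, and each variable listed in the flagged-variables list is captured as a per-fragment flag and then cleared.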
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index c13e4b9755..008fec2308 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -23,8 +23,8 @@ __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>faker
23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) 23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" ) 24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) 25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
26__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") 26__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
27__deltask_regexp__ = re.compile(r"deltask\s+(.+)") 27__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) 28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
29__def_regexp__ = re.compile(r"def\s+(\w+).*:" ) 29__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) 30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -42,12 +42,22 @@ def supports(fn, d):
42 """Return True if fn has a supported extension""" 42 """Return True if fn has a supported extension"""
43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] 43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
44 44
45def inherit_defer(expression, fn, lineno, d):
46 inherit = (expression, fn, lineno)
47 inherits = d.getVar('__BBDEFINHERITS', False) or []
48 inherits.append(inherit)
49 d.setVar('__BBDEFINHERITS', inherits)
50
45def inherit(files, fn, lineno, d, deferred=False): 51def inherit(files, fn, lineno, d, deferred=False):
46 __inherit_cache = d.getVar('__inherit_cache', False) or [] 52 __inherit_cache = d.getVar('__inherit_cache', False) or []
47 #if "${" in files and not deferred: 53 #if "${" in files and not deferred:
48 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno)) 54 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
49 files = d.expand(files).split() 55 files = d.expand(files).split()
50 for file in files: 56 for file in files:
57 defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split()
58 if not deferred and file in defer:
59 inherit_defer(file, fn, lineno, d)
60 continue
51 classtype = d.getVar("__bbclasstype", False) 61 classtype = d.getVar("__bbclasstype", False)
52 origfile = file 62 origfile = file
53 for t in ["classes-" + classtype, "classes"]: 63 for t in ["classes-" + classtype, "classes"]:
@@ -239,29 +249,38 @@ def feeder(lineno, s, fn, root, statements, eof=False):
239 249
240 m = __addtask_regexp__.match(s) 250 m = __addtask_regexp__.match(s)
241 if m: 251 if m:
242 if len(m.group().split()) == 2: 252 after = ""
243 # Check and warn for "addtask task1 task2" 253 before = ""
244 m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s) 254
245 if m2 and m2.group('ignores'): 255 # This code splits on 'before' and 'after' instead of on whitespace so we can defer
246 logger.warning('addtask ignored: "%s"' % m2.group('ignores')) 256 # evaluation to as late as possible.
247 257 tasks = m.group(1).split(" before ")[0].split(" after ")[0]
248 # Check and warn for "addtask task1 before task2 before task3", the 258
249 # similar to "after" 259 for exp in m.group(1).split(" before "):
250 taskexpression = s.split() 260 exp2 = exp.split(" after ")
251 for word in ('before', 'after'): 261 if len(exp2) > 1:
252 if taskexpression.count(word) > 1: 262 after = after + " ".join(exp2[1:])
253 logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
254 263
 255 # Check and warn for having task with expression as part of task name 264 for exp in m.group(1).split(" after "):
265 exp2 = exp.split(" before ")
266 if len(exp2) > 1:
267 before = before + " ".join(exp2[1:])
268
269 # Check and warn for having task with a keyword as part of task name
270 taskexpression = s.split()
256 for te in taskexpression: 271 for te in taskexpression:
257 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): 272 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
258 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) 273 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
259 ast.handleAddTask(statements, fn, lineno, m) 274
275 if tasks is not None:
276 ast.handleAddTask(statements, fn, lineno, tasks, before, after)
260 return 277 return
261 278
262 m = __deltask_regexp__.match(s) 279 m = __deltask_regexp__.match(s)
263 if m: 280 if m:
264 ast.handleDelTask(statements, fn, lineno, m) 281 task = m.group(1)
282 if task is not None:
283 ast.handleDelTask(statements, fn, lineno, task)
265 return 284 return
266 285
267 m = __addhandler_regexp__.match(s) 286 m = __addhandler_regexp__.match(s)
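Note: the rewritten addtask handling accepts several task names per statement. A hypothetical line "addtask do_a do_b before do_build after do_compile" now parses to tasks="do_a do_b", before="do_build", after="do_compile", and AddTaskNode.eval registers each task with bb.build.addtask individually; the old regexp could only capture a single \w+ task name.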
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 7826dee7d3..9ddbae123d 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,10 +20,10 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
20__config_regexp__ = re.compile( r""" 20__config_regexp__ = re.compile( r"""
21 ^ 21 ^
22 (?P<exp>export\s+)? 22 (?P<exp>export\s+)?
23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?) 23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)
24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])? 24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])?
25 25
26 \s* ( 26 (?P<whitespace>\s*) (
27 (?P<colon>:=) | 27 (?P<colon>:=) |
28 (?P<lazyques>\?\?=) | 28 (?P<lazyques>\?\?=) |
29 (?P<ques>\?=) | 29 (?P<ques>\?=) |
@@ -32,7 +32,7 @@ __config_regexp__ = re.compile( r"""
32 (?P<predot>=\.) | 32 (?P<predot>=\.) |
33 (?P<postdot>\.=) | 33 (?P<postdot>\.=) |
34 = 34 =
35 ) \s* 35 ) (?P<whitespace2>\s*)
36 36
37 (?!'[^']*'[^']*'$) 37 (?!'[^']*'[^']*'$)
38 (?!\"[^\"]*\"[^\"]*\"$) 38 (?!\"[^\"]*\"[^\"]*\"$)
@@ -43,10 +43,12 @@ __config_regexp__ = re.compile( r"""
43 """, re.X) 43 """, re.X)
44__include_regexp__ = re.compile( r"include\s+(.+)" ) 44__include_regexp__ = re.compile( r"include\s+(.+)" )
45__require_regexp__ = re.compile( r"require\s+(.+)" ) 45__require_regexp__ = re.compile( r"require\s+(.+)" )
46__includeall_regexp__ = re.compile( r"include_all\s+(.+)" )
46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 47__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 48__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" ) 49__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
49__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" ) 50__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
51__addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" )
50 52
51def init(data): 53def init(data):
52 return 54 return
@@ -164,6 +166,10 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
164 m = __config_regexp__.match(s) 166 m = __config_regexp__.match(s)
165 if m: 167 if m:
166 groupd = m.groupdict() 168 groupd = m.groupdict()
169 if groupd['var'] == "":
170 raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno);
171 if not groupd['whitespace'] or not groupd['whitespace2']:
 172 logger.warning("%s:%s is missing whitespace around the assignment: '%s'" % (fn, lineno, s))
167 ast.handleData(statements, fn, lineno, groupd) 173 ast.handleData(statements, fn, lineno, groupd)
168 return 174 return
169 175
@@ -177,6 +183,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
177 ast.handleInclude(statements, fn, lineno, m, True) 183 ast.handleInclude(statements, fn, lineno, m, True)
178 return 184 return
179 185
186 m = __includeall_regexp__.match(s)
187 if m:
188 ast.handleIncludeAll(statements, fn, lineno, m)
189 return
190
180 m = __export_regexp__.match(s) 191 m = __export_regexp__.match(s)
181 if m: 192 if m:
182 ast.handleExport(statements, fn, lineno, m) 193 ast.handleExport(statements, fn, lineno, m)
@@ -197,6 +208,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
197 ast.handlePyLib(statements, fn, lineno, m) 208 ast.handlePyLib(statements, fn, lineno, m)
198 return 209 return
199 210
211 m = __addfragments_regexp__.match(s)
212 if m:
213 ast.handleAddFragments(statements, fn, lineno, m)
214 return
215
200 raise ParseError("unparsed line: '%s'" % s, fn, lineno); 216 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
201 217
202# Add us to the handlers list 218# Add us to the handlers list
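Note: with IncludeAllNode wired in above, a conf line such as "include_all conf/distro/include/maintainers.inc" (hypothetical path) is expanded and included once for every BBPATH entry that contains it, whereas plain include resolves to a single file. The new whitespace groups also let the parser warn on assignments written without spacing around the operator, and an assignment with an empty variable name is now a hard ParseError rather than being silently matched.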
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index bcca791edf..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,271 +0,0 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access them at some future date. Acts as a convenience wrapper around sqlite,
5currently, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import collections
15import collections.abc
16import contextlib
17import functools
18import logging
19import os.path
20import sqlite3
21import sys
22from collections.abc import Mapping
23
24sqlversion = sqlite3.sqlite_version_info
25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
26 raise Exception("sqlite3 version 3.3.0 or later is required.")
27
28
29logger = logging.getLogger("BitBake.PersistData")
30
31@functools.total_ordering
32class SQLTable(collections.abc.MutableMapping):
33 class _Decorators(object):
34 @staticmethod
35 def retry(*, reconnect=True):
36 """
37 Decorator that restarts a function if a database locked sqlite
38 exception occurs. If reconnect is True, the database connection
39 will be closed and reopened each time a failure occurs
40 """
41 def retry_wrapper(f):
42 def wrap_func(self, *args, **kwargs):
43 # Reconnect if necessary
44 if self.connection is None and reconnect:
45 self.reconnect()
46
47 count = 0
48 while True:
49 try:
50 return f(self, *args, **kwargs)
51 except sqlite3.OperationalError as exc:
52 if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
53 count = count + 1
54 if reconnect:
55 self.reconnect()
56 continue
57 raise
58 return wrap_func
59 return retry_wrapper
60
61 @staticmethod
62 def transaction(f):
63 """
64 Decorator that starts a database transaction and creates a database
65 cursor for performing queries. If no exception is thrown, the
66 database results are committed. If an exception occurs, the database
67 is rolled back. In all cases, the cursor is closed after the
68 function ends.
69
70 Note that the cursor is passed as an extra argument to the function
71 after `self` and before any of the normal arguments
72 """
73 def wrap_func(self, *args, **kwargs):
74 # Context manager will COMMIT the database on success,
75 # or ROLLBACK on an exception
76 with self.connection:
77 # Automatically close the cursor when done
78 with contextlib.closing(self.connection.cursor()) as cursor:
79 return f(self, cursor, *args, **kwargs)
80 return wrap_func
81
82 """Object representing a table/domain in the database"""
83 def __init__(self, cachefile, table):
84 self.cachefile = cachefile
85 self.table = table
86
87 self.connection = None
88 self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
89
90 @_Decorators.retry(reconnect=False)
91 @_Decorators.transaction
92 def _setup_database(self, cursor):
93 cursor.execute("pragma synchronous = off;")
94 # Enable WAL and keep the autocheckpoint length small (the default is
95 # usually 1000). Persistent caches are usually read-mostly, so keeping
96 # this short will keep readers running quickly
97 cursor.execute("pragma journal_mode = WAL;")
98 cursor.execute("pragma wal_autocheckpoint = 100;")
99
100 def reconnect(self):
101 if self.connection is not None:
102 self.connection.close()
103 self.connection = sqlite3.connect(self.cachefile, timeout=5)
104 self.connection.text_factory = str
105 self._setup_database()
106
107 @_Decorators.retry()
108 @_Decorators.transaction
109 def _execute_single(self, cursor, *query):
110 """
111 Executes a single query and discards the results. This correctly closes
112 the database cursor when finished
113 """
114 cursor.execute(*query)
115
116 @_Decorators.retry()
117 def _row_iter(self, f, *query):
118 """
119 Helper function that returns a row iterator. Each time __next__ is
120 called on the iterator, the provided function is evaluated to determine
121 the return value
122 """
123 class CursorIter(object):
124 def __init__(self, cursor):
125 self.cursor = cursor
126
127 def __iter__(self):
128 return self
129
130 def __next__(self):
131 row = self.cursor.fetchone()
132 if row is None:
133 self.cursor.close()
134 raise StopIteration
135 return f(row)
136
137 def __enter__(self):
138 return self
139
140 def __exit__(self, typ, value, traceback):
141 self.cursor.close()
142 return False
143
144 cursor = self.connection.cursor()
145 try:
146 cursor.execute(*query)
147 return CursorIter(cursor)
148 except:
149 cursor.close()
150
151 def __enter__(self):
152 self.connection.__enter__()
153 return self
154
155 def __exit__(self, *excinfo):
156 self.connection.__exit__(*excinfo)
157
158 @_Decorators.retry()
159 @_Decorators.transaction
160 def __getitem__(self, cursor, key):
161 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
162 row = cursor.fetchone()
163 if row is not None:
164 return row[1]
165 raise KeyError(key)
166
167 @_Decorators.retry()
168 @_Decorators.transaction
169 def __delitem__(self, cursor, key):
170 if key not in self:
171 raise KeyError(key)
172 cursor.execute("DELETE from %s where key=?;" % self.table, [key])
173
174 @_Decorators.retry()
175 @_Decorators.transaction
176 def __setitem__(self, cursor, key, value):
177 if not isinstance(key, str):
178 raise TypeError('Only string keys are supported')
179 elif not isinstance(value, str):
180 raise TypeError('Only string values are supported')
181
182 # Ensure the entire transaction (including SELECT) executes under write lock
183 cursor.execute("BEGIN EXCLUSIVE")
184
185 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
186 row = cursor.fetchone()
187 if row is not None:
188 cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
189 else:
190 cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
191
192 @_Decorators.retry()
193 @_Decorators.transaction
194 def __contains__(self, cursor, key):
195 cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
196 return cursor.fetchone() is not None
197
198 @_Decorators.retry()
199 @_Decorators.transaction
200 def __len__(self, cursor):
201 cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
202 row = cursor.fetchone()
203 if row is not None:
204 return row[0]
205
206 def __iter__(self):
207 return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
208
209 def __lt__(self, other):
210 if not isinstance(other, Mapping):
211 raise NotImplementedError()
212
213 return len(self) < len(other)
214
215 def get_by_pattern(self, pattern):
216 return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
217 self.table, [pattern])
218
219 def values(self):
220 return list(self.itervalues())
221
222 def itervalues(self):
223 return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
224 self.table)
225
226 def items(self):
227 return list(self.iteritems())
228
229 def iteritems(self):
230 return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
231 self.table)
232
233 @_Decorators.retry()
234 @_Decorators.transaction
235 def clear(self, cursor):
236 cursor.execute("DELETE FROM %s;" % self.table)
237
238 def has_key(self, key):
239 return key in self
240
241def persist(domain, d):
242 """Convenience factory for SQLTable objects based upon metadata"""
243 import bb.utils
244 cachedir = (d.getVar("PERSISTENT_DIR") or
245 d.getVar("CACHE"))
246 if not cachedir:
247 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
248 sys.exit(1)
249
250 bb.utils.mkdirhier(cachedir)
251 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
252
253 try:
254 return SQLTable(cachefile, domain)
255 except sqlite3.OperationalError:
256 # Sqlite fails to open database when its path is too long.
257 # After testing, 504 is the biggest path length that can be opened by
258 # sqlite.
259 # Note: This code is called before sanity.bbclass and its path length
260 # check
261 max_len = 504
262 if len(cachefile) > max_len:
263 logger.critical("The path of the cache file is too long "
264 "({0} chars > {1}) to be opened by sqlite! "
265 "Your cache file is \"{2}\"".format(
266 len(cachefile),
267 max_len,
268 cachefile))
269 sys.exit(1)
270 else:
271 raise
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index bc7e18175d..63d4edd892 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -10,10 +10,12 @@ Handles preparation and execution of a queue of tasks
10# 10#
11 11
12import copy 12import copy
13import enum
13import os 14import os
14import sys 15import sys
15import stat 16import stat
16import errno 17import errno
18import itertools
17import logging 19import logging
18import re 20import re
19import bb 21import bb
@@ -124,14 +126,16 @@ class RunQueueStats:
124 def updateActiveSetscene(self, active): 126 def updateActiveSetscene(self, active):
125 self.setscene_active = active 127 self.setscene_active = active
126 128
127# These values indicate the next step due to be run in the 129
128# runQueue state machine 130# Indicates the next step due to run in the runQueue state machine
129runQueuePrepare = 2 131class RunQueueState(enum.Enum):
130runQueueSceneInit = 3 132 PREPARE = 0
131runQueueRunning = 6 133 SCENE_INIT = 1
132runQueueFailed = 7 134 DUMP_SIGS = 2
133runQueueCleanUp = 8 135 RUNNING = 3
134runQueueComplete = 9 136 FAILED = 4
137 CLEAN_UP = 5
138 COMPLETE = 6
135 139
136class RunQueueScheduler(object): 140class RunQueueScheduler(object):
137 """ 141 """
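Note: moving from module-level integers to an Enum makes the runqueue states self-describing in logs and tracebacks and keeps comparisons explicit. A minimal sketch of the pattern, not tied to the class above:

    import enum

    class State(enum.Enum):
        PREPARE = 0
        RUNNING = 1

    s = State.PREPARE
    assert s == State.PREPARE       # members compare reliably with ==
    assert s is not State.RUNNING   # and, being singletons, with 'is'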
@@ -475,7 +479,6 @@ class RunQueueData:
475 self.runtaskentries = {} 479 self.runtaskentries = {}
476 480
477 def runq_depends_names(self, ids): 481 def runq_depends_names(self, ids):
478 import re
479 ret = [] 482 ret = []
480 for id in ids: 483 for id in ids:
481 nam = os.path.basename(id) 484 nam = os.path.basename(id)
@@ -677,7 +680,7 @@ class RunQueueData:
677 680
678 self.init_progress_reporter.start() 681 self.init_progress_reporter.start()
679 self.init_progress_reporter.next_stage() 682 self.init_progress_reporter.next_stage()
680 bb.event.check_for_interrupts(self.cooker.data) 683 bb.event.check_for_interrupts()
681 684
682 # Step A - Work out a list of tasks to run 685 # Step A - Work out a list of tasks to run
683 # 686 #
@@ -728,6 +731,8 @@ class RunQueueData:
728 if mc == frommc: 731 if mc == frommc:
729 fn = taskData[mcdep].build_targets[pn][0] 732 fn = taskData[mcdep].build_targets[pn][0]
730 newdep = '%s:%s' % (fn,deptask) 733 newdep = '%s:%s' % (fn,deptask)
734 if newdep not in taskData[mcdep].taskentries:
735 bb.fatal("Task mcdepends on non-existent task %s" % (newdep))
731 taskData[mc].taskentries[tid].tdepends.append(newdep) 736 taskData[mc].taskentries[tid].tdepends.append(newdep)
732 737
733 for mc in taskData: 738 for mc in taskData:
@@ -826,7 +831,7 @@ class RunQueueData:
826 #self.dump_data() 831 #self.dump_data()
827 832
828 self.init_progress_reporter.next_stage() 833 self.init_progress_reporter.next_stage()
829 bb.event.check_for_interrupts(self.cooker.data) 834 bb.event.check_for_interrupts()
830 835
831 # Resolve recursive 'recrdeptask' dependencies (Part B) 836 # Resolve recursive 'recrdeptask' dependencies (Part B)
832 # 837 #
@@ -923,7 +928,7 @@ class RunQueueData:
923 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref) 928 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
924 929
925 self.init_progress_reporter.next_stage() 930 self.init_progress_reporter.next_stage()
926 bb.event.check_for_interrupts(self.cooker.data) 931 bb.event.check_for_interrupts()
927 932
928 #self.dump_data() 933 #self.dump_data()
929 934
@@ -1005,7 +1010,7 @@ class RunQueueData:
1005 mark_active(tid, 1) 1010 mark_active(tid, 1)
1006 1011
1007 self.init_progress_reporter.next_stage() 1012 self.init_progress_reporter.next_stage()
1008 bb.event.check_for_interrupts(self.cooker.data) 1013 bb.event.check_for_interrupts()
1009 1014
1010 # Step C - Prune all inactive tasks 1015 # Step C - Prune all inactive tasks
1011 # 1016 #
@@ -1052,7 +1057,7 @@ class RunQueueData:
1052 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets))) 1057 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
1053 1058
1054 self.init_progress_reporter.next_stage() 1059 self.init_progress_reporter.next_stage()
1055 bb.event.check_for_interrupts(self.cooker.data) 1060 bb.event.check_for_interrupts()
1056 1061
1057 # Handle runonly 1062 # Handle runonly
1058 if self.cooker.configuration.runonly: 1063 if self.cooker.configuration.runonly:
@@ -1093,7 +1098,7 @@ class RunQueueData:
1093 logger.verbose("Assign Weightings") 1098 logger.verbose("Assign Weightings")
1094 1099
1095 self.init_progress_reporter.next_stage() 1100 self.init_progress_reporter.next_stage()
1096 bb.event.check_for_interrupts(self.cooker.data) 1101 bb.event.check_for_interrupts()
1097 1102
1098 # Generate a list of reverse dependencies to ease future calculations 1103 # Generate a list of reverse dependencies to ease future calculations
1099 for tid in self.runtaskentries: 1104 for tid in self.runtaskentries:
@@ -1101,7 +1106,7 @@ class RunQueueData:
1101 self.runtaskentries[dep].revdeps.add(tid) 1106 self.runtaskentries[dep].revdeps.add(tid)
1102 1107
1103 self.init_progress_reporter.next_stage() 1108 self.init_progress_reporter.next_stage()
1104 bb.event.check_for_interrupts(self.cooker.data) 1109 bb.event.check_for_interrupts()
1105 1110
1106 # Identify tasks at the end of dependency chains 1111 # Identify tasks at the end of dependency chains
1107 # Error on circular dependency loops (length two) 1112 # Error on circular dependency loops (length two)
@@ -1118,14 +1123,14 @@ class RunQueueData:
1118 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints)) 1123 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
1119 1124
1120 self.init_progress_reporter.next_stage() 1125 self.init_progress_reporter.next_stage()
1121 bb.event.check_for_interrupts(self.cooker.data) 1126 bb.event.check_for_interrupts()
1122 1127
1123 # Calculate task weights 1128 # Calculate task weights
 1124 # Check for higher length circular dependencies 1129
1125 self.runq_weight = self.calculate_task_weights(endpoints) 1130 self.runq_weight = self.calculate_task_weights(endpoints)
1126 1131
1127 self.init_progress_reporter.next_stage() 1132 self.init_progress_reporter.next_stage()
1128 bb.event.check_for_interrupts(self.cooker.data) 1133 bb.event.check_for_interrupts()
1129 1134
1130 # Sanity Check - Check for multiple tasks building the same provider 1135 # Sanity Check - Check for multiple tasks building the same provider
1131 for mc in self.dataCaches: 1136 for mc in self.dataCaches:
@@ -1226,7 +1231,7 @@ class RunQueueData:
1226 1231
1227 self.init_progress_reporter.next_stage() 1232 self.init_progress_reporter.next_stage()
1228 self.init_progress_reporter.next_stage() 1233 self.init_progress_reporter.next_stage()
1229 bb.event.check_for_interrupts(self.cooker.data) 1234 bb.event.check_for_interrupts()
1230 1235
1231 # Iterate over the task list looking for tasks with a 'setscene' function 1236 # Iterate over the task list looking for tasks with a 'setscene' function
1232 self.runq_setscene_tids = set() 1237 self.runq_setscene_tids = set()
@@ -1239,7 +1244,7 @@ class RunQueueData:
1239 self.runq_setscene_tids.add(tid) 1244 self.runq_setscene_tids.add(tid)
1240 1245
1241 self.init_progress_reporter.next_stage() 1246 self.init_progress_reporter.next_stage()
1242 bb.event.check_for_interrupts(self.cooker.data) 1247 bb.event.check_for_interrupts()
1243 1248
1244 # Invalidate task if force mode active 1249 # Invalidate task if force mode active
1245 if self.cooker.configuration.force: 1250 if self.cooker.configuration.force:
@@ -1256,7 +1261,7 @@ class RunQueueData:
1256 invalidate_task(fn + ":" + st, True) 1261 invalidate_task(fn + ":" + st, True)
1257 1262
1258 self.init_progress_reporter.next_stage() 1263 self.init_progress_reporter.next_stage()
1259 bb.event.check_for_interrupts(self.cooker.data) 1264 bb.event.check_for_interrupts()
1260 1265
1261 # Create and print to the logs a virtual/xxxx -> PN (fn) table 1266 # Create and print to the logs a virtual/xxxx -> PN (fn) table
1262 for mc in taskData: 1267 for mc in taskData:
@@ -1269,31 +1274,45 @@ class RunQueueData:
1269 bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc]) 1274 bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
1270 1275
1271 self.init_progress_reporter.next_stage() 1276 self.init_progress_reporter.next_stage()
1272 bb.event.check_for_interrupts(self.cooker.data) 1277 bb.event.check_for_interrupts()
1273 1278
1274 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) 1279 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
1275 1280
1281 starttime = time.time()
1282 lasttime = starttime
1283
1276 # Iterate over the task list and call into the siggen code 1284 # Iterate over the task list and call into the siggen code
1277 dealtwith = set() 1285 dealtwith = set()
1278 todeal = set(self.runtaskentries) 1286 todeal = set(self.runtaskentries)
1279 while todeal: 1287 while todeal:
1288 ready = set()
1280 for tid in todeal.copy(): 1289 for tid in todeal.copy():
1281 if not (self.runtaskentries[tid].depends - dealtwith): 1290 if not (self.runtaskentries[tid].depends - dealtwith):
1282 dealtwith.add(tid) 1291 self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1283 todeal.remove(tid) 1292 # get_taskhash for a given tid *must* be called before get_unihash* below
1284 self.prepare_task_hash(tid) 1293 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1285 bb.event.check_for_interrupts(self.cooker.data) 1294 ready.add(tid)
1295 unihashes = bb.parse.siggen.get_unihashes(ready)
1296 for tid in ready:
1297 dealtwith.add(tid)
1298 todeal.remove(tid)
1299 self.runtaskentries[tid].unihash = unihashes[tid]
1300
1301 bb.event.check_for_interrupts()
1302
1303 if time.time() > (lasttime + 30):
1304 lasttime = time.time()
1305 hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
1306
1307 endtime = time.time()
1308 if (endtime-starttime > 60):
1309 hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
1286 1310
1287 bb.parse.siggen.writeout_file_checksum_cache() 1311 bb.parse.siggen.writeout_file_checksum_cache()
1288 1312
1289 #self.dump_data() 1313 #self.dump_data()
1290 return len(self.runtaskentries) 1314 return len(self.runtaskentries)
1291 1315
1292 def prepare_task_hash(self, tid):
1293 bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1294 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1295 self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
1296
1297 def dump_data(self): 1316 def dump_data(self):
1298 """ 1317 """
1299 Dump some debug information on the internal data structures 1318 Dump some debug information on the internal data structures
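Note: the setup loop above replaces the removed prepare_task_hash: tasks whose dependencies are all processed form a ready wave, are hashed, and have their unihashes fetched by one get_unihashes call per wave instead of a get_unihash round-trip per task. A minimal sketch of the batching shape, with deps and get_unihashes as hypothetical stand-ins for the real structures:

    while todeal:
        # tasks whose dependencies have all been dealt with
        ready = {tid for tid in todeal if not (deps[tid] - dealtwith)}
        unihashes = get_unihashes(ready)  # one batched query per wave
        dealtwith |= ready
        todeal -= ready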
@@ -1320,13 +1339,13 @@ class RunQueue:
1320 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None 1339 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
1321 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None 1340 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
1322 1341
1323 self.state = runQueuePrepare 1342 self.state = RunQueueState.PREPARE
1324 1343
1325 # For disk space monitor 1344 # For disk space monitor
1326 # Invoked at regular time intervals via the bitbake heartbeat event 1345 # Invoked at regular time intervals via the bitbake heartbeat event
1327 # while the build is running. We generate a unique name for the handler 1346 # while the build is running. We generate a unique name for the handler
1328 # here, just in case that there ever is more than one RunQueue instance, 1347 # here, just in case that there ever is more than one RunQueue instance,
1329 # start the handler when reaching runQueueSceneInit, and stop it when 1348 # start the handler when reaching RunQueueState.SCENE_INIT, and stop it when
1330 # done with the build. 1349 # done with the build.
1331 self.dm = monitordisk.diskMonitor(cfgData) 1350 self.dm = monitordisk.diskMonitor(cfgData)
1332 self.dm_event_handler_name = '_bb_diskmonitor_' + str(id(self)) 1351 self.dm_event_handler_name = '_bb_diskmonitor_' + str(id(self))
@@ -1538,9 +1557,9 @@ class RunQueue:
1538 """ 1557 """
1539 1558
1540 retval = True 1559 retval = True
1541 bb.event.check_for_interrupts(self.cooker.data) 1560 bb.event.check_for_interrupts()
1542 1561
1543 if self.state is runQueuePrepare: 1562 if self.state == RunQueueState.PREPARE:
1544 # NOTE: if you add, remove or significantly refactor the stages of this 1563 # NOTE: if you add, remove or significantly refactor the stages of this
1545 # process then you should recalculate the weightings here. This is quite 1564 # process then you should recalculate the weightings here. This is quite
1546 # easy to do - just change the next line temporarily to pass debug=True as 1565 # easy to do - just change the next line temporarily to pass debug=True as
@@ -1551,12 +1570,12 @@ class RunQueue:
1551 "Initialising tasks", 1570 "Initialising tasks",
1552 [43, 967, 4, 3, 1, 5, 3, 7, 13, 1, 2, 1, 1, 246, 35, 1, 38, 1, 35, 2, 338, 204, 142, 3, 3, 37, 244]) 1571 [43, 967, 4, 3, 1, 5, 3, 7, 13, 1, 2, 1, 1, 246, 35, 1, 38, 1, 35, 2, 338, 204, 142, 3, 3, 37, 244])
1553 if self.rqdata.prepare() == 0: 1572 if self.rqdata.prepare() == 0:
1554 self.state = runQueueComplete 1573 self.state = RunQueueState.COMPLETE
1555 else: 1574 else:
1556 self.state = runQueueSceneInit 1575 self.state = RunQueueState.SCENE_INIT
1557 bb.parse.siggen.save_unitaskhashes() 1576 bb.parse.siggen.save_unitaskhashes()
1558 1577
1559 if self.state is runQueueSceneInit: 1578 if self.state == RunQueueState.SCENE_INIT:
1560 self.rqdata.init_progress_reporter.next_stage() 1579 self.rqdata.init_progress_reporter.next_stage()
1561 1580
1562 # we are ready to run, emit dependency info to any UI or class which 1581 # we are ready to run, emit dependency info to any UI or class which
@@ -1567,24 +1586,29 @@ class RunQueue:
1567 1586
1568 if not self.dm_event_handler_registered: 1587 if not self.dm_event_handler_registered:
1569 res = bb.event.register(self.dm_event_handler_name, 1588 res = bb.event.register(self.dm_event_handler_name,
1570 lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False, 1589 lambda x, y: self.dm.check(self) if self.state in [RunQueueState.RUNNING, RunQueueState.CLEAN_UP] else False,
1571 ('bb.event.HeartbeatEvent',), data=self.cfgData) 1590 ('bb.event.HeartbeatEvent',), data=self.cfgData)
1572 self.dm_event_handler_registered = True 1591 self.dm_event_handler_registered = True
1573 1592
1574 self.rqdata.init_progress_reporter.next_stage() 1593 self.rqdata.init_progress_reporter.next_stage()
1575 self.rqexe = RunQueueExecute(self) 1594 self.rqexe = RunQueueExecute(self)
1576 1595
1577 dump = self.cooker.configuration.dump_signatures 1596 dumpsigs = self.cooker.configuration.dump_signatures
1578 if dump: 1597 if dumpsigs:
1579 self.rqdata.init_progress_reporter.finish() 1598 self.rqdata.init_progress_reporter.finish()
1580 if 'printdiff' in dump: 1599 if 'printdiff' in dumpsigs:
1581 invalidtasks = self.print_diffscenetasks() 1600 self.invalidtasks_dump = self.print_diffscenetasks()
1582 self.dump_signatures(dump) 1601 self.state = RunQueueState.DUMP_SIGS
1583 if 'printdiff' in dump: 1602
1584 self.write_diffscenetasks(invalidtasks) 1603 if self.state == RunQueueState.DUMP_SIGS:
1585 self.state = runQueueComplete 1604 dumpsigs = self.cooker.configuration.dump_signatures
1586 1605 retval = self.dump_signatures(dumpsigs)
1587 if self.state is runQueueSceneInit: 1606 if retval is False:
1607 if 'printdiff' in dumpsigs:
1608 self.write_diffscenetasks(self.invalidtasks_dump)
1609 self.state = RunQueueState.COMPLETE
1610
1611 if self.state == RunQueueState.SCENE_INIT:
1588 self.start_worker(self.rqexe) 1612 self.start_worker(self.rqexe)
1589 self.rqdata.init_progress_reporter.finish() 1613 self.rqdata.init_progress_reporter.finish()
1590 1614
@@ -1597,15 +1621,15 @@ class RunQueue:
1597 self.rqexe.tasks_notcovered.add(tid) 1621 self.rqexe.tasks_notcovered.add(tid)
1598 self.rqexe.sqdone = True 1622 self.rqexe.sqdone = True
1599 logger.info('Executing Tasks') 1623 logger.info('Executing Tasks')
1600 self.state = runQueueRunning 1624 self.state = RunQueueState.RUNNING
1601 1625
1602 if self.state is runQueueRunning: 1626 if self.state == RunQueueState.RUNNING:
1603 retval = self.rqexe.execute() 1627 retval = self.rqexe.execute()
1604 1628
1605 if self.state is runQueueCleanUp: 1629 if self.state == RunQueueState.CLEAN_UP:
1606 retval = self.rqexe.finish() 1630 retval = self.rqexe.finish()
1607 1631
1608 build_done = self.state is runQueueComplete or self.state is runQueueFailed 1632 build_done = self.state in [RunQueueState.COMPLETE, RunQueueState.FAILED]
1609 1633
1610 if build_done and self.dm_event_handler_registered: 1634 if build_done and self.dm_event_handler_registered:
1611 bb.event.remove(self.dm_event_handler_name, None, data=self.cfgData) 1635 bb.event.remove(self.dm_event_handler_name, None, data=self.cfgData)
@@ -1621,10 +1645,10 @@ class RunQueue:
1621 # Let's avoid the word "failed" if nothing actually did 1645 # Let's avoid the word "failed" if nothing actually did
1622 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped) 1646 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)
1623 1647
1624 if self.state is runQueueFailed: 1648 if self.state == RunQueueState.FAILED:
1625 raise bb.runqueue.TaskFailure(self.rqexe.failed_tids) 1649 raise bb.runqueue.TaskFailure(self.rqexe.failed_tids)
1626 1650
1627 if self.state is runQueueComplete: 1651 if self.state == RunQueueState.COMPLETE:
1628 # All done 1652 # All done
1629 return False 1653 return False
1630 1654
@@ -1644,7 +1668,7 @@ class RunQueue:
1644 self.teardown_workers() 1668 self.teardown_workers()
1645 except: 1669 except:
1646 pass 1670 pass
1647 self.state = runQueueComplete 1671 self.state = RunQueueState.COMPLETE
1648 raise 1672 raise
1649 except Exception as err: 1673 except Exception as err:
1650 logger.exception("An uncaught exception occurred in runqueue") 1674 logger.exception("An uncaught exception occurred in runqueue")
@@ -1652,12 +1676,12 @@ class RunQueue:
1652 self.teardown_workers() 1676 self.teardown_workers()
1653 except: 1677 except:
1654 pass 1678 pass
1655 self.state = runQueueComplete 1679 self.state = RunQueueState.COMPLETE
1656 raise 1680 raise
1657 1681
1658 def finish_runqueue(self, now = False): 1682 def finish_runqueue(self, now = False):
1659 if not self.rqexe: 1683 if not self.rqexe:
1660 self.state = runQueueComplete 1684 self.state = RunQueueState.COMPLETE
1661 return 1685 return
1662 1686
1663 if now: 1687 if now:
@@ -1672,33 +1696,42 @@ class RunQueue:
1672 bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True) 1696 bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
1673 1697
1674 def dump_signatures(self, options): 1698 def dump_signatures(self, options):
1675 if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset: 1699 if not hasattr(self, "dumpsigs_launched"):
1676 bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled") 1700 if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
1677 1701 bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
1678 bb.note("Writing task signature files") 1702
1679 1703 bb.note("Writing task signature files")
1680 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) 1704
1681 def chunkify(l, n): 1705 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
1682 return [l[i::n] for i in range(n)] 1706 def chunkify(l, n):
1683 tids = chunkify(list(self.rqdata.runtaskentries), max_process) 1707 return [l[i::n] for i in range(n)]
1684 # We cannot use the real multiprocessing.Pool easily due to some local data 1708 dumpsigs_tids = chunkify(list(self.rqdata.runtaskentries), max_process)
1685 # that can't be pickled. This is a cheap multi-process solution. 1709
1686 launched = [] 1710 # We cannot use the real multiprocessing.Pool easily due to some local data
1687 while tids: 1711 # that can't be pickled. This is a cheap multi-process solution.
1688 if len(launched) < max_process: 1712 self.dumpsigs_launched = []
1689 p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), )) 1713
1714 for tids in dumpsigs_tids:
1715 p = Process(target=self._rq_dump_sigtid, args=(tids, ))
1690 p.start() 1716 p.start()
1691 launched.append(p) 1717 self.dumpsigs_launched.append(p)
1692 for q in launched: 1718
1693 # The finished processes are joined when calling is_alive() 1719 return 1.0
1694 if not q.is_alive(): 1720
1695 launched.remove(q) 1721 for q in self.dumpsigs_launched:
1696 for p in launched: 1722 # The finished processes are joined when calling is_alive()
1723 if not q.is_alive():
1724 self.dumpsigs_launched.remove(q)
1725
1726 if self.dumpsigs_launched:
1727 return 1.0
1728
1729 for p in self.dumpsigs_launched:
1697 p.join() 1730 p.join()
1698 1731
1699 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) 1732 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
1700 1733
1701 return 1734 return False
1702 1735
1703 def print_diffscenetasks(self): 1736 def print_diffscenetasks(self):
1704 def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid): 1737 def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
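
The reworked dump_signatures() above is now an incremental idle callback: the first call forks one Process per chunk of tids, and each later call returns 1.0 while any child is alive and False once all have been joined. A self-contained sketch of that chunk-and-poll shape; work() is a hypothetical placeholder for the per-chunk signature dumping.

    # Chunk-and-poll sketch: return 1.0 for "call me again", False for
    # "finished", mirroring BitBake's idle-function convention.
    from multiprocessing import Process

    def chunkify(l, n):
        # Round-robin split of l into n roughly equal chunks.
        return [l[i::n] for i in range(n)]

    def work(items):
        pass  # hypothetical stand-in for _rq_dump_sigtid

    launched = []

    def poll(items, nproc=4):
        if not launched:
            for chunk in chunkify(items, nproc):
                p = Process(target=work, args=(chunk,))
                p.start()
                launched.append(p)
            return 1.0
        for p in launched[:]:
            # Finished children are reaped inside is_alive().
            if not p.is_alive():
                launched.remove(p)
        return 1.0 if launched else False
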
@@ -1971,14 +2004,14 @@ class RunQueueExecute:
1971 pass 2004 pass
1972 2005
1973 if self.failed_tids: 2006 if self.failed_tids:
1974 self.rq.state = runQueueFailed 2007 self.rq.state = RunQueueState.FAILED
1975 return 2008 return
1976 2009
1977 self.rq.state = runQueueComplete 2010 self.rq.state = RunQueueState.COMPLETE
1978 return 2011 return
1979 2012
1980 def finish(self): 2013 def finish(self):
1981 self.rq.state = runQueueCleanUp 2014 self.rq.state = RunQueueState.CLEAN_UP
1982 2015
1983 active = self.stats.active + len(self.sq_live) 2016 active = self.stats.active + len(self.sq_live)
1984 if active > 0: 2017 if active > 0:
@@ -1987,10 +2020,10 @@ class RunQueueExecute:
1987 return self.rq.active_fds() 2020 return self.rq.active_fds()
1988 2021
1989 if self.failed_tids: 2022 if self.failed_tids:
1990 self.rq.state = runQueueFailed 2023 self.rq.state = RunQueueState.FAILED
1991 return True 2024 return True
1992 2025
1993 self.rq.state = runQueueComplete 2026 self.rq.state = RunQueueState.COMPLETE
1994 return True 2027 return True
1995 2028
1996 # Used by setscene only 2029 # Used by setscene only
@@ -2109,7 +2142,7 @@ class RunQueueExecute:
2109 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData) 2142 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
2110 2143
2111 if self.rqdata.taskData[''].halt: 2144 if self.rqdata.taskData[''].halt:
2112 self.rq.state = runQueueCleanUp 2145 self.rq.state = RunQueueState.CLEAN_UP
2113 2146
2114 def task_skip(self, task, reason): 2147 def task_skip(self, task, reason):
2115 self.runq_running.add(task) 2148 self.runq_running.add(task)
@@ -2175,12 +2208,20 @@ class RunQueueExecute:
2175 if not hasattr(self, "sorted_setscene_tids"): 2208 if not hasattr(self, "sorted_setscene_tids"):
2176 # Don't want to sort this set every execution 2209 # Don't want to sort this set every execution
2177 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) 2210 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
2211 # Resume looping where we left off when we returned to feed the mainloop
2212 self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
2178 2213
2179 task = None 2214 task = None
2180 if not self.sqdone and self.can_start_task(): 2215 if not self.sqdone and self.can_start_task():
2181 # Find the next setscene to run 2216 loopcount = 0
2182 for nexttask in self.sorted_setscene_tids: 2217 # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
2218 while loopcount < len(self.rqdata.runq_setscene_tids):
2219 loopcount += 1
2220 nexttask = next(self.setscene_tids_generator)
2183 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred: 2221 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
2222 if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
2223 # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
2224 continue
2184 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \ 2225 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
2185 nexttask not in self.sq_needed_harddeps and \ 2226 nexttask not in self.sq_needed_harddeps and \
2186 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \ 2227 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
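
The scheduler above keeps an itertools.cycle over the setscene tids so each call resumes scanning where the previous one stopped, and the loop counter bounds the scan to a single full pass. A minimal sketch of the resumable scan, assuming a fixed tid list:

    # Resumable round-robin scan: the generator remembers its position
    # between calls, so already-rejected tids at the front of the list
    # are not rescanned first every time.
    import itertools

    tids = ["a", "b", "c", "d"]
    tid_cycle = itertools.cycle(tids)

    def find_next(runnable):
        for _ in range(len(tids)):  # at most one full pass
            tid = next(tid_cycle)
            if tid in runnable:
                return tid
        return None

    print(find_next({"c"}))  # "c"; the next call starts after "c"
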
@@ -2210,8 +2251,7 @@ class RunQueueExecute:
2210 if t in self.runq_running and t not in self.runq_complete: 2251 if t in self.runq_running and t not in self.runq_complete:
2211 continue 2252 continue
2212 if nexttask in self.sq_deferred: 2253 if nexttask in self.sq_deferred:
2213 if self.sq_deferred[nexttask] not in self.runq_complete: 2254 # Deferred tasks that were still deferred were skipped above so we now need to process
2214 continue
2215 logger.debug("Task %s no longer deferred" % nexttask) 2255 logger.debug("Task %s no longer deferred" % nexttask)
2216 del self.sq_deferred[nexttask] 2256 del self.sq_deferred[nexttask]
2217 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) 2257 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
@@ -2296,17 +2336,17 @@ class RunQueueExecute:
2296 2336
2297 err = self.summarise_scenequeue_errors() 2337 err = self.summarise_scenequeue_errors()
2298 if err: 2338 if err:
2299 self.rq.state = runQueueFailed 2339 self.rq.state = RunQueueState.FAILED
2300 return True 2340 return True
2301 2341
2302 if self.cooker.configuration.setsceneonly: 2342 if self.cooker.configuration.setsceneonly:
2303 self.rq.state = runQueueComplete 2343 self.rq.state = RunQueueState.COMPLETE
2304 return True 2344 return True
2305 self.sqdone = True 2345 self.sqdone = True
2306 2346
2307 if self.stats.total == 0: 2347 if self.stats.total == 0:
2308 # nothing to do 2348 # nothing to do
2309 self.rq.state = runQueueComplete 2349 self.rq.state = RunQueueState.COMPLETE
2310 return True 2350 return True
2311 2351
2312 if self.cooker.configuration.setsceneonly: 2352 if self.cooker.configuration.setsceneonly:
@@ -2373,7 +2413,7 @@ class RunQueueExecute:
2373 self.rq.start_fakeworker(self, mc) 2413 self.rq.start_fakeworker(self, mc)
2374 except OSError as exc: 2414 except OSError as exc:
2375 logger.critical("Failed to spawn fakeroot worker to run %s: %s" % (task, str(exc))) 2415 logger.critical("Failed to spawn fakeroot worker to run %s: %s" % (task, str(exc)))
2376 self.rq.state = runQueueFailed 2416 self.rq.state = RunQueueState.FAILED
2377 self.stats.taskFailed() 2417 self.stats.taskFailed()
2378 return True 2418 return True
2379 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask") 2419 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
@@ -2401,7 +2441,7 @@ class RunQueueExecute:
2401 return True 2441 return True
2402 2442
2403 if self.failed_tids: 2443 if self.failed_tids:
2404 self.rq.state = runQueueFailed 2444 self.rq.state = RunQueueState.FAILED
2405 return True 2445 return True
2406 2446
2407 # Sanity Checks 2447 # Sanity Checks
@@ -2418,9 +2458,9 @@ class RunQueueExecute:
2418 err = True 2458 err = True
2419 2459
2420 if err: 2460 if err:
2421 self.rq.state = runQueueFailed 2461 self.rq.state = RunQueueState.FAILED
2422 else: 2462 else:
2423 self.rq.state = runQueueComplete 2463 self.rq.state = RunQueueState.COMPLETE
2424 2464
2425 return True 2465 return True
2426 2466
@@ -2438,14 +2478,17 @@ class RunQueueExecute:
2438 taskdepdata_cache = {} 2478 taskdepdata_cache = {}
2439 for task in self.rqdata.runtaskentries: 2479 for task in self.rqdata.runtaskentries:
2440 (mc, fn, taskname, taskfn) = split_tid_mcfn(task) 2480 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2441 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2481 taskdepdata_cache[task] = bb.TaskData(
2442 deps = self.rqdata.runtaskentries[task].depends 2482 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2443 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2483 taskname = taskname,
2444 taskhash = self.rqdata.runtaskentries[task].hash 2484 fn = fn,
2445 unihash = self.rqdata.runtaskentries[task].unihash 2485 deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
2446 deps = self.filtermcdeps(task, mc, deps) 2486 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2447 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2487 taskhash = self.rqdata.runtaskentries[task].hash,
2448 taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2488 unihash = self.rqdata.runtaskentries[task].unihash,
2489 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2490 taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
2491 )
2449 2492
2450 self.taskdepdata_cache = taskdepdata_cache 2493 self.taskdepdata_cache = taskdepdata_cache
2451 2494
@@ -2460,9 +2503,11 @@ class RunQueueExecute:
2460 while next: 2503 while next:
2461 additional = [] 2504 additional = []
2462 for revdep in next: 2505 for revdep in next:
2463 self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash 2506 self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
2507 unihash=self.rqdata.runtaskentries[revdep].unihash
2508 )
2464 taskdepdata[revdep] = self.taskdepdata_cache[revdep] 2509 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
2465 for revdep2 in self.taskdepdata_cache[revdep][3]: 2510 for revdep2 in self.taskdepdata_cache[revdep].deps:
2466 if revdep2 not in taskdepdata: 2511 if revdep2 not in taskdepdata:
2467 additional.append(revdep2) 2512 additional.append(revdep2)
2468 next = additional 2513 next = additional
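
The cache above switches taskdepdata entries from positional lists (indexed as entry[3] or entry[6]) to bb.TaskData values built with keyword fields and updated with _replace. TaskData's definition is not in this part of the diff; the sketch below assumes a namedtuple-like type, which is what the _replace usage implies.

    # Assumed-namedtuple sketch of bb.TaskData; the real definition may
    # differ. _replace returns a new tuple with one field changed, hence
    # the reassignment in the diff above.
    from collections import namedtuple

    TaskData = namedtuple("TaskData",
        ["pn", "taskname", "fn", "deps", "provides", "taskhash",
         "unihash", "hashfn", "taskhash_deps"])

    entry = TaskData(pn="quux", taskname="do_build", fn="quux.bb",
                     deps=set(), provides=["quux"], taskhash="abc",
                     unihash="abc", hashfn="h", taskhash_deps=set())
    entry = entry._replace(unihash="def")  # immutable update
    print(entry.unihash, entry.deps)       # named access, not entry[6]
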
@@ -2531,9 +2576,6 @@ class RunQueueExecute:
2531 self.rqdata.runtaskentries[hashtid].unihash = unihash 2576 self.rqdata.runtaskentries[hashtid].unihash = unihash
2532 bb.parse.siggen.set_unihash(hashtid, unihash) 2577 bb.parse.siggen.set_unihash(hashtid, unihash)
2533 toprocess.add(hashtid) 2578 toprocess.add(hashtid)
2534 if torehash:
2535 # Need to save after set_unihash above
2536 bb.parse.siggen.save_unitaskhashes()
2537 2579
2538 # Work out all tasks which depend upon these 2580 # Work out all tasks which depend upon these
2539 total = set() 2581 total = set()
@@ -2556,17 +2598,28 @@ class RunQueueExecute:
2556 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): 2598 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
2557 next.add(p) 2599 next.add(p)
2558 2600
2601 starttime = time.time()
2602 lasttime = starttime
2603
2559 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled 2604 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
2560 while next: 2605 while next:
2561 current = next.copy() 2606 current = next.copy()
2562 next = set() 2607 next = set()
2608 ready = {}
2563 for tid in current: 2609 for tid in current:
2564 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): 2610 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
2565 continue 2611 continue
2612 # get_taskhash for a given tid *must* be called before get_unihash* below
2613 ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
2614
2615 unihashes = bb.parse.siggen.get_unihashes(ready.keys())
2616
2617 for tid in ready:
2566 orighash = self.rqdata.runtaskentries[tid].hash 2618 orighash = self.rqdata.runtaskentries[tid].hash
2567 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) 2619 newhash = ready[tid]
2568 origuni = self.rqdata.runtaskentries[tid].unihash 2620 origuni = self.rqdata.runtaskentries[tid].unihash
2569 newuni = bb.parse.siggen.get_unihash(tid) 2621 newuni = unihashes[tid]
2622
2570 # FIXME, need to check it can come from sstate at all for determinism? 2623 # FIXME, need to check it can come from sstate at all for determinism?
2571 remapped = False 2624 remapped = False
2572 if newuni == origuni: 2625 if newuni == origuni:
@@ -2587,6 +2640,15 @@ class RunQueueExecute:
2587 next |= self.rqdata.runtaskentries[tid].revdeps 2640 next |= self.rqdata.runtaskentries[tid].revdeps
2588 total.remove(tid) 2641 total.remove(tid)
2589 next.intersection_update(total) 2642 next.intersection_update(total)
2643 bb.event.check_for_interrupts()
2644
2645 if time.time() > (lasttime + 30):
2646 lasttime = time.time()
2647 hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
2648
2649 endtime = time.time()
2650 if (endtime-starttime > 60):
2651 hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
2590 2652
2591 if changed: 2653 if changed:
2592 for mc in self.rq.worker: 2654 for mc in self.rq.worker:
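
The rehash loop above now gathers every ready tid, computes its taskhash first (the inline comment notes get_taskhash must run before any unihash lookup), then resolves all unihashes in one batched call, logging if progress is slow. A schematic of that batch-then-resolve shape; get_taskhash and get_unihashes are stand-ins for the real siggen calls.

    # Batch-then-resolve sketch with periodic slow-progress logging.
    import time

    def rehash_ready(ready_tids, get_taskhash, get_unihashes):
        starttime = lasttime = time.time()
        # Taskhashes first, as the ordering constraint requires ...
        ready = {tid: get_taskhash(tid) for tid in ready_tids}
        # ... then one batched unihash query instead of N round-trips.
        unihashes = get_unihashes(ready.keys())
        for tid, newhash in ready.items():
            newuni = unihashes[tid]
            # compare newhash/newuni against cached values here
            if time.time() > lasttime + 30:
                lasttime = time.time()
                print("slow progress: %ds elapsed" % (lasttime - starttime))
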
@@ -2628,7 +2690,7 @@ class RunQueueExecute:
2628 if dep in self.runq_complete and dep not in self.runq_tasksrun: 2690 if dep in self.runq_complete and dep not in self.runq_tasksrun:
2629 bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep) 2691 bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep)
2630 self.failed_tids.append(tid) 2692 self.failed_tids.append(tid)
2631 self.rq.state = runQueueCleanUp 2693 self.rq.state = RunQueueState.CLEAN_UP
2632 return 2694 return
2633 2695
2634 if dep not in self.runq_complete: 2696 if dep not in self.runq_complete:
@@ -2712,8 +2774,12 @@ class RunQueueExecute:
2712 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2774 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2713 self.sq_task_failoutright(dep) 2775 self.sq_task_failoutright(dep)
2714 continue 2776 continue
2777
2778 # For performance, only compute allcovered once if needed
2779 if self.sqdata.sq_deps[task]:
2780 allcovered = self.scenequeue_covered | self.scenequeue_notcovered
2715 for dep in sorted(self.sqdata.sq_deps[task]): 2781 for dep in sorted(self.sqdata.sq_deps[task]):
2716 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): 2782 if self.sqdata.sq_revdeps[dep].issubset(allcovered):
2717 if dep not in self.sq_buildable: 2783 if dep not in self.sq_buildable:
2718 self.sq_buildable.add(dep) 2784 self.sq_buildable.add(dep)
2719 2785
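
Hoisting the scenequeue_covered | scenequeue_notcovered union out of the dependency loop builds the set once per task instead of once per dependency. A small before/after sketch:

    # Before: the union was rebuilt on every iteration.
    #   for dep in sorted(deps):
    #       if revdeps[dep].issubset(covered | notcovered): ...
    # After: compute it once, and only when there is anything to check.
    def buildable_deps(deps, revdeps, covered, notcovered):
        out = []
        if deps:
            allcovered = covered | notcovered  # hoisted invariant
            for dep in sorted(deps):
                if revdeps[dep].issubset(allcovered):
                    out.append(dep)
        return out
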
@@ -2759,7 +2825,7 @@ class RunQueueExecute:
2759 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2825 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2760 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks): 2826 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2761 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) 2827 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
2762 self.rq.state = runQueueCleanUp 2828 self.rq.state = RunQueueState.CLEAN_UP
2763 2829
2764 def sq_task_complete(self, task): 2830 def sq_task_complete(self, task):
2765 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) 2831 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -2806,13 +2872,19 @@ class RunQueueExecute:
2806 additional = [] 2872 additional = []
2807 for revdep in next: 2873 for revdep in next:
2808 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2874 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
2809 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2810 deps = getsetscenedeps(revdep) 2875 deps = getsetscenedeps(revdep)
2811 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2876
2812 taskhash = self.rqdata.runtaskentries[revdep].hash 2877 taskdepdata[revdep] = bb.TaskData(
2813 unihash = self.rqdata.runtaskentries[revdep].unihash 2878 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2814 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2879 taskname = taskname,
2815 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2880 fn = fn,
2881 deps = deps,
2882 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2883 taskhash = self.rqdata.runtaskentries[revdep].hash,
2884 unihash = self.rqdata.runtaskentries[revdep].unihash,
2885 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2886 taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
2887 )
2816 for revdep2 in deps: 2888 for revdep2 in deps:
2817 if revdep2 not in taskdepdata: 2889 if revdep2 not in taskdepdata:
2818 additional.append(revdep2) 2890 additional.append(revdep2)
@@ -2964,14 +3036,13 @@ def build_scenequeue_data(sqdata, rqdata, sqrq):
2964 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) 3036 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries))
2965 3037
2966 # Sanity check all dependencies could be changed to setscene task references 3038 # Sanity check all dependencies could be changed to setscene task references
2967 for taskcounter, tid in enumerate(rqdata.runtaskentries): 3039 for tid in rqdata.runtaskentries:
2968 if tid in rqdata.runq_setscene_tids: 3040 if tid in rqdata.runq_setscene_tids:
2969 pass 3041 pass
2970 elif sq_revdeps_squash[tid]: 3042 elif sq_revdeps_squash[tid]:
2971 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.") 3043 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
2972 else: 3044 else:
2973 del sq_revdeps_squash[tid] 3045 del sq_revdeps_squash[tid]
2974 rqdata.init_progress_reporter.update(taskcounter)
2975 3046
2976 rqdata.init_progress_reporter.next_stage() 3047 rqdata.init_progress_reporter.next_stage()
2977 3048
@@ -3261,7 +3332,7 @@ class runQueuePipe():
3261 3332
3262 start = len(self.queue) 3333 start = len(self.queue)
3263 try: 3334 try:
3264 self.queue.extend(self.input.read(102400) or b"") 3335 self.queue.extend(self.input.read(512 * 1024) or b"")
3265 except (OSError, IOError) as e: 3336 except (OSError, IOError) as e:
3266 if e.errno != errno.EAGAIN: 3337 if e.errno != errno.EAGAIN:
3267 raise 3338 raise
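
The read-size bump above (100 KB to 512 KiB per poll) lets runQueuePipe drain chatty workers in fewer wakeups; EAGAIN from the non-blocking fd is still treated as "no data yet". The drain pattern in isolation:

    # Non-blocking pipe drain: EAGAIN means "nothing to read right now",
    # any other OSError is a real failure and propagates.
    import errno

    def drain(pipe, queue):
        try:
            queue.extend(pipe.read(512 * 1024) or b"")
        except (OSError, IOError) as e:
            if e.errno != errno.EAGAIN:
                raise
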
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index 76b189291d..d0f73590cc 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -13,7 +13,7 @@
13import bb 13import bb
14import bb.event 14import bb.event
15import logging 15import logging
16import multiprocessing 16from bb import multiprocessing
17import threading 17import threading
18import array 18import array
19import os 19import os
@@ -80,9 +80,6 @@ class idleFinish():
80 self.msg = msg 80 self.msg = msg
81 81
82class ProcessServer(): 82class ProcessServer():
83 profile_filename = "profile.log"
84 profile_processed_filename = "profile.log.processed"
85
86 def __init__(self, lock, lockname, sock, sockname, server_timeout, xmlrpcinterface): 83 def __init__(self, lock, lockname, sock, sockname, server_timeout, xmlrpcinterface):
87 self.command_channel = False 84 self.command_channel = False
88 self.command_channel_reply = False 85 self.command_channel_reply = False
@@ -140,23 +137,7 @@ class ProcessServer():
140 serverlog("Error writing to lock file: %s" % str(e)) 137 serverlog("Error writing to lock file: %s" % str(e))
141 pass 138 pass
142 139
143 if self.cooker.configuration.profile: 140 return bb.utils.profile_function("main" in self.cooker.configuration.profile, self.main, "profile-mainloop.log")
144 try:
145 import cProfile as profile
146 except:
147 import profile
148 prof = profile.Profile()
149
150 ret = profile.Profile.runcall(prof, self.main)
151
152 prof.dump_stats("profile.log")
153 bb.utils.process_profilelog("profile.log")
154 serverlog("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
155
156 else:
157 ret = self.main()
158
159 return ret
160 141
161 def _idle_check(self): 142 def _idle_check(self):
162 return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None 143 return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
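
Both profiling branches removed from this file now delegate to bb.utils.profile_function(enabled, function, logfile). Its implementation lands in the utils.py portion of this series and is not shown here; the sketch below is only a plausible shape and the real helper may differ.

    # Hedged sketch of a profile_function-style helper.
    def profile_function(profile, function, output_fn):
        if profile:
            import cProfile
            prof = cProfile.Profile()
            ret = prof.runcall(function)
            prof.dump_stats(output_fn)
            # processing of output_fn into readable stats goes here
            return ret
        return function()
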
@@ -321,7 +302,22 @@ class ProcessServer():
321 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) 302 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
322 seendata = True 303 seendata = True
323 304
324 ready = self.idle_commands(.1, fds) 305 if not self.idle:
306 self.idle = threading.Thread(target=self.idle_thread)
307 self.idle.start()
308 elif self.idle and not self.idle.is_alive():
309 serverlog("Idle thread terminated, main thread exiting too")
310 bb.error("Idle thread terminated, main thread exiting too")
311 self.quit = True
312
313 nextsleep = 1.0
314 if self.xmlrpc:
315 nextsleep = self.xmlrpc.get_timeout(nextsleep)
316 try:
317 ready = select.select(fds,[],[],nextsleep)[0]
318 except InterruptedError:
319 # Ignore EINTR
320 ready = []
325 321
326 if self.idle: 322 if self.idle:
327 self.idle.join() 323 self.idle.join()
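
With idle work moved to its own thread, the main loop now selects directly on its fds and treats an EINTR-interrupted wait as a timeout rather than an error. The guard in isolation:

    # EINTR-tolerant select: a signal during the wait yields "nothing
    # ready" instead of raising.
    import select

    def wait_ready(fds, timeout):
        try:
            return select.select(fds, [], [], timeout)[0]
        except InterruptedError:
            return []
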
@@ -402,20 +398,7 @@ class ProcessServer():
402 serverlog("".join(msg)) 398 serverlog("".join(msg))
403 399
404 def idle_thread(self): 400 def idle_thread(self):
405 if self.cooker.configuration.profile: 401 bb.utils.profile_function("idle" in self.cooker.configuration.profile, self.idle_thread_internal, "profile-idleloop.log")
406 try:
407 import cProfile as profile
408 except:
409 import profile
410 prof = profile.Profile()
411
412 ret = profile.Profile.runcall(prof, self.idle_thread_internal)
413
414 prof.dump_stats("profile-mainloop.log")
415 bb.utils.process_profilelog("profile-mainloop.log")
416 serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
417 else:
418 self.idle_thread_internal()
419 402
420 def idle_thread_internal(self): 403 def idle_thread_internal(self):
421 def remove_idle_func(function): 404 def remove_idle_func(function):
@@ -424,7 +407,7 @@ class ProcessServer():
424 self.idle_cond.notify_all() 407 self.idle_cond.notify_all()
425 408
426 while not self.quit: 409 while not self.quit:
427 nextsleep = 0.1 410 nextsleep = 1.0
428 fds = [] 411 fds = []
429 412
430 with bb.utils.lock_timeout(self._idlefuncsLock): 413 with bb.utils.lock_timeout(self._idlefuncsLock):
@@ -462,7 +445,7 @@ class ProcessServer():
462 445
463 # Create new heartbeat event? 446 # Create new heartbeat event?
464 now = time.time() 447 now = time.time()
465 if bb.event._heartbeat_enabled and now >= self.next_heartbeat: 448 if items and bb.event._heartbeat_enabled and now >= self.next_heartbeat:
466 # We might have missed heartbeats. Just trigger once in 449 # We might have missed heartbeats. Just trigger once in
467 # that case and continue after the usual delay. 450 # that case and continue after the usual delay.
468 self.next_heartbeat += self.heartbeat_seconds 451 self.next_heartbeat += self.heartbeat_seconds
@@ -485,31 +468,6 @@ class ProcessServer():
485 if nextsleep is not None: 468 if nextsleep is not None:
486 select.select(fds,[],[],nextsleep)[0] 469 select.select(fds,[],[],nextsleep)[0]
487 470
488 def idle_commands(self, delay, fds=None):
489 nextsleep = delay
490 if not fds:
491 fds = []
492
493 if not self.idle:
494 self.idle = threading.Thread(target=self.idle_thread)
495 self.idle.start()
496 elif self.idle and not self.idle.is_alive():
497 serverlog("Idle thread terminated, main thread exiting too")
498 bb.error("Idle thread terminated, main thread exiting too")
499 self.quit = True
500
501 if nextsleep is not None:
502 if self.xmlrpc:
503 nextsleep = self.xmlrpc.get_timeout(nextsleep)
504 try:
505 return select.select(fds,[],[],nextsleep)[0]
506 except InterruptedError:
507 # Ignore EINTR
508 return []
509 else:
510 return select.select(fds,[],[],0)[0]
511
512
513class ServerCommunicator(): 471class ServerCommunicator():
514 def __init__(self, connection, recv): 472 def __init__(self, connection, recv):
515 self.connection = connection 473 self.connection = connection
@@ -642,7 +600,7 @@ class BitBakeServer(object):
642 os.set_inheritable(self.bitbake_lock.fileno(), True) 600 os.set_inheritable(self.bitbake_lock.fileno(), True)
643 os.set_inheritable(self.readypipein, True) 601 os.set_inheritable(self.readypipein, True)
644 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") 602 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
645 os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1])) 603 os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(list(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
646 604
647def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile): 605def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
648 606
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 04b0b17db1..ebc271aca4 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -14,6 +14,8 @@ from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
14import bb.server.xmlrpcclient 14import bb.server.xmlrpcclient
15 15
16import bb 16import bb
17import bb.cooker
18import bb.event
17 19
18# This request handler checks if the request has a "Bitbake-token" header 20# This request handler checks if the request has a "Bitbake-token" header
19# field (this comes from the client side) and compares it with its internal 21# field (this comes from the client side) and compares it with its internal
@@ -54,7 +56,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
54 56
55 def __init__(self, interface, cooker, parent): 57 def __init__(self, interface, cooker, parent):
56 # Use auto port configuration 58 # Use auto port configuration
57 if (interface[1] == -1): 59 if interface[1] == -1:
58 interface = (interface[0], 0) 60 interface = (interface[0], 0)
59 SimpleXMLRPCServer.__init__(self, interface, 61 SimpleXMLRPCServer.__init__(self, interface,
60 requestHandler=BitBakeXMLRPCRequestHandler, 62 requestHandler=BitBakeXMLRPCRequestHandler,
@@ -87,11 +89,12 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
87 def handle_requests(self): 89 def handle_requests(self):
88 self._handle_request_noblock() 90 self._handle_request_noblock()
89 91
90class BitBakeXMLRPCServerCommands(): 92class BitBakeXMLRPCServerCommands:
91 93
92 def __init__(self, server): 94 def __init__(self, server):
93 self.server = server 95 self.server = server
94 self.has_client = False 96 self.has_client = False
97 self.event_handle = None
95 98
96 def registerEventHandler(self, host, port): 99 def registerEventHandler(self, host, port):
97 """ 100 """
@@ -100,8 +103,8 @@ class BitBakeXMLRPCServerCommands():
100 s, t = bb.server.xmlrpcclient._create_server(host, port) 103 s, t = bb.server.xmlrpcclient._create_server(host, port)
101 104
102 # we don't allow connections if the cooker is running 105 # we don't allow connections if the cooker is running
103 if (self.server.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]): 106 if self.server.cooker.state in [bb.cooker.State.PARSING, bb.cooker.State.RUNNING]:
104 return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.server.cooker.state) 107 return None, f"Cooker is busy: {self.server.cooker.state.name}"
105 108
106 self.event_handle = bb.event.register_UIHhandler(s, True) 109 self.event_handle = bb.event.register_UIHhandler(s, True)
107 return self.event_handle, 'OK' 110 return self.event_handle, 'OK'
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 8ab08ec961..41eb643012 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -201,9 +201,6 @@ class SignatureGenerator(object):
201 def save_unitaskhashes(self): 201 def save_unitaskhashes(self):
202 return 202 return
203 203
204 def copy_unitaskhashes(self, targetdir):
205 return
206
207 def set_setscene_tasks(self, setscene_tasks): 204 def set_setscene_tasks(self, setscene_tasks):
208 return 205 return
209 206
@@ -381,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
381 self.taints[tid] = taint 378 self.taints[tid] = taint
382 logger.warning("%s is tainted from a forced run" % tid) 379 logger.warning("%s is tainted from a forced run" % tid)
383 380
384 return 381 return set(dep for _, dep in self.runtaskdeps[tid])
385 382
386 def get_taskhash(self, tid, deps, dataCaches): 383 def get_taskhash(self, tid, deps, dataCaches):
387 384
@@ -418,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
418 def save_unitaskhashes(self): 415 def save_unitaskhashes(self):
419 self.unihash_cache.save(self.unitaskhashes) 416 self.unihash_cache.save(self.unitaskhashes)
420 417
421 def copy_unitaskhashes(self, targetdir):
422 self.unihash_cache.copyfile(targetdir)
423
424 def dump_sigtask(self, mcfn, task, stampbase, runtime): 418 def dump_sigtask(self, mcfn, task, stampbase, runtime):
425 tid = mcfn + ":" + task 419 tid = mcfn + ":" + task
426 mc = bb.runqueue.mc_from_tid(mcfn) 420 mc = bb.runqueue.mc_from_tid(mcfn)
@@ -540,7 +534,7 @@ class SignatureGeneratorUniHashMixIn(object):
540 def __init__(self, data): 534 def __init__(self, data):
541 self.extramethod = {} 535 self.extramethod = {}
542 # NOTE: The cache only tracks hashes that exist. Hashes that don't 536 # NOTE: The cache only tracks hashes that exist. Hashes that don't
543 # exist are always queries from the server since it is possible for 537 # exist are always queried from the server since it is possible for
544 # hashes to appear over time, but much less likely for them to 538 # hashes to appear over time, but much less likely for them to
545 # disappear 539 # disappear
546 self.unihash_exists_cache = set() 540 self.unihash_exists_cache = set()
@@ -558,11 +552,11 @@ class SignatureGeneratorUniHashMixIn(object):
558 super().__init__(data) 552 super().__init__(data)
559 553
560 def get_taskdata(self): 554 def get_taskdata(self):
561 return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() 555 return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
562 556
563 def set_taskdata(self, data): 557 def set_taskdata(self, data):
564 self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] 558 self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
565 super().set_taskdata(data[7:]) 559 super().set_taskdata(data[6:])
566 560
567 def get_hashserv_creds(self): 561 def get_hashserv_creds(self):
568 if self.username and self.password: 562 if self.username and self.password:
@@ -595,13 +589,6 @@ class SignatureGeneratorUniHashMixIn(object):
595 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) 589 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
596 yield self._client 590 yield self._client
597 591
598 @contextmanager
599 def client_pool(self):
600 with self._client_env():
601 if getattr(self, '_client_pool', None) is None:
602 self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
603 yield self._client_pool
604
605 def reset(self, data): 592 def reset(self, data):
606 self.__close_clients() 593 self.__close_clients()
607 return super().reset(data) 594 return super().reset(data)
@@ -678,25 +665,20 @@ class SignatureGeneratorUniHashMixIn(object):
678 if len(query) == 0: 665 if len(query) == 0:
679 return {} 666 return {}
680 667
681 uncached_query = {} 668 query_keys = []
682 result = {} 669 result = {}
683 for key, unihash in query.items(): 670 for key, unihash in query.items():
684 if unihash in self.unihash_exists_cache: 671 if unihash in self.unihash_exists_cache:
685 result[key] = True 672 result[key] = True
686 else: 673 else:
687 uncached_query[key] = unihash 674 query_keys.append(key)
688 675
689 if self.max_parallel <= 1 or len(uncached_query) <= 1: 676 if query_keys:
690 # No parallelism required. Make the query serially with the single client
691 with self.client() as client: 677 with self.client() as client:
692 uncached_result = { 678 query_result = client.unihash_exists_batch(query[k] for k in query_keys)
693 key: client.unihash_exists(value) for key, value in uncached_query.items()
694 }
695 else:
696 with self.client_pool() as client_pool:
697 uncached_result = client_pool.unihashes_exist(uncached_query)
698 679
699 for key, exists in uncached_result.items(): 680 for idx, key in enumerate(query_keys):
681 exists = query_result[idx]
700 if exists: 682 if exists:
701 self.unihash_exists_cache.add(query[key]) 683 self.unihash_exists_cache.add(query[key])
702 result[key] = exists 684 result[key] = exists
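
Existence checks above now answer cache hits locally and send only the misses to the server in a single unihash_exists_batch call, adding positive answers back to the cache. A sketch of that cache-then-batch lookup; exists_batch stands in for the hashserv client method.

    # Cache-then-batch lookup sketch. 'cache' is a set of unihashes known
    # to exist; only misses travel to the server.
    def check_exists(query, cache, exists_batch):
        result, misses = {}, []
        for key, unihash in query.items():
            if unihash in cache:
                result[key] = True
            else:
                misses.append(key)
        if misses:
            answers = exists_batch(query[k] for k in misses)
            for key, exists in zip(misses, answers):
                if exists:
                    cache.add(query[key])  # only positive results cached
                result[key] = exists
        return result
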
@@ -712,29 +694,24 @@ class SignatureGeneratorUniHashMixIn(object):
712 unihash 694 unihash
713 """ 695 """
714 result = {} 696 result = {}
715 queries = {} 697 query_tids = []
716 query_result = {}
717 698
718 for tid in tids: 699 for tid in tids:
719 unihash = self.get_cached_unihash(tid) 700 unihash = self.get_cached_unihash(tid)
720 if unihash: 701 if unihash:
721 result[tid] = unihash 702 result[tid] = unihash
722 else: 703 else:
723 queries[tid] = (self._get_method(tid), self.taskhash[tid]) 704 query_tids.append(tid)
724
725 if len(queries) == 0:
726 return result
727 705
728 if self.max_parallel <= 1 or len(queries) <= 1: 706 if query_tids:
729 # No parallelism required. Make the query serially with the single client 707 unihashes = []
730 with self.client() as client: 708 try:
731 for tid, args in queries.items(): 709 with self.client() as client:
732 query_result[tid] = client.get_unihash(*args) 710 unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
733 else: 711 except (ConnectionError, FileNotFoundError, EOFError) as e:
734 with self.client_pool() as client_pool: 712 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
735 query_result = client_pool.get_unihashes(queries)
736 713
737 for tid, unihash in query_result.items(): 714 for idx, tid in enumerate(query_tids):
738 # In the absence of being able to discover a unique hash from the 715 # In the absence of being able to discover a unique hash from the
739 # server, make it be equivalent to the taskhash. The unique "hash" only 716 # server, make it be equivalent to the taskhash. The unique "hash" only
740 # really needs to be a unique string (not even necessarily a hash), but 717 # really needs to be a unique string (not even necessarily a hash), but
@@ -749,7 +726,9 @@ class SignatureGeneratorUniHashMixIn(object):
749 # to the server, there is a better chance that they will agree on 726 # to the server, there is a better chance that they will agree on
750 # the unique hash. 727 # the unique hash.
751 taskhash = self.taskhash[tid] 728 taskhash = self.taskhash[tid]
752 if unihash: 729
730 if unihashes and unihashes[idx]:
731 unihash = unihashes[idx]
753 # A unique hash equal to the taskhash is not very interesting, 732 # A unique hash equal to the taskhash is not very interesting,
754 # so it is reported it at debug level 2. If they differ, that 733 # so it is reported it at debug level 2. If they differ, that
755 # is much more interesting, so it is reported at debug level 1 734 # is much more interesting, so it is reported at debug level 1
@@ -758,7 +737,6 @@ class SignatureGeneratorUniHashMixIn(object):
758 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) 737 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
759 unihash = taskhash 738 unihash = taskhash
760 739
761
762 self.set_unihash(tid, unihash) 740 self.set_unihash(tid, unihash)
763 self.unihash[tid] = unihash 741 self.unihash[tid] = unihash
764 result[tid] = unihash 742 result[tid] = unihash
@@ -839,7 +817,7 @@ class SignatureGeneratorUniHashMixIn(object):
839 d.setVar('BB_UNIHASH', new_unihash) 817 d.setVar('BB_UNIHASH', new_unihash)
840 else: 818 else:
841 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) 819 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
842 except ConnectionError as e: 820 except (ConnectionError, FileNotFoundError, EOFError) as e:
843 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 821 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
844 finally: 822 finally:
845 if sigfile: 823 if sigfile:
@@ -881,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
881 # TODO: What to do here? 859 # TODO: What to do here?
882 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) 860 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
883 861
884 except ConnectionError as e: 862 except (ConnectionError, FileNotFoundError, EOFError) as e:
885 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 863 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
886 864
887 return False 865 return False
@@ -895,13 +873,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
895 super().init_rundepcheck(data) 873 super().init_rundepcheck(data)
896 self.server = data.getVar('BB_HASHSERVE') 874 self.server = data.getVar('BB_HASHSERVE')
897 self.method = "sstate_output_hash" 875 self.method = "sstate_output_hash"
898 self.max_parallel = 1
899 876
900def clean_checksum_file_path(file_checksum_tuple): 877def clean_checksum_file_path(file_checksum_tuple):
901 f, cs = file_checksum_tuple 878 f, cs = file_checksum_tuple
902 if "/./" in f: 879 if "/./" in f:
903 return "./" + f.split("/./")[1] 880 return "./" + f.split("/./")[1]
904 return f 881 return os.path.basename(f)
905 882
906def dump_this_task(outfile, d): 883def dump_this_task(outfile, d):
907 import bb.parse 884 import bb.parse
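
clean_checksum_file_path now falls back to os.path.basename() so absolute paths without a "/./" marker no longer leak host-specific prefixes into signature data. The committed function, exercised directly:

    import os

    def clean_checksum_file_path(file_checksum_tuple):
        f, cs = file_checksum_tuple
        if "/./" in f:
            return "./" + f.split("/./")[1]
        return os.path.basename(f)

    print(clean_checksum_file_path(("/wrk/./src/a.c", "x")))  # ./src/a.c
    print(clean_checksum_file_path(("/home/u/b.c", "x")))     # b.c
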
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index f6585fb3aa..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
106 self.parseExpression("foo=$(echo bar)") 106 self.parseExpression("foo=$(echo bar)")
107 self.assertExecs(set(["echo"])) 107 self.assertExecs(set(["echo"]))
108 108
109 def test_assign_subshell_expansion_quotes(self):
110 self.parseExpression('foo="$(echo bar)"')
111 self.assertExecs(set(["echo"]))
112
113 def test_assign_subshell_expansion_nested(self):
114 self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
115 self.assertExecs(set(["func1", "func2", "func3"]))
116
117 def test_assign_subshell_expansion_multiple(self):
118 self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
119 self.assertExecs(set(["func1", "func2", "func3"]))
120
121 def test_assign_subshell_expansion_escaped_quotes(self):
122 self.parseExpression('foo="\\"fo\\"o$(func1)"')
123 self.assertExecs(set(["func1"]))
124
125 def test_assign_subshell_expansion_empty(self):
126 self.parseExpression('foo="bar$()foo"')
127 self.assertExecs(set())
128
129 def test_assign_subshell_backticks(self):
130 self.parseExpression("foo=`echo bar`")
131 self.assertExecs(set(["echo"]))
132
133 def test_assign_subshell_backticks_quotes(self):
134 self.parseExpression('foo="`echo bar`"')
135 self.assertExecs(set(["echo"]))
136
137 def test_assign_subshell_backticks_multiple(self):
138 self.parseExpression('foo="`func1 bar` `func2`"')
139 self.assertExecs(set(["func1", "func2"]))
140
141 def test_assign_subshell_backticks_escaped_quotes(self):
142 self.parseExpression('foo="\\"fo\\"o`func1`"')
143 self.assertExecs(set(["func1"]))
144
145 def test_assign_subshell_backticks_empty(self):
146 self.parseExpression('foo="bar``foo"')
147 self.assertExecs(set())
148
109 def test_shell_unexpanded(self): 149 def test_shell_unexpanded(self):
110 self.setEmptyVars(["QT_BASE_NAME"]) 150 self.setEmptyVars(["QT_BASE_NAME"])
111 self.parseExpression('echo "${QT_BASE_NAME}"') 151 self.parseExpression('echo "${QT_BASE_NAME}"')
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
index 95af3f96d7..16c297b315 100644
--- a/bitbake/lib/bb/tests/compression.py
+++ b/bitbake/lib/bb/tests/compression.py
@@ -66,8 +66,8 @@ class CompressionTests(object):
66 66
67class LZ4Tests(CompressionTests, unittest.TestCase): 67class LZ4Tests(CompressionTests, unittest.TestCase):
68 def setUp(self): 68 def setUp(self):
69 if shutil.which("lz4c") is None: 69 if shutil.which("lz4") is None:
70 self.skipTest("'lz4c' not found") 70 self.skipTest("'lz4' not found")
71 super().setUp() 71 super().setUp()
72 72
73 @contextlib.contextmanager 73 @contextlib.contextmanager
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index cbc7c1ecd4..a895f6a58e 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -450,17 +450,64 @@ class TestFlags(unittest.TestCase):
450 self.d = bb.data.init() 450 self.d = bb.data.init()
451 self.d.setVar("foo", "value of foo") 451 self.d.setVar("foo", "value of foo")
452 self.d.setVarFlag("foo", "flag1", "value of flag1") 452 self.d.setVarFlag("foo", "flag1", "value of flag1")
453 self.d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1")
453 self.d.setVarFlag("foo", "flag2", "value of flag2") 454 self.d.setVarFlag("foo", "flag2", "value of flag2")
455 self.d.setVarFlag("foo", "_defaultval_flag_flag2", "default of flag2")
456 self.d.setVarFlag("foo", "flag3", "value of flag3")
457 self.d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")
454 458
455 def test_setflag(self): 459 def test_setflag(self):
456 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 460 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
457 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") 461 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
462 self.assertDictEqual(
463 self.d.getVarFlags("foo"),
464 {
465 "flag1": "value of flag1",
466 "flag2": "value of flag2",
467 "flag3": "value of flag3",
468 "flagnovalue": "default of flagnovalue",
469 }
470 )
471 self.assertDictEqual(
472 self.d.getVarFlags("foo", internalflags=True),
473 {
474 "_content": "value of foo",
475 "flag1": "value of flag1",
476 "flag2": "value of flag2",
477 "flag3": "value of flag3",
478 "_defaultval_flag_flag1": "default of flag1",
479 "_defaultval_flag_flag2": "default of flag2",
480 "_defaultval_flag_flagnovalue": "default of flagnovalue",
481 }
482 )
458 483
459 def test_delflag(self): 484 def test_delflag(self):
460 self.d.delVarFlag("foo", "flag2") 485 self.d.delVarFlag("foo", "flag2")
486 self.d.delVarFlag("foo", "flag3")
461 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 487 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
462 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) 488 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
463 489 self.assertDictEqual(
490 self.d.getVarFlags("foo"),
491 {
492 "flag1": "value of flag1",
493 "flagnovalue": "default of flagnovalue",
494 }
495 )
496 self.assertDictEqual(
497 self.d.getVarFlags("foo", internalflags=True),
498 {
499 "_content": "value of foo",
500 "flag1": "value of flag1",
501 "_defaultval_flag_flag1": "default of flag1",
502 "_defaultval_flag_flagnovalue": "default of flagnovalue",
503 }
504 )
505
506 def test_delvar(self):
507 self.d.delVar("foo")
508 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), None)
509 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
510 self.assertEqual(self.d.getVarFlags("foo", internalflags=True), None)
464 511
465class Contains(unittest.TestCase): 512class Contains(unittest.TestCase):
466 def setUp(self): 513 def setUp(self):
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
new file mode 100644
index 0000000000..e27ee134f2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
@@ -0,0 +1,3528 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
3 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
4<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
5<head>
6<title>MiniUPnP download zone</title>
7<link href="../css/miniupnp.css" rel="stylesheet" type="text/css"/>
8<meta name="description" content="files download of the miniupnp project"/>
9<meta name="keywords" content="upnp,download,openbsd,freebsd,linux,windows"/>
10<meta name="viewport" content="width=device-width" />
11<link href="rss.php" title="MiniUPnPd, MiniUPnPc and MiniSSDPd Files" type="application/rss+xml" rel="alternate" />
12<link rel="canonical" href="http://miniupnp.free.fr/files/" />
13<link rel="alternate" hreflang="fr" href="/files/index_fr.php" />
14<script async="async" src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js" type="text/javascript"></script>
15<script type="text/javascript">
16 (adsbygoogle = window.adsbygoogle || []).push({
17 google_ad_client: "ca-pub-6883148866513192",
18 enable_page_level_ads: true
19 });
20</script>
21</head>
22<body>
23<h2>MiniUPnP Project</h2>
24
25<p align="center">
26<a href="../">Home</a> |
27<b>Downloads</b> |
28<a href="../devicelist.php">Compatibility list</a> |
29<a href="../libnatpmp.html">libnatpmp</a> |
30<a href="../minissdpd.html">MiniSSDPd</a> |
31<a href="../xchat-upnp.html">xchat upnp patch</a> |
32<a href="../search.html">Search</a> |
33<a href="https://miniupnp.tuxfamily.org/forum/">Forum</a>
34</p>
35<p align="center">
36<b>English</b> | <a href="/files/index_fr.php">Fran&ccedil;ais</a>
37</p>
38
39<div align="center">
40<script type="text/javascript"><!--
41google_ad_client = "pub-6883148866513192";
42/* 728x90, created 7/10/08 */
43google_ad_slot = "0774293141";
44google_ad_width = 728;
45google_ad_height = 90;
46//-->
47</script>
48<script type="text/javascript"
49src="https://pagead2.googlesyndication.com/pagead/show_ads.js">
50</script>
51</div>
52
53<h2>MiniUPnP download zone</h2>
54<p>
55Find on this page the source of miniupnp and
56some related files. You will also find precompiled binaries
57of the UPnP client sample program for windows compiled using
58<a href="https://mingw.osdn.io/">MinGW</a>. There are also Windows
59binaries (including python module) automatically built using
60<a href="https://ci.appveyor.com/project/miniupnp/miniupnp/build/artifacts">AppVeyor</a>.
61</p>
62<p>If you just need one of the software installed on your machine,
63you probably don't need to download and compile the source files.
64It is very likely that a package/port already exists for
65your system/distribution. Refer to your system documentation
66to find how to search and install a package/port.
67Mac OS X does have port systems too : see
68<a href="http://www.macports.org/">MacPorts</a> or
69<a href="http://mxcl.github.com/homebrew/">Homebrew</a> or
70<a href="http://www.finkproject.org/">Fink</a>.
71</p>
72<p>
73The miniupnpc (client) sources have been successfully compiled
74under Windows XP/vista/7/10/etc. (using
75<a href="https://mingw.osdn.io/">MinGW</a>,
76<a href="https://www.mingw-w64.org/">Mingw-w64</a>
77or <a href="http://www.cygwin.com/">Cygwin</a>),
78Linux, OpenBSD, FreeBSD, NetBSD, DragonFlyBSD,
79Solaris, MacOS X and AmigaOS. <br/>
80The Makefile of the client is made for GNU make :
81check which version your system have
82with the command "make --version". On some systems, such as OpenBSD,
83you have to use "gmake". Under Windows with MinGW, GNU make is
84called "mingw32-make" and a sligthly modified version of the Makefile
85should be used : Makefile.mingw. Run "mingw32make.bat" to compile. <br/>
86If you have any compatibility problem, please post on the
87<a href="https://miniupnp.tuxfamily.org/forum/">forum</a>
88or contact me by email.
89</p>
90<!--
91<p>A devoted user compiled miniupnp<strong>c</strong> for
92Openwrt (currently Kamikaze 7.09)
93and his work is available here :
94<a href="http://replay.waybackmachine.org/20081120030628/http://www.myantihero.net/pub/openwrt/packages/">http://myantihero.net/pub/openwrt/packages/</a>.</p>
95-->
96<p>Get miniupnpc under AmigaOS 4 on
97<a href="http://os4depot.net/index.php?function=showfile&amp;file=network/misc/miniupnpc.lha">OS4Depot</a>.
98</p>
99<p>
100Dario Meloni has made a Ruby Gem embedding miniupnpc :
101<a href="https://rubygems.org/gems/mupnp">https://rubygems.org/gems/mupnp</a>.
102</p>
103<p>
104The python module is available on pypi.org :
105<a href="https://pypi.org/project/miniupnpc/">pip install miniupnpc</a>.
106</p>
107<p>
108The daemon (starting in November 2006) compiles with BSD make under BSD
109and Solaris.<br/>
110To compile the daemon under linux, use "make -f Makefile.linux"<br/>
111To compile for <a href="http://openwrt.org/">OpenWRT</a>
112please read the README.openwrt file, or use the packages
113<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpc</a> and
114<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpd</a>.
115<!-- The
116<a href="http://www.x-wrt.org/">X-Wrt</a> project is providing
117precompiled ipkg packages for OpenWrt for both OpenWrt
118<a href="ftp://ftp.berlios.de/pub/xwrt/packages/">White Russian</a>
119and OpenWrt
120<a href="ftp://ftp.berlios.de/pub/xwrt/kamikaze/packages">kamikaze</a>.
121Check
122<a href="ftp://ftp.berlios.de/pub/xwrt/">ftp://ftp.berlios.de/pub/xwrt/</a>.
123For White Russian, take a look at
124<a href="http://jackassofalltrades.com/openwrt/">this</a>. -->
125<br/>
126<a href="http://pfsense.com">pfSense</a> users are advised to use the
127miniupnpd port available for their system. Recent versions of
128pfSense include MiniUPnPd in the base system.
129<br/>
130For <a href="http://en.wikipedia.org/wiki/WRT54G">Linksys WRT54G</a>
131and WRT54GL owners,
132<a href="http://sourceforge.net/projects/tarifa/">Tarifa firmware</a>
133is another alternative to get miniUPnPd running on the router.
134</p>
135<p>
136Please read README and
137LICENCE files included with the distribution for further informations.
138</p>
139<p>
140The MiniUPnP daemon (miniupnpd) is working under
141<a href="http://www.openbsd.org/">OpenBSD</a>,
142<a href="http://www.netbsd.org/">NetBSD</a>,
143<a href="http://www.freebsd.org/">FreeBSD</a>,
144<a href="http://www.dragonflybsd.org/">DragonFlyBSD</a>,
145<a href="http://www.apple.com/macosx/">Mac OS X</a> and
146(<a href="https://en.wikipedia.org/wiki/OpenSolaris">Open</a>)<a href="http://www.oracle.com/us/products/servers-storage/solaris/solaris11/overview/index.html">Solaris</a>
147with <a href="http://www.openbsd.org/faq/pf/">pf</a>,
148with <a href="https://en.wikipedia.org/wiki/IPFilter">IP Filter</a> or
149with <a href="http://en.wikipedia.org/wiki/Ipfirewall">ipfw</a>.
150The linux version uses either libiptc which permits to access
151<a href="http://netfilter.org/">netfilter</a>
152rules inside the kernel the same way as
153<a href="https://www.netfilter.org/projects/iptables/index.html">iptables</a>, or
154<a href="https://www.netfilter.org/projects/libnftnl/index.html">libnftnl</a>
155which is the equivalent for
156<a href="https://www.netfilter.org/projects/nftables/index.html">nftables</a>.
157</p>
158
159<p>Releases are now GPG signed with the key <a href="../A31ACAAF.asc">A31ACAAF</a>.
160Previous signing key was <a href="../A5C0863C.asc">A5C0863C</a>.
161Get it from your favorite
162<a href="https://pgp.mit.edu/pks/lookup?search=0xA31ACAAF&amp;op=index&amp;fingerprint=on">key server</a>.</p>
163
164<h4>REST API</h4>
165<p>You can use the REST API to get the latest releases available:</p>
166<ul>
167<li><a href="rest.php/tags/miniupnpd?count=1">rest.php/tags/miniupnpd?count=1</a>: latest miniupnpd.</li>
168<li><a href="rest.php/tags?count=1">rest.php/tags?count=1</a>: miniupnpc, miniupnpd and minissdpd.</li>
169</ul>
170
171<h4>You can help !</h4>
172<p>If you make a package/port for your favorite OS distribution,
173inform me so I can upload the package here or add a link to your
174repository.
175</p>
176
177<h4>Latest files</h4>
178<table>
179<tr><th>name</th>
180<th>size</th>
181<th>date</th>
182<th>comment</th>
183<th><!-- Changelog --></th>
184<th><!-- Signature --></th>
185</tr>
186<tr>
187 <td class="filename"><a href='miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
188 <td class="filesize">140137</td>
189 <td class="filedate">05/03/2025 10:31</td>
190 <td class="comment">MiniUPnP client release source code</td>
191 <td><a href="changelog.php?file=miniupnpc-2.3.2.tar.gz">changelog</a></td>
192 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
193</tr>
194<tr>
195 <td class="filename"><a href='miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
196 <td class="filesize">265329</td>
197 <td class="filedate">22/06/2024 22:31</td>
198 <td class="comment">MiniUPnP daemon release source code</td>
199 <td><a href="changelog.php?file=miniupnpd-2.3.7.tar.gz">changelog</a></td>
200 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
201</tr>
202<tr>
203 <td class="filename"><a href='libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
204 <td class="filesize">26506</td>
205 <td class="filedate">23/04/2023 11:02</td>
206 <td class="comment">latest libnatpmp source code</td>
207 <td><a href="changelog.php?file=libnatpmp-20230423.tar.gz">changelog</a></td>
208 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
209</tr>
210<tr>
211 <td class="filename"><a href='minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
212 <td class="filesize">39077</td>
213 <td class="filedate">22/10/2022 18:41</td>
214 <td class="comment">MiniSSDPd release source code</td>
215 <td><a href="changelog.php?file=minissdpd-1.6.0.tar.gz">changelog</a></td>
216 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
217</tr>
218<tr>
219 <td class="filename"><a href='upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
220 <td class="filesize">69503</td>
221 <td class="filedate">15/05/2022 14:31</td>
222 <td class="comment">Windows executable</td>
223 <td><a href="changelog.php?file=upnpc-exe-win32-20220515.zip">changelog</a></td>
224 <td></td>
225</tr>
226<tr>
227 <td class="filename"><a href='minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
228 <td class="filesize">38870</td>
229 <td class="filedate">04/11/2021 23:34</td>
230 <td class="comment">latest MiniSSDPd source code</td>
231 <td><a href="changelog.php?file=minissdpd-1.5.20211105.tar.gz">changelog</a></td>
232 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
233</tr>
234<tr>
235 <td class="filename"><a href='miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
236 <td class="filesize">97682</td>
237 <td class="filedate">15/10/2020 22:31</td>
238 <td class="comment">latest MiniUPnP client source code</td>
239 <td><a href="changelog.php?file=miniupnpc-2.1.20201016.tar.gz">changelog</a></td>
240 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
241</tr>
242<tr>
243 <td class="filename"><a href='miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
244 <td class="filesize">245426</td>
245 <td class="filedate">10/05/2020 18:23</td>
246 <td class="comment">latest MiniUPnP daemon source code</td>
247 <td><a href="changelog.php?file=miniupnpd-2.1.20200510.tar.gz">changelog</a></td>
248 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
249</tr>
250<tr>
251 <td class="filename"><a href='xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
252 <td class="filesize">10329</td>
253 <td class="filedate">11/08/2011 15:18</td>
254 <td class="comment">Patch to add UPnP capabilities to xchat</td>
255 <td><a href="changelog.php?file=xchat-upnp20110811.patch">changelog</a></td>
256 <td></td>
257</tr>
258<tr>
259 <td class="filename"><a href='minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
260 <td class="filesize">7598</td>
261 <td class="filedate">25/07/2011 14:57</td>
262 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
263 <td><a href="changelog.php?file=minidlna_1.0.21.minissdp1.patch">changelog</a></td>
264 <td></td>
265</tr>
266<tr>
267 <td class="filename"><a href='miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
268 <td class="filesize">14840</td>
269 <td class="filedate">04/11/2006 18:16</td>
270 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
271 <td><a href="changelog.php?file=miniupnpc-new20060630.tar.gz">changelog</a></td>
272 <td></td>
273</tr>
274</table>
275
276<h4>All files</h4>
277<table>
278<tr><th>name</th>
280<th>size (bytes)</th>
280<th>date</th>
281<th>comment</th>
282<th><!-- signature --></th>
283</tr>
284<tr>
285 <td class="filename"><a href='download.php?file=miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
286 <td class="filesize">140137</td>
287 <td class="filedate">05/03/2025 10:31:36 +0000</td>
288 <td class="comment">MiniUPnP client release source code</td>
289 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
290</tr>
291<tr>
292 <td class="filename"><a href='download.php?file=miniupnpc-2.3.1.tar.gz'>miniupnpc-2.3.1.tar.gz</a></td>
293 <td class="filesize">139499</td>
294 <td class="filedate">23/02/2025 16:44:16 +0000</td>
295 <td class="comment">MiniUPnP client release source code</td>
296 <td><a href="miniupnpc-2.3.1.tar.gz.sig">Signature</a></td>
297</tr>
298<tr>
299 <td class="filename"><a href='download.php?file=miniupnpc-2.3.0.tar.gz'>miniupnpc-2.3.0.tar.gz</a></td>
300 <td class="filesize">105071</td>
301 <td class="filedate">10/01/2025 23:16:45 +0000</td>
302 <td class="comment">MiniUPnP client release source code</td>
303 <td><a href="miniupnpc-2.3.0.tar.gz.sig">Signature</a></td>
304</tr>
305<tr>
306 <td class="filename"><a href='download.php?file=miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
307 <td class="filesize">265329</td>
308 <td class="filedate">22/06/2024 22:31:38 +0000</td>
309 <td class="comment">MiniUPnP daemon release source code</td>
310 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
311</tr>
312<tr>
313 <td class="filename"><a href='download.php?file=miniupnpc-2.2.8.tar.gz'>miniupnpc-2.2.8.tar.gz</a></td>
314 <td class="filesize">104603</td>
315 <td class="filedate">08/06/2024 22:13:39 +0000</td>
316 <td class="comment">MiniUPnP client release source code</td>
317 <td><a href="miniupnpc-2.2.8.tar.gz.sig">Signature</a></td>
318</tr>
319<tr>
320 <td class="filename"><a href='download.php?file=miniupnpd-2.3.6.tar.gz'>miniupnpd-2.3.6.tar.gz</a></td>
321 <td class="filesize">263018</td>
322 <td class="filedate">19/03/2024 23:39:51 +0000</td>
323 <td class="comment">MiniUPnP daemon release source code</td>
324 <td><a href="miniupnpd-2.3.6.tar.gz.sig">Signature</a></td>
325</tr>
326<tr>
327 <td class="filename"><a href='download.php?file=miniupnpc-2.2.7.tar.gz'>miniupnpc-2.2.7.tar.gz</a></td>
328 <td class="filesize">104258</td>
329 <td class="filedate">19/03/2024 23:25:18 +0000</td>
330 <td class="comment">MiniUPnP client release source code</td>
331 <td><a href="miniupnpc-2.2.7.tar.gz.sig">Signature</a></td>
332</tr>
333<tr>
334 <td class="filename"><a href='download.php?file=miniupnpd-2.3.5.tar.gz'>miniupnpd-2.3.5.tar.gz</a></td>
335 <td class="filesize">261952</td>
336 <td class="filedate">02/03/2024 11:04:07 +0000</td>
337 <td class="comment">MiniUPnP daemon release source code</td>
338 <td><a href="miniupnpd-2.3.5.tar.gz.sig">Signature</a></td>
339</tr>
340<tr>
341 <td class="filename"><a href='download.php?file=miniupnpd-2.3.4.tar.gz'>miniupnpd-2.3.4.tar.gz</a></td>
342 <td class="filesize">260810</td>
343 <td class="filedate">04/01/2024 00:53:17 +0000</td>
344 <td class="comment">MiniUPnP daemon release source code</td>
345 <td><a href="miniupnpd-2.3.4.tar.gz.sig">Signature</a></td>
346</tr>
347<tr>
348 <td class="filename"><a href='download.php?file=miniupnpc-2.2.6.tar.gz'>miniupnpc-2.2.6.tar.gz</a></td>
349 <td class="filesize">103949</td>
350 <td class="filedate">04/01/2024 00:27:14 +0000</td>
351 <td class="comment">MiniUPnP client release source code</td>
352 <td><a href="miniupnpc-2.2.6.tar.gz.sig">Signature</a></td>
353</tr>
354<tr>
355 <td class="filename"><a href='download.php?file=miniupnpc-2.2.5.tar.gz'>miniupnpc-2.2.5.tar.gz</a></td>
356 <td class="filesize">103654</td>
357 <td class="filedate">11/06/2023 23:14:56 +0000</td>
358 <td class="comment">MiniUPnP client release source code</td>
359 <td><a href="miniupnpc-2.2.5.tar.gz.sig">Signature</a></td>
360</tr>
361<tr>
362 <td class="filename"><a href='download.php?file=libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
363 <td class="filesize">26506</td>
364 <td class="filedate">23/04/2023 11:02:09 +0000</td>
365 <td class="comment">libnatpmp source code</td>
366 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
367</tr>
368<tr>
369 <td class="filename"><a href='download.php?file=miniupnpd-2.3.3.tar.gz'>miniupnpd-2.3.3.tar.gz</a></td>
370 <td class="filesize">260079</td>
371 <td class="filedate">17/02/2023 03:07:46 +0000</td>
372 <td class="comment">MiniUPnP daemon release source code</td>
373 <td><a href="miniupnpd-2.3.3.tar.gz.sig">Signature</a></td>
374</tr>
375<tr>
376 <td class="filename"><a href='download.php?file=miniupnpd-2.3.2.tar.gz'>miniupnpd-2.3.2.tar.gz</a></td>
377 <td class="filesize">259686</td>
378 <td class="filedate">19/01/2023 23:18:08 +0000</td>
379 <td class="comment">MiniUPnP daemon release source code</td>
380 <td><a href="miniupnpd-2.3.2.tar.gz.sig">Signature</a></td>
381</tr>
382<tr>
383 <td class="filename"><a href='download.php?file=minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
384 <td class="filesize">39077</td>
385 <td class="filedate">22/10/2022 18:41:54 +0000</td>
386 <td class="comment">MiniSSDPd release source code</td>
387 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
388</tr>
389<tr>
390 <td class="filename"><a href='download.php?file=miniupnpc-2.2.4.tar.gz'>miniupnpc-2.2.4.tar.gz</a></td>
391 <td class="filesize">102932</td>
392 <td class="filedate">21/10/2022 21:01:01 +0000</td>
393 <td class="comment">MiniUPnP client release source code</td>
394 <td><a href="miniupnpc-2.2.4.tar.gz.sig">Signature</a></td>
395</tr>
396<tr>
397 <td class="filename"><a href='download.php?file=miniupnpd-2.3.1.tar.gz'>miniupnpd-2.3.1.tar.gz</a></td>
398 <td class="filesize">258050</td>
399 <td class="filedate">16/10/2022 05:58:44 +0000</td>
400 <td class="comment">MiniUPnP daemon release source code</td>
401 <td><a href="miniupnpd-2.3.1.tar.gz.sig">Signature</a></td>
402</tr>
403<tr>
404 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
405 <td class="filesize">69503</td>
406 <td class="filedate">15/05/2022 14:31:25 +0000</td>
407 <td class="comment">Windows executable</td>
408 <td></td>
409</tr>
410<tr>
411 <td class="filename"><a href='download.php?file=hexchat-2.16.patch'>hexchat-2.16.patch</a></td>
412 <td class="filesize">8147</td>
413 <td class="filedate">19/03/2022 16:52:05 +0000</td>
414 <td class="comment"></td>
415 <td></td>
416</tr>
417<tr>
418 <td class="filename"><a href='download.php?file=miniupnpd-2.3.0.tar.gz'>miniupnpd-2.3.0.tar.gz</a></td>
419 <td class="filesize">256069</td>
420 <td class="filedate">23/01/2022 00:23:32 +0000</td>
421 <td class="comment">MiniUPnP daemon release source code</td>
422 <td><a href="miniupnpd-2.3.0.tar.gz.sig">Signature</a></td>
423</tr>
424<tr>
425 <td class="filename"><a href='download.php?file=minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
426 <td class="filesize">38870</td>
427 <td class="filedate">04/11/2021 23:34:49 +0000</td>
428 <td class="comment">MiniSSDPd source code</td>
429 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
430</tr>
431<tr>
432 <td class="filename"><a href='download.php?file=miniupnpc-2.2.3.tar.gz'>miniupnpc-2.2.3.tar.gz</a></td>
433 <td class="filesize">101360</td>
434 <td class="filedate">28/09/2021 21:43:32 +0000</td>
435 <td class="comment">MiniUPnP client release source code</td>
436 <td><a href="miniupnpc-2.2.3.tar.gz.sig">Signature</a></td>
437</tr>
438<tr>
439 <td class="filename"><a href='download.php?file=miniupnpd-2.2.3.tar.gz'>miniupnpd-2.2.3.tar.gz</a></td>
440 <td class="filesize">254752</td>
441 <td class="filedate">21/08/2021 08:35:13 +0000</td>
442 <td class="comment">MiniUPnP daemon release source code</td>
443 <td><a href="miniupnpd-2.2.3.tar.gz.sig">Signature</a></td>
444</tr>
445<tr>
446 <td class="filename"><a href='download.php?file=miniupnpd-2.2.2.tar.gz'>miniupnpd-2.2.2.tar.gz</a></td>
447 <td class="filesize">250649</td>
448 <td class="filedate">13/05/2021 11:30:11 +0000</td>
449 <td class="comment">MiniUPnP daemon release source code</td>
450 <td><a href="miniupnpd-2.2.2.tar.gz.sig">Signature</a></td>
451</tr>
452<tr>
453 <td class="filename"><a href='download.php?file=miniupnpc-2.2.2.tar.gz'>miniupnpc-2.2.2.tar.gz</a></td>
454 <td class="filesize">100008</td>
455 <td class="filedate">02/03/2021 23:44:52 +0000</td>
456 <td class="comment">MiniUPnP client release source code</td>
457 <td><a href="miniupnpc-2.2.2.tar.gz.sig">Signature</a></td>
458</tr>
459<tr>
460 <td class="filename"><a href='download.php?file=miniupnpd-2.2.1.tar.gz'>miniupnpd-2.2.1.tar.gz</a></td>
461 <td class="filesize">250023</td>
462 <td class="filedate">20/12/2020 18:08:08 +0000</td>
463 <td class="comment">MiniUPnP daemon release source code</td>
464 <td><a href="miniupnpd-2.2.1.tar.gz.sig">Signature</a></td>
465</tr>
466<tr>
467 <td class="filename"><a href='download.php?file=miniupnpc-2.2.1.tar.gz'>miniupnpc-2.2.1.tar.gz</a></td>
468 <td class="filesize">99595</td>
469 <td class="filedate">20/12/2020 18:08:02 +0000</td>
470 <td class="comment">MiniUPnP client release source code</td>
471 <td><a href="miniupnpc-2.2.1.tar.gz.sig">Signature</a></td>
472</tr>
473<tr>
474 <td class="filename"><a href='download.php?file=miniupnpc-2.2.0.tar.gz'>miniupnpc-2.2.0.tar.gz</a></td>
475 <td class="filesize">98348</td>
476 <td class="filedate">09/11/2020 19:51:24 +0000</td>
477 <td class="comment">MiniUPnP client release source code</td>
478 <td><a href="miniupnpc-2.2.0.tar.gz.sig">Signature</a></td>
479</tr>
480<tr>
481 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0.tar.gz'>miniupnpd-2.2.0.tar.gz</a></td>
482 <td class="filesize">249858</td>
483 <td class="filedate">31/10/2020 09:20:59 +0000</td>
484 <td class="comment">MiniUPnP daemon release source code</td>
485 <td><a href="miniupnpd-2.2.0.tar.gz.sig">Signature</a></td>
486</tr>
487<tr>
488 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC3.tar.gz'>miniupnpd-2.2.0-RC3.tar.gz</a></td>
489 <td class="filesize">249879</td>
490 <td class="filedate">30/10/2020 21:49:49 +0000</td>
491 <td class="comment">MiniUPnP daemon release source code</td>
492 <td><a href="miniupnpd-2.2.0-RC3.tar.gz.sig">Signature</a></td>
493</tr>
494<tr>
495 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
496 <td class="filesize">97682</td>
497 <td class="filedate">15/10/2020 22:31:09 +0000</td>
498 <td class="comment">MiniUPnP client source code</td>
499 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
500</tr>
501<tr>
502 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC2.tar.gz'>miniupnpd-2.2.0-RC2.tar.gz</a></td>
503 <td class="filesize">248756</td>
504 <td class="filedate">28/09/2020 21:57:22 +0000</td>
505 <td class="comment">MiniUPnP daemon release source code</td>
506 <td><a href="miniupnpd-2.2.0-RC2.tar.gz.sig">Signature</a></td>
507</tr>
508<tr>
509 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20200928.tar.gz'>miniupnpc-2.1.20200928.tar.gz</a></td>
510 <td class="filesize">96508</td>
511 <td class="filedate">28/09/2020 21:56:09 +0000</td>
512 <td class="comment">MiniUPnP client source code</td>
513 <td><a href="miniupnpc-2.1.20200928.tar.gz.sig">Signature</a></td>
514</tr>
515<tr>
516 <td class="filename"><a href='download.php?file=minissdpd-1.5.20200928.tar.gz'>minissdpd-1.5.20200928.tar.gz</a></td>
517 <td class="filesize">37860</td>
518 <td class="filedate">28/09/2020 21:55:40 +0000</td>
519 <td class="comment">MiniSSDPd source code</td>
520 <td><a href="minissdpd-1.5.20200928.tar.gz.sig">Signature</a></td>
521</tr>
522<tr>
523 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC1.tar.gz'>miniupnpd-2.2.0-RC1.tar.gz</a></td>
524 <td class="filesize">247772</td>
525 <td class="filedate">06/06/2020 18:34:50 +0000</td>
526 <td class="comment">MiniUPnP daemon release source code</td>
527 <td><a href="miniupnpd-2.2.0-RC1.tar.gz.sig">Signature</a></td>
528</tr>
529<tr>
530 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC0.tar.gz'>miniupnpd-2.2.0-RC0.tar.gz</a></td>
531 <td class="filesize">245507</td>
532 <td class="filedate">16/05/2020 18:03:17 +0000</td>
533 <td class="comment">MiniUPnP daemon release source code</td>
534 <td><a href="miniupnpd-2.2.0-RC0.tar.gz.sig">Signature</a></td>
535</tr>
536<tr>
537 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
538 <td class="filesize">245426</td>
539 <td class="filedate">10/05/2020 18:23:13 +0000</td>
540 <td class="comment">MiniUPnP daemon source code</td>
541 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
542</tr>
543<tr>
544 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200329.tar.gz'>miniupnpd-2.1.20200329.tar.gz</a></td>
545 <td class="filesize">243725</td>
546 <td class="filedate">29/03/2020 09:11:02 +0000</td>
547 <td class="comment">MiniUPnP daemon source code</td>
548 <td><a href="miniupnpd-2.1.20200329.tar.gz.sig">Signature</a></td>
549</tr>
550<tr>
551 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20191224.tar.gz'>miniupnpc-2.1.20191224.tar.gz</a></td>
552 <td class="filesize">94740</td>
553 <td class="filedate">23/12/2019 23:37:32 +0000</td>
554 <td class="comment">MiniUPnP client source code</td>
555 <td><a href="miniupnpc-2.1.20191224.tar.gz.sig">Signature</a></td>
556</tr>
557<tr>
558 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191006.tar.gz'>miniupnpd-2.1.20191006.tar.gz</a></td>
559 <td class="filesize">243255</td>
560 <td class="filedate">06/10/2019 21:02:31 +0000</td>
561 <td class="comment">MiniUPnP daemon source code</td>
562 <td><a href="miniupnpd-2.1.20191006.tar.gz.sig">Signature</a></td>
563</tr>
564<tr>
565 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191005.tar.gz'>miniupnpd-2.1.20191005.tar.gz</a></td>
566 <td class="filesize">244100</td>
567 <td class="filedate">05/10/2019 21:33:08 +0000</td>
568 <td class="comment">MiniUPnP daemon source code</td>
569 <td><a href="miniupnpd-2.1.20191005.tar.gz.sig">Signature</a></td>
570</tr>
571<tr>
572 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191003.tar.gz'>miniupnpd-2.1.20191003.tar.gz</a></td>
573 <td class="filesize">243287</td>
574 <td class="filedate">02/10/2019 22:23:51 +0000</td>
575 <td class="comment">MiniUPnP daemon source code</td>
576 <td><a href="miniupnpd-2.1.20191003.tar.gz.sig">Signature</a></td>
577</tr>
578<tr>
579 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190924.tar.gz'>miniupnpd-2.1.20190924.tar.gz</a></td>
580 <td class="filesize">241008</td>
581 <td class="filedate">24/09/2019 11:58:15 +0000</td>
582 <td class="comment">MiniUPnP daemon source code</td>
583 <td><a href="miniupnpd-2.1.20190924.tar.gz.sig">Signature</a></td>
584</tr>
585<tr>
586 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190902.tar.gz'>miniupnpd-2.1.20190902.tar.gz</a></td>
587 <td class="filesize">240742</td>
588 <td class="filedate">01/09/2019 23:03:03 +0000</td>
589 <td class="comment">MiniUPnP daemon source code</td>
590 <td><a href="miniupnpd-2.1.20190902.tar.gz.sig">Signature</a></td>
591</tr>
592<tr>
593 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190824.tar.gz'>miniupnpd-2.1.20190824.tar.gz</a></td>
594 <td class="filesize">240490</td>
595 <td class="filedate">24/08/2019 09:21:52 +0000</td>
596 <td class="comment">MiniUPnP daemon source code</td>
597 <td><a href="miniupnpd-2.1.20190824.tar.gz.sig">Signature</a></td>
598</tr>
599<tr>
600 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190824.tar.gz'>minissdpd-1.5.20190824.tar.gz</a></td>
601 <td class="filesize">37300</td>
602 <td class="filedate">24/08/2019 09:17:32 +0000</td>
603 <td class="comment">MiniSSDPd source code</td>
604 <td><a href="minissdpd-1.5.20190824.tar.gz.sig">Signature</a></td>
605</tr>
606<tr>
607 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190824.tar.gz'>miniupnpc-2.1.20190824.tar.gz</a></td>
608 <td class="filesize">94564</td>
609 <td class="filedate">24/08/2019 09:12:50 +0000</td>
610 <td class="comment">MiniUPnP client source code</td>
611 <td><a href="miniupnpc-2.1.20190824.tar.gz.sig">Signature</a></td>
612</tr>
613<tr>
614 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190630.tar.gz'>miniupnpd-2.1.20190630.tar.gz</a></td>
615 <td class="filesize">240466</td>
616 <td class="filedate">30/06/2019 20:27:38 +0000</td>
617 <td class="comment">MiniUPnP daemon source code</td>
618 <td><a href="miniupnpd-2.1.20190630.tar.gz.sig">Signature</a></td>
619</tr>
620<tr>
621 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190625.tar.gz'>miniupnpd-2.1.20190625.tar.gz</a></td>
622 <td class="filesize">240120</td>
623 <td class="filedate">25/06/2019 21:33:49 +0000</td>
624 <td class="comment">MiniUPnP daemon source code</td>
625 <td><a href="miniupnpd-2.1.20190625.tar.gz.sig">Signature</a></td>
626</tr>
627<tr>
628 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190625.tar.gz'>miniupnpc-2.1.20190625.tar.gz</a></td>
629 <td class="filesize">94461</td>
630 <td class="filedate">25/06/2019 21:33:26 +0000</td>
631 <td class="comment">MiniUPnP client source code</td>
632 <td><a href="miniupnpc-2.1.20190625.tar.gz.sig">Signature</a></td>
633</tr>
634<tr>
635 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190502.tar.gz'>miniupnpd-2.1.20190502.tar.gz</a></td>
636 <td class="filesize">236052</td>
637 <td class="filedate">02/05/2019 17:22:23 +0000</td>
638 <td class="comment">MiniUPnP daemon source code</td>
639 <td><a href="miniupnpd-2.1.20190502.tar.gz.sig">Signature</a></td>
640</tr>
641<tr>
642 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190408.tar.gz'>miniupnpc-2.1.20190408.tar.gz</a></td>
643 <td class="filesize">94216</td>
644 <td class="filedate">08/04/2019 12:50:21 +0000</td>
645 <td class="comment">MiniUPnP client source code</td>
646 <td><a href="miniupnpc-2.1.20190408.tar.gz.sig">Signature</a></td>
647</tr>
648<tr>
649 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190408.tar.gz'>miniupnpd-2.1.20190408.tar.gz</a></td>
650 <td class="filesize">235989</td>
651 <td class="filedate">08/04/2019 12:50:01 +0000</td>
652 <td class="comment">MiniUPnP daemon source code</td>
653 <td><a href="miniupnpd-2.1.20190408.tar.gz.sig">Signature</a></td>
654</tr>
655<tr>
656 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190403.tar.gz'>miniupnpc-2.1.20190403.tar.gz</a></td>
657 <td class="filesize">94204</td>
658 <td class="filedate">03/04/2019 15:41:36 +0000</td>
659 <td class="comment">MiniUPnP client source code</td>
660 <td><a href="miniupnpc-2.1.20190403.tar.gz.sig">Signature</a></td>
661</tr>
662<tr>
663 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190403.tar.gz'>miniupnpd-2.1.20190403.tar.gz</a></td>
664 <td class="filesize">235909</td>
665 <td class="filedate">03/04/2019 15:41:17 +0000</td>
666 <td class="comment">MiniUPnP daemon source code</td>
667 <td><a href="miniupnpd-2.1.20190403.tar.gz.sig">Signature</a></td>
668</tr>
669<tr>
670 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190210.tar.gz'>minissdpd-1.5.20190210.tar.gz</a></td>
671 <td class="filesize">37227</td>
672 <td class="filedate">10/02/2019 15:21:49 +0000</td>
673 <td class="comment">MiniSSDPd source code</td>
674 <td><a href="minissdpd-1.5.20190210.tar.gz.sig">Signature</a></td>
675</tr>
676<tr>
677 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190210.tar.gz'>miniupnpc-2.1.20190210.tar.gz</a></td>
678 <td class="filesize">94125</td>
679 <td class="filedate">10/02/2019 12:46:09 +0000</td>
680 <td class="comment">MiniUPnP client source code</td>
681 <td><a href="miniupnpc-2.1.20190210.tar.gz.sig">Signature</a></td>
682</tr>
683<tr>
684 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190210.tar.gz'>miniupnpd-2.1.20190210.tar.gz</a></td>
685 <td class="filesize">235093</td>
686 <td class="filedate">10/02/2019 11:20:11 +0000</td>
687 <td class="comment">MiniUPnP daemon source code</td>
688 <td><a href="miniupnpd-2.1.20190210.tar.gz.sig">Signature</a></td>
689</tr>
690<tr>
691 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20180706.tar.gz'>miniupnpd-2.1.20180706.tar.gz</a></td>
692 <td class="filesize">233675</td>
693 <td class="filedate">06/07/2018 12:44:24 +0000</td>
694 <td class="comment">MiniUPnP daemon source code</td>
695 <td><a href="miniupnpd-2.1.20180706.tar.gz.sig">Signature</a></td>
696</tr>
697<tr>
698 <td class="filename"><a href='download.php?file=miniupnpd-2.1.tar.gz'>miniupnpd-2.1.tar.gz</a></td>
699 <td class="filesize">225458</td>
700 <td class="filedate">08/05/2018 21:50:32 +0000</td>
701 <td class="comment">MiniUPnP daemon release source code</td>
702 <td><a href="miniupnpd-2.1.tar.gz.sig">Signature</a></td>
703</tr>
704<tr>
705 <td class="filename"><a href='download.php?file=miniupnpc-2.1.tar.gz'>miniupnpc-2.1.tar.gz</a></td>
706 <td class="filesize">91914</td>
707 <td class="filedate">07/05/2018 11:10:59 +0000</td>
708 <td class="comment">MiniUPnP client release source code</td>
709 <td><a href="miniupnpc-2.1.tar.gz.sig">Signature</a></td>
710</tr>
711<tr>
712 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180503.tar.gz'>miniupnpd-2.0.20180503.tar.gz</a></td>
713 <td class="filesize">225454</td>
714 <td class="filedate">03/05/2018 08:33:10 +0000</td>
715 <td class="comment">MiniUPnP daemon source code</td>
716 <td></td>
717</tr>
718<tr>
719 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180503.tar.gz'>miniupnpc-2.0.20180503.tar.gz</a></td>
720 <td class="filesize">88207</td>
721 <td class="filedate">03/05/2018 08:31:22 +0000</td>
722 <td class="comment">MiniUPnP client source code</td>
723 <td></td>
724</tr>
725<tr>
726 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180422.tar.gz'>miniupnpd-2.0.20180422.tar.gz</a></td>
727 <td class="filesize">224942</td>
728 <td class="filedate">22/04/2018 19:48:54 +0000</td>
729 <td class="comment">MiniUPnP daemon source code</td>
730 <td></td>
731</tr>
732<tr>
733 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180412.tar.gz'>miniupnpd-2.0.20180412.tar.gz</a></td>
734 <td class="filesize">224831</td>
735 <td class="filedate">12/04/2018 08:16:25 +0000</td>
736 <td class="comment">MiniUPnP daemon source code</td>
737 <td></td>
738</tr>
739<tr>
740 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180410.tar.gz'>miniupnpd-2.0.20180410.tar.gz</a></td>
741 <td class="filesize">224736</td>
742 <td class="filedate">10/04/2018 07:58:28 +0000</td>
743 <td class="comment">MiniUPnP daemon source code</td>
744 <td></td>
745</tr>
746<tr>
747 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180410.tar.gz'>miniupnpc-2.0.20180410.tar.gz</a></td>
748 <td class="filesize">87363</td>
749 <td class="filedate">10/04/2018 07:52:55 +0000</td>
750 <td class="comment">MiniUPnP client source code</td>
751 <td></td>
752</tr>
753<tr>
754 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180406.tar.gz'>miniupnpc-2.0.20180406.tar.gz</a></td>
755 <td class="filesize">87374</td>
756 <td class="filedate">06/04/2018 10:55:21 +0000</td>
757 <td class="comment">MiniUPnP client source code</td>
758 <td></td>
759</tr>
760<tr>
761 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180223.tar.gz'>minissdpd-1.5.20180223.tar.gz</a></td>
762 <td class="filesize">36179</td>
763 <td class="filedate">23/02/2018 14:24:07 +0000</td>
764 <td class="comment">MiniSSDPd source code</td>
765 <td></td>
766</tr>
767<tr>
768 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180222.tar.gz'>miniupnpc-2.0.20180222.tar.gz</a></td>
769 <td class="filesize">87018</td>
770 <td class="filedate">22/02/2018 15:09:24 +0000</td>
771 <td class="comment">MiniUPnP client source code</td>
772 <td></td>
773</tr>
774<tr>
775 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180222.tar.gz'>miniupnpd-2.0.20180222.tar.gz</a></td>
776 <td class="filesize">223697</td>
777 <td class="filedate">22/02/2018 15:09:14 +0000</td>
778 <td class="comment">MiniUPnP daemon source code</td>
779 <td></td>
780</tr>
781<tr>
782 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180203.tar.gz'>miniupnpd-2.0.20180203.tar.gz</a></td>
783 <td class="filesize">223084</td>
784 <td class="filedate">03/02/2018 22:34:46 +0000</td>
785 <td class="comment">MiniUPnP daemon source code</td>
786 <td></td>
787</tr>
788<tr>
789 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180203.tar.gz'>miniupnpc-2.0.20180203.tar.gz</a></td>
790 <td class="filesize">86772</td>
791 <td class="filedate">03/02/2018 22:34:32 +0000</td>
792 <td class="comment">MiniUPnP client source code</td>
793 <td></td>
794</tr>
795<tr>
796 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180203.tar.gz'>minissdpd-1.5.20180203.tar.gz</a></td>
797 <td class="filesize">35848</td>
798 <td class="filedate">03/02/2018 22:33:08 +0000</td>
799 <td class="comment">MiniSSDPd source code</td>
800 <td></td>
801</tr>
802<tr>
803 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171212.tar.gz'>miniupnpc-2.0.20171212.tar.gz</a></td>
804 <td class="filesize">86607</td>
805 <td class="filedate">12/12/2017 12:03:38 +0000</td>
806 <td class="comment">MiniUPnP client source code</td>
807 <td></td>
808</tr>
809<tr>
810 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20171212.tar.gz'>miniupnpd-2.0.20171212.tar.gz</a></td>
811 <td class="filesize">222617</td>
812 <td class="filedate">12/12/2017 12:03:32 +0000</td>
813 <td class="comment">MiniUPnP daemon source code</td>
814 <td></td>
815</tr>
816<tr>
817 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171102.tar.gz'>miniupnpc-2.0.20171102.tar.gz</a></td>
818 <td class="filesize">86363</td>
819 <td class="filedate">02/11/2017 17:58:34 +0000</td>
820 <td class="comment">MiniUPnP client source code</td>
821 <td></td>
822</tr>
823<tr>
824 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170509.tar.gz'>miniupnpc-2.0.20170509.tar.gz</a></td>
825 <td class="filesize">86055</td>
826 <td class="filedate">09/05/2017 10:14:56 +0000</td>
827 <td class="comment">MiniUPnP client source code</td>
828 <td></td>
829</tr>
830<tr>
831 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170421.tar.gz'>miniupnpc-2.0.20170421.tar.gz</a></td>
832 <td class="filesize">85984</td>
833 <td class="filedate">21/04/2017 12:02:26 +0000</td>
834 <td class="comment">MiniUPnP client source code</td>
835 <td></td>
836</tr>
837<tr>
838 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20170421.tar.gz'>miniupnpd-2.0.20170421.tar.gz</a></td>
839 <td class="filesize">219191</td>
840 <td class="filedate">21/04/2017 12:02:06 +0000</td>
841 <td class="comment">MiniUPnP daemon source code</td>
842 <td></td>
843</tr>
844<tr>
845 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20161216.tar.gz'>miniupnpd-2.0.20161216.tar.gz</a></td>
846 <td class="filesize">218119</td>
847 <td class="filedate">16/12/2016 09:34:08 +0000</td>
848 <td class="comment">MiniUPnP daemon source code</td>
849 <td></td>
850</tr>
851<tr>
852 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20161216.tar.gz'>miniupnpc-2.0.20161216.tar.gz</a></td>
853 <td class="filesize">85780</td>
854 <td class="filedate">16/12/2016 09:34:03 +0000</td>
855 <td class="comment">MiniUPnP client source code</td>
856 <td></td>
857</tr>
858<tr>
859 <td class="filename"><a href='download.php?file=minissdpd-1.5.20161216.tar.gz'>minissdpd-1.5.20161216.tar.gz</a></td>
860 <td class="filesize">35078</td>
861 <td class="filedate">16/12/2016 09:33:59 +0000</td>
862 <td class="comment">MiniSSDPd source code</td>
863 <td></td>
864</tr>
865<tr>
866 <td class="filename"><a href='download.php?file=miniupnpd-2.0.tar.gz'>miniupnpd-2.0.tar.gz</a></td>
867 <td class="filesize">217802</td>
868 <td class="filedate">19/04/2016 21:12:01 +0000</td>
869 <td class="comment">MiniUPnP daemon release source code</td>
870 <td><a href="miniupnpd-2.0.tar.gz.sig">Signature</a></td>
871</tr>
872<tr>
873 <td class="filename"><a href='download.php?file=miniupnpc-2.0.tar.gz'>miniupnpc-2.0.tar.gz</a></td>
874 <td class="filesize">85287</td>
875 <td class="filedate">19/04/2016 21:07:52 +0000</td>
876 <td class="comment">MiniUPnP client release source code</td>
877 <td></td>
878</tr>
879<tr>
880 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160301.tar.gz'>minissdpd-1.5.20160301.tar.gz</a></td>
881 <td class="filesize">34827</td>
882 <td class="filedate">01/03/2016 18:08:23 +0000</td>
883 <td class="comment">MiniSSDPd source code</td>
884 <td></td>
885</tr>
886<tr>
887 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160222.tar.gz'>miniupnpd-1.9.20160222.tar.gz</a></td>
888 <td class="filesize">217541</td>
889 <td class="filedate">22/02/2016 10:21:40 +0000</td>
890 <td class="comment">MiniUPnP daemon source code</td>
891 <td></td>
892</tr>
893<tr>
894 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160216.tar.gz'>miniupnpd-1.9.20160216.tar.gz</a></td>
895 <td class="filesize">217007</td>
896 <td class="filedate">16/02/2016 12:41:44 +0000</td>
897 <td class="comment">MiniUPnP daemon source code</td>
898 <td></td>
899</tr>
900<tr>
901 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160212.tar.gz'>miniupnpd-1.9.20160212.tar.gz</a></td>
902 <td class="filesize">215866</td>
903 <td class="filedate">12/02/2016 15:22:04 +0000</td>
904 <td class="comment">MiniUPnP daemon source code</td>
905 <td></td>
906</tr>
907<tr>
908 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160209.tar.gz'>miniupnpd-1.9.20160209.tar.gz</a></td>
909 <td class="filesize">213416</td>
910 <td class="filedate">09/02/2016 09:47:03 +0000</td>
911 <td class="comment">MiniUPnP daemon source code</td>
912 <td></td>
913</tr>
914<tr>
915 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20160209.tar.gz'>miniupnpc-1.9.20160209.tar.gz</a></td>
916 <td class="filesize">85268</td>
917 <td class="filedate">09/02/2016 09:44:50 +0000</td>
918 <td class="comment">MiniUPnP client source code</td>
919 <td></td>
920</tr>
921<tr>
922 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160119.tar.gz'>minissdpd-1.5.20160119.tar.gz</a></td>
923 <td class="filesize">34711</td>
924 <td class="filedate">19/01/2016 13:39:51 +0000</td>
925 <td class="comment">MiniSSDPd source code</td>
926 <td></td>
927</tr>
928<tr>
929 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160113.tar.gz'>miniupnpd-1.9.20160113.tar.gz</a></td>
930 <td class="filesize">211437</td>
931 <td class="filedate">13/01/2016 16:03:14 +0000</td>
932 <td class="comment">MiniUPnP daemon source code</td>
933 <td></td>
934</tr>
935<tr>
936 <td class="filename"><a href='download.php?file=minissdpd-1.5.tar.gz'>minissdpd-1.5.tar.gz</a></td>
937 <td class="filesize">34404</td>
938 <td class="filedate">13/01/2016 15:26:53 +0000</td>
939 <td class="comment">MiniSSDPd release source code</td>
940 <td></td>
941</tr>
942<tr>
943 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151212.tar.gz'>miniupnpd-1.9.20151212.tar.gz</a></td>
944 <td class="filesize">210912</td>
945 <td class="filedate">12/12/2015 10:06:07 +0000</td>
946 <td class="comment">MiniUPnP daemon source code</td>
947 <td></td>
948</tr>
949<tr>
950 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151118.tar.gz'>miniupnpd-1.9.20151118.tar.gz</a></td>
951 <td class="filesize">210322</td>
952 <td class="filedate">18/11/2015 08:59:46 +0000</td>
953 <td class="comment">MiniUPnP daemon source code</td>
954 <td></td>
955</tr>
956<tr>
957 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151026.tar.gz'>miniupnpc-1.9.20151026.tar.gz</a></td>
958 <td class="filesize">84208</td>
959 <td class="filedate">26/10/2015 17:07:34 +0000</td>
960 <td class="comment">MiniUPnP client source code</td>
961 <td></td>
962</tr>
963<tr>
964 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151008.tar.gz'>miniupnpc-1.9.20151008.tar.gz</a></td>
965 <td class="filesize">83538</td>
966 <td class="filedate">08/10/2015 16:22:28 +0000</td>
967 <td class="comment">MiniUPnP client source code</td>
968 <td></td>
969</tr>
970<tr>
971 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150922.tar.gz'>miniupnpd-1.9.20150922.tar.gz</a></td>
972 <td class="filesize">208700</td>
973 <td class="filedate">22/09/2015 10:21:50 +0000</td>
974 <td class="comment">MiniUPnP daemon source code</td>
975 <td></td>
976</tr>
977<tr>
978 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150918.zip'>upnpc-exe-win32-20150918.zip</a></td>
979 <td class="filesize">100004</td>
980 <td class="filedate">18/09/2015 12:50:51 +0000</td>
981 <td class="comment">Windows executable</td>
982 <td></td>
983</tr>
984<tr>
985 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150917.tar.gz'>miniupnpc-1.9.20150917.tar.gz</a></td>
986 <td class="filesize">82609</td>
987 <td class="filedate">17/09/2015 14:09:14 +0000</td>
988 <td class="comment">MiniUPnP client source code</td>
989 <td></td>
990</tr>
991<tr>
992 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150824.zip'>upnpc-exe-win32-20150824.zip</a></td>
993 <td class="filesize">99520</td>
994 <td class="filedate">24/08/2015 15:25:18 +0000</td>
995 <td class="comment">Windows executable</td>
996 <td></td>
997</tr>
998<tr>
999 <td class="filename"><a href='download.php?file=minissdpd-1.4.tar.gz'>minissdpd-1.4.tar.gz</a></td>
1000 <td class="filesize">32017</td>
1001 <td class="filedate">06/08/2015 13:38:37 +0000</td>
1002 <td class="comment">MiniSSDPd release source code</td>
1003 <td></td>
1004</tr>
1005<tr>
1006 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150730.tar.gz'>miniupnpc-1.9.20150730.tar.gz</a></td>
1007 <td class="filesize">81431</td>
1008 <td class="filedate">29/07/2015 22:10:00 +0000</td>
1009 <td class="comment">MiniUPnP client source code</td>
1010 <td></td>
1011</tr>
1012<tr>
1013 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150721.tar.gz'>miniupnpd-1.9.20150721.tar.gz</a></td>
1014 <td class="filesize">207562</td>
1015 <td class="filedate">21/07/2015 13:35:51 +0000</td>
1016 <td class="comment">MiniUPnP daemon source code</td>
1017 <td></td>
1018</tr>
1019<tr>
1020 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150721.tar.gz'>miniupnpc-1.9.20150721.tar.gz</a></td>
1021 <td class="filesize">80521</td>
1022 <td class="filedate">21/07/2015 13:27:00 +0000</td>
1023 <td class="comment">MiniUPnP client source code</td>
1024 <td></td>
1025</tr>
1026<tr>
1027 <td class="filename"><a href='download.php?file=libnatpmp-20150609.tar.gz'>libnatpmp-20150609.tar.gz</a></td>
1028 <td class="filesize">24392</td>
1029 <td class="filedate">09/06/2015 15:40:28 +0000</td>
1030 <td class="comment">libnatpmp source code</td>
1031 <td></td>
1032</tr>
1033<tr>
1034 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150609.tar.gz'>miniupnpc-1.9.20150609.tar.gz</a></td>
1035 <td class="filesize">79311</td>
1036 <td class="filedate">09/06/2015 15:39:48 +0000</td>
1037 <td class="comment">MiniUPnP client source code</td>
1038 <td></td>
1039</tr>
1040<tr>
1041 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150609.tar.gz'>miniupnpd-1.9.20150609.tar.gz</a></td>
1042 <td class="filesize">207088</td>
1043 <td class="filedate">09/06/2015 15:39:36 +0000</td>
1044 <td class="comment">MiniUPnP daemon source code</td>
1045 <td></td>
1046</tr>
1047<tr>
1048 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150527.tar.gz'>minissdpd-1.3.20150527.tar.gz</a></td>
1049 <td class="filesize">31025</td>
1050 <td class="filedate">27/05/2015 09:17:15 +0000</td>
1051 <td class="comment">MiniSSDPd source code</td>
1052 <td></td>
1053</tr>
1054<tr>
1055 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150522.tar.gz'>miniupnpc-1.9.20150522.tar.gz</a></td>
1056 <td class="filesize">79080</td>
1057 <td class="filedate">22/05/2015 11:02:27 +0000</td>
1058 <td class="comment">MiniUPnP client source code</td>
1059 <td></td>
1060</tr>
1061<tr>
1062 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150522.tar.gz'>minissdpd-1.3.20150522.tar.gz</a></td>
1063 <td class="filesize">30334</td>
1064 <td class="filedate">22/05/2015 11:02:04 +0000</td>
1065 <td class="comment">MiniSSDPd source code</td>
1066 <td></td>
1067</tr>
1068<tr>
1069 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150430.tar.gz'>miniupnpd-1.9.20150430.tar.gz</a></td>
1070 <td class="filesize">205930</td>
1071 <td class="filedate">30/04/2015 09:09:27 +0000</td>
1072 <td class="comment">MiniUPnP daemon source code</td>
1073 <td></td>
1074</tr>
1075<tr>
1076 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150430.tar.gz'>miniupnpc-1.9.20150430.tar.gz</a></td>
1077 <td class="filesize">78459</td>
1078 <td class="filedate">30/04/2015 08:39:31 +0000</td>
1079 <td class="comment">MiniUPnP client source code</td>
1080 <td></td>
1081</tr>
1082<tr>
1083 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150427.tar.gz'>miniupnpc-1.9.20150427.tar.gz</a></td>
1084 <td class="filesize">78424</td>
1085 <td class="filedate">27/04/2015 16:08:42 +0000</td>
1086 <td class="comment">MiniUPnP client source code</td>
1087 <td></td>
1088</tr>
1089<tr>
1090 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150427.tar.gz'>miniupnpd-1.9.20150427.tar.gz</a></td>
1091 <td class="filesize">191157</td>
1092 <td class="filedate">27/04/2015 16:08:27 +0000</td>
1093 <td class="comment">MiniUPnP daemon source code</td>
1094 <td></td>
1095</tr>
1096<tr>
1097 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150307.tar.gz'>miniupnpd-1.9.20150307.tar.gz</a></td>
1098 <td class="filesize">190913</td>
1099 <td class="filedate">07/03/2015 16:11:51 +0000</td>
1100 <td class="comment">MiniUPnP daemon source code</td>
1101 <td></td>
1102</tr>
1103<tr>
1104 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150206.tar.gz'>miniupnpc-1.9.20150206.tar.gz</a></td>
1105 <td class="filesize">76864</td>
1106 <td class="filedate">06/02/2015 14:38:00 +0000</td>
1107 <td class="comment">MiniUPnP client source code</td>
1108 <td></td>
1109</tr>
1110<tr>
1111 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141209.tar.gz'>miniupnpd-1.9.20141209.tar.gz</a></td>
1112 <td class="filesize">193183</td>
1113 <td class="filedate">09/12/2014 09:58:34 +0000</td>
1114 <td class="comment">MiniUPnP daemon source code</td>
1115 <td></td>
1116</tr>
1117<tr>
1118 <td class="filename"><a href='download.php?file=minissdpd-1.3.tar.gz'>minissdpd-1.3.tar.gz</a></td>
1119 <td class="filesize">30326</td>
1120 <td class="filedate">09/12/2014 09:57:30 +0000</td>
1121 <td class="comment">MiniSSDPd release source code</td>
1122 <td></td>
1123</tr>
1124<tr>
1125 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141204.tar.gz'>minissdpd-1.2.20141204.tar.gz</a></td>
1126 <td class="filesize">26978</td>
1127 <td class="filedate">04/12/2014 10:55:26 +0000</td>
1128 <td class="comment">MiniSSDPd source code</td>
1129 <td></td>
1130</tr>
1131<tr>
1132 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141204.tar.gz'>miniupnpd-1.9.20141204.tar.gz</a></td>
1133 <td class="filesize">192597</td>
1134 <td class="filedate">04/12/2014 10:55:03 +0000</td>
1135 <td class="comment">MiniUPnP daemon source code</td>
1136 <td></td>
1137</tr>
1138<tr>
1139 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141128.tar.gz'>minissdpd-1.2.20141128.tar.gz</a></td>
1140 <td class="filesize">26795</td>
1141 <td class="filedate">28/11/2014 16:33:10 +0000</td>
1142 <td class="comment">MiniSSDPd source code</td>
1143 <td></td>
1144</tr>
1145<tr>
1146 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141128.tar.gz'>miniupnpd-1.9.20141128.tar.gz</a></td>
1147 <td class="filesize">192558</td>
1148 <td class="filedate">28/11/2014 13:31:36 +0000</td>
1149 <td class="comment">MiniUPnP daemon source code</td>
1150 <td></td>
1151</tr>
1152<tr>
1153 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141128.tar.gz'>miniupnpc-1.9.20141128.tar.gz</a></td>
1154 <td class="filesize">76541</td>
1155 <td class="filedate">28/11/2014 13:31:15 +0000</td>
1156 <td class="comment">MiniUPnP client source code</td>
1157 <td></td>
1158</tr>
1159<tr>
1160 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141117.tar.gz'>miniupnpc-1.9.20141117.tar.gz</a></td>
1161 <td class="filesize">73865</td>
1162 <td class="filedate">17/11/2014 09:51:36 +0000</td>
1163 <td class="comment">MiniUPnP client source code</td>
1164 <td></td>
1165</tr>
1166<tr>
1167 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141113.tar.gz'>miniupnpc-1.9.20141113.tar.gz</a></td>
1168 <td class="filesize">72857</td>
1169 <td class="filedate">13/11/2014 10:36:44 +0000</td>
1170 <td class="comment">MiniUPnP client source code</td>
1171 <td></td>
1172</tr>
1173<tr>
1174 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141108.tar.gz'>minissdpd-1.2.20141108.tar.gz</a></td>
1175 <td class="filesize">22001</td>
1176 <td class="filedate">08/11/2014 13:55:41 +0000</td>
1177 <td class="comment">MiniSSDPd source code</td>
1178 <td></td>
1179</tr>
1180<tr>
1181 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141108.tar.gz'>miniupnpc-1.9.20141108.tar.gz</a></td>
1182 <td class="filesize">72781</td>
1183 <td class="filedate">08/11/2014 13:53:48 +0000</td>
1184 <td class="comment">MiniUPnP client source code</td>
1185 <td></td>
1186</tr>
1187<tr>
1188 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141108.tar.gz'>miniupnpd-1.9.20141108.tar.gz</a></td>
1189 <td class="filesize">192413</td>
1190 <td class="filedate">08/11/2014 13:53:38 +0000</td>
1191 <td class="comment">MiniUPnP daemon source code</td>
1192 <td></td>
1193</tr>
1194<tr>
1195 <td class="filename"><a href='download.php?file=miniupnpd-1.9.tar.gz'>miniupnpd-1.9.tar.gz</a></td>
1196 <td class="filesize">192183</td>
1197 <td class="filedate">27/10/2014 16:45:34 +0000</td>
1198 <td class="comment">MiniUPnP daemon release source code</td>
1199 <td></td>
1200</tr>
1201<tr>
1202 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141027.tar.gz'>miniupnpc-1.9.20141027.tar.gz</a></td>
1203 <td class="filesize">76763</td>
1204 <td class="filedate">27/10/2014 16:45:25 +0000</td>
1205 <td class="comment">MiniUPnP client source code</td>
1206 <td></td>
1207</tr>
1208<tr>
1209 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141022.tar.gz'>miniupnpd-1.8.20141022.tar.gz</a></td>
1210 <td class="filesize">191630</td>
1211 <td class="filedate">22/10/2014 09:17:41 +0000</td>
1212 <td class="comment">MiniUPnP daemon source code</td>
1213 <td></td>
1214</tr>
1215<tr>
1216 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141021.tar.gz'>miniupnpd-1.8.20141021.tar.gz</a></td>
1217 <td class="filesize">191270</td>
1218 <td class="filedate">21/10/2014 14:18:58 +0000</td>
1219 <td class="comment">MiniUPnP daemon source code</td>
1220 <td></td>
1221</tr>
1222<tr>
1223 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140911.tar.gz'>miniupnpc-1.9.20140911.tar.gz</a></td>
1224 <td class="filesize">76855</td>
1225 <td class="filedate">11/09/2014 14:15:23 +0000</td>
1226 <td class="comment">MiniUPnP client source code</td>
1227 <td></td>
1228</tr>
1229<tr>
1230 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140906.tar.gz'>minissdpd-1.2.20140906.tar.gz</a></td>
1231 <td class="filesize">21956</td>
1232 <td class="filedate">06/09/2014 08:34:10 +0000</td>
1233 <td class="comment">MiniSSDPd source code</td>
1234 <td></td>
1235</tr>
1236<tr>
1237 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140906.tar.gz'>miniupnpd-1.8.20140906.tar.gz</a></td>
1238 <td class="filesize">191183</td>
1239 <td class="filedate">06/09/2014 08:34:02 +0000</td>
1240 <td class="comment">MiniUPnP daemon source code</td>
1241 <td></td>
1242</tr>
1243<tr>
1244 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140906.tar.gz'>miniupnpc-1.9.20140906.tar.gz</a></td>
1245 <td class="filesize">76791</td>
1246 <td class="filedate">06/09/2014 08:33:45 +0000</td>
1247 <td class="comment">MiniUPnP client source code</td>
1248 <td></td>
1249</tr>
1250<tr>
1251 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140701.tar.gz'>miniupnpc-1.9.20140701.tar.gz</a></td>
1252 <td class="filesize">76735</td>
1253 <td class="filedate">01/07/2014 13:06:51 +0000</td>
1254 <td class="comment">MiniUPnP client source code</td>
1255 <td></td>
1256</tr>
1257<tr>
1258 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140610.tar.gz'>miniupnpc-1.9.20140610.tar.gz</a></td>
1259 <td class="filesize">76674</td>
1260 <td class="filedate">10/06/2014 10:28:27 +0000</td>
1261 <td class="comment">MiniUPnP client source code</td>
1262 <td></td>
1263</tr>
1264<tr>
1265 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140610.tar.gz'>minissdpd-1.2.20140610.tar.gz</a></td>
1266 <td class="filesize">21909</td>
1267 <td class="filedate">10/06/2014 10:03:29 +0000</td>
1268 <td class="comment">MiniSSDPd source code</td>
1269 <td></td>
1270</tr>
1271<tr>
1272 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140523.tar.gz'>miniupnpd-1.8.20140523.tar.gz</a></td>
1273 <td class="filesize">190936</td>
1274 <td class="filedate">23/05/2014 15:48:03 +0000</td>
1275 <td class="comment">MiniUPnP daemon source code</td>
1276 <td></td>
1277</tr>
1278<tr>
1279 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140422.zip'>upnpc-exe-win32-20140422.zip</a></td>
1280 <td class="filesize">97505</td>
1281 <td class="filedate">22/04/2014 10:10:07 +0000</td>
1282 <td class="comment">Windows executable</td>
1283 <td></td>
1284</tr>
1285<tr>
1286 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140422.tar.gz'>miniupnpd-1.8.20140422.tar.gz</a></td>
1287 <td class="filesize">187225</td>
1288 <td class="filedate">22/04/2014 08:58:56 +0000</td>
1289 <td class="comment">MiniUPnP daemon source code</td>
1290 <td></td>
1291</tr>
1292<tr>
1293 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140401.tar.gz'>miniupnpd-1.8.20140401.tar.gz</a></td>
1294 <td class="filesize">183131</td>
1295 <td class="filedate">01/04/2014 10:07:20 +0000</td>
1296 <td class="comment">MiniUPnP daemon source code</td>
1297 <td></td>
1298</tr>
1299<tr>
1300 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140401.tar.gz'>miniupnpc-1.9.20140401.tar.gz</a></td>
1301 <td class="filesize">74703</td>
1302 <td class="filedate">01/04/2014 09:49:46 +0000</td>
1303 <td class="comment">MiniUPnP client source code</td>
1304 <td></td>
1305</tr>
1306<tr>
1307 <td class="filename"><a href='download.php?file=libnatpmp-20140401.tar.gz'>libnatpmp-20140401.tar.gz</a></td>
1308 <td class="filesize">23302</td>
1309 <td class="filedate">01/04/2014 09:49:44 +0000</td>
1310 <td class="comment">libnatpmp source code</td>
1311 <td></td>
1312</tr>
1313<tr>
1314 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140313.tar.gz'>miniupnpd-1.8.20140313.tar.gz</a></td>
1315 <td class="filesize">177120</td>
1316 <td class="filedate">13/03/2014 10:39:11 +0000</td>
1317 <td class="comment">MiniUPnP daemon source code</td>
1318 <td></td>
1319</tr>
1320<tr>
1321 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140310.tar.gz'>miniupnpd-1.8.20140310.tar.gz</a></td>
1322 <td class="filesize">176585</td>
1323 <td class="filedate">09/03/2014 23:16:49 +0000</td>
1324 <td class="comment">MiniUPnP daemon source code</td>
1325 <td></td>
1326</tr>
1327<tr>
1328 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140225.tar.gz'>miniupnpd-1.8.20140225.tar.gz</a></td>
1329 <td class="filesize">175183</td>
1330 <td class="filedate">25/02/2014 11:01:29 +0000</td>
1331 <td class="comment">MiniUPnP daemon source code</td>
1332 <td></td>
1333</tr>
1334<tr>
1335 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140203.tar.gz'>miniupnpd-1.8.20140203.tar.gz</a></td>
1336 <td class="filesize">170112</td>
1337 <td class="filedate">03/02/2014 09:56:05 +0000</td>
1338 <td class="comment">MiniUPnP daemon source code</td>
1339 <td></td>
1340</tr>
1341<tr>
1342 <td class="filename"><a href='download.php?file=miniupnpc-1.9.tar.gz'>miniupnpc-1.9.tar.gz</a></td>
1343 <td class="filesize">74230</td>
1344 <td class="filedate">31/01/2014 13:57:40 +0000</td>
1345 <td class="comment">MiniUPnP client release source code</td>
1346 <td></td>
1347</tr>
1348<tr>
1349 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140127.tar.gz'>miniupnpd-1.8.20140127.tar.gz</a></td>
1350 <td class="filesize">170467</td>
1351 <td class="filedate">27/01/2014 11:25:34 +0000</td>
1352 <td class="comment">MiniUPnP daemon source code</td>
1353 <td></td>
1354</tr>
1355<tr>
1356 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140117.zip'>upnpc-exe-win32-20140117.zip</a></td>
1357 <td class="filesize">97270</td>
1358 <td class="filedate">17/01/2014 11:37:53 +0000</td>
1359 <td class="comment">Windows executable</td>
1360 <td></td>
1361</tr>
1362<tr>
1363 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131216.tar.gz'>miniupnpd-1.8.20131216.tar.gz</a></td>
1364 <td class="filesize">170277</td>
1365 <td class="filedate">16/12/2013 16:15:40 +0000</td>
1366 <td class="comment">MiniUPnP daemon source code</td>
1367 <td></td>
1368</tr>
1369<tr>
1370 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131213.tar.gz'>miniupnpd-1.8.20131213.tar.gz</a></td>
1371 <td class="filesize">169753</td>
1372 <td class="filedate">13/12/2013 16:18:10 +0000</td>
1373 <td class="comment">MiniUPnP daemon source code</td>
1374 <td></td>
1375</tr>
1376<tr>
1377 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131209.tar.gz'>miniupnpc-1.8.20131209.tar.gz</a></td>
1378 <td class="filesize">73900</td>
1379 <td class="filedate">09/12/2013 20:52:54 +0000</td>
1380 <td class="comment">MiniUPnP client source code</td>
1381 <td></td>
1382</tr>
1383<tr>
1384 <td class="filename"><a href='download.php?file=libnatpmp-20131126.tar.gz'>libnatpmp-20131126.tar.gz</a></td>
1385 <td class="filesize">22972</td>
1386 <td class="filedate">26/11/2013 08:51:36 +0000</td>
1387 <td class="comment">libnatpmp source code</td>
1388 <td></td>
1389</tr>
1390<tr>
1391 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131007.tar.gz'>miniupnpc-1.8.20131007.tar.gz</a></td>
1392 <td class="filesize">73750</td>
1393 <td class="filedate">07/10/2013 10:10:25 +0000</td>
1394 <td class="comment">MiniUPnP client source code</td>
1395 <td></td>
1396</tr>
1397<tr>
1398 <td class="filename"><a href='download.php?file=libnatpmp-20130911.tar.gz'>libnatpmp-20130911.tar.gz</a></td>
1399 <td class="filesize">18744</td>
1400 <td class="filedate">11/09/2013 07:35:51 +0000</td>
1401 <td class="comment">libnatpmp source code</td>
1402 <td></td>
1403</tr>
1404<tr>
1405 <td class="filename"><a href='download.php?file=libnatpmp-20130910.tar.gz'>libnatpmp-20130910.tar.gz</a></td>
1406 <td class="filesize">18734</td>
1407 <td class="filedate">10/09/2013 20:15:34 +0000</td>
1408 <td class="comment">libnatpmp source code</td>
1409 <td></td>
1410</tr>
1411<tr>
1412 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130907.tar.gz'>minissdpd-1.2.20130907.tar.gz</a></td>
1413 <td class="filesize">20237</td>
1414 <td class="filedate">07/09/2013 06:46:31 +0000</td>
1415 <td class="comment">MiniSSDPd source code</td>
1416 <td></td>
1417</tr>
1418<tr>
1419 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130819.tar.gz'>minissdpd-1.2.20130819.tar.gz</a></td>
1420 <td class="filesize">20772</td>
1421 <td class="filedate">19/08/2013 16:50:29 +0000</td>
1422 <td class="comment">MiniSSDPd source code</td>
1423 <td></td>
1424</tr>
1425<tr>
1426 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130801.tar.gz'>miniupnpc-1.8.20130801.tar.gz</a></td>
1427 <td class="filesize">73426</td>
1428 <td class="filedate">01/08/2013 21:38:05 +0000</td>
1429 <td class="comment">MiniUPnP client source code</td>
1430 <td></td>
1431</tr>
1432<tr>
1433 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130730.tar.gz'>miniupnpd-1.8.20130730.tar.gz</a></td>
1434 <td class="filesize">149904</td>
1435 <td class="filedate">30/07/2013 11:37:48 +0000</td>
1436 <td class="comment">MiniUPnP daemon source code</td>
1437 <td></td>
1438</tr>
1439<tr>
1440 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130607.tar.gz'>miniupnpd-1.8.20130607.tar.gz</a></td>
1441 <td class="filesize">149521</td>
1442 <td class="filedate">07/06/2013 08:46:17 +0000</td>
1443 <td class="comment">MiniUPnP daemon source code</td>
1444 <td></td>
1445</tr>
1446<tr>
1447 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130521.tar.gz'>miniupnpd-1.8.20130521.tar.gz</a></td>
1448 <td class="filesize">149276</td>
1449 <td class="filedate">21/05/2013 09:01:33 +0000</td>
1450 <td class="comment">MiniUPnP daemon source code</td>
1451 <td></td>
1452</tr>
1453<tr>
1454 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130503.tar.gz'>miniupnpd-1.8.20130503.tar.gz</a></td>
1455 <td class="filesize">148420</td>
1456 <td class="filedate">03/05/2013 19:27:16 +0000</td>
1457 <td class="comment">MiniUPnP daemon source code</td>
1458 <td></td>
1459</tr>
1460<tr>
1461 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130503.tar.gz'>miniupnpc-1.8.20130503.tar.gz</a></td>
1462 <td class="filesize">71858</td>
1463 <td class="filedate">03/05/2013 19:27:07 +0000</td>
1464 <td class="comment">MiniUPnP client source code</td>
1465 <td></td>
1466</tr>
1467<tr>
1468 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130426.tar.gz'>miniupnpd-1.8.20130426.tar.gz</a></td>
1469 <td class="filesize">147890</td>
1470 <td class="filedate">26/04/2013 16:57:20 +0000</td>
1471 <td class="comment">MiniUPnP daemon source code</td>
1472 <td></td>
1473</tr>
1474<tr>
1475 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130211.tar.gz'>miniupnpc-1.8.20130211.tar.gz</a></td>
1476 <td class="filesize">70723</td>
1477 <td class="filedate">11/02/2013 10:32:44 +0000</td>
1478 <td class="comment">MiniUPnP client source code</td>
1479 <td></td>
1480</tr>
1481<tr>
1482 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130207.tar.gz'>miniupnpd-1.8.20130207.tar.gz</a></td>
1483 <td class="filesize">147325</td>
1484 <td class="filedate">07/02/2013 12:29:32 +0000</td>
1485 <td class="comment">MiniUPnP daemon source code</td>
1486 <td></td>
1487</tr>
1488<tr>
1489 <td class="filename"><a href='download.php?file=miniupnpc-1.8.tar.gz'>miniupnpc-1.8.tar.gz</a></td>
1490 <td class="filesize">70624</td>
1491 <td class="filedate">06/02/2013 14:31:06 +0000</td>
1492 <td class="comment">MiniUPnP client release source code</td>
1493 <td></td>
1494</tr>
1495<tr>
1496 <td class="filename"><a href='download.php?file=miniupnpd-1.8.tar.gz'>miniupnpd-1.8.tar.gz</a></td>
1497 <td class="filesize">146679</td>
1498 <td class="filedate">06/02/2013 14:30:59 +0000</td>
1499 <td class="comment">MiniUPnP daemon release source code</td>
1500 <td></td>
1501</tr>
1502<tr>
1503 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20121009.zip'>upnpc-exe-win32-20121009.zip</a></td>
1504 <td class="filesize">96513</td>
1505 <td class="filedate">09/10/2012 17:54:12 +0000</td>
1506 <td class="comment">Windows executable</td>
1507 <td></td>
1508</tr>
1509<tr>
1510 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20121005.tar.gz'>miniupnpd-1.7.20121005.tar.gz</a></td>
1511 <td class="filesize">144393</td>
1512 <td class="filedate">04/10/2012 22:39:05 +0000</td>
1513 <td class="comment">MiniUPnP daemon source code</td>
1514 <td></td>
1515</tr>
1516<tr>
1517 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120830.tar.gz'>miniupnpc-1.7.20120830.tar.gz</a></td>
1518 <td class="filesize">70074</td>
1519 <td class="filedate">30/08/2012 08:41:51 +0000</td>
1520 <td class="comment">MiniUPnP client source code</td>
1521 <td></td>
1522</tr>
1523<tr>
1524 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120824.tar.gz'>miniupnpd-1.7.20120824.tar.gz</a></td>
1525 <td class="filesize">141960</td>
1526 <td class="filedate">24/08/2012 18:15:01 +0000</td>
1527 <td class="comment">MiniUPnP daemon source code</td>
1528 <td></td>
1529</tr>
1530<tr>
1531 <td class="filename"><a href='download.php?file=libnatpmp-20120821.tar.gz'>libnatpmp-20120821.tar.gz</a></td>
1532 <td class="filesize">17832</td>
1533 <td class="filedate">21/08/2012 17:24:46 +0000</td>
1534 <td class="comment">libnatpmp source code</td>
1535 <td></td>
1536</tr>
1537<tr>
1538 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120714.tar.gz'>miniupnpc-1.7.20120714.tar.gz</a></td>
1539 <td class="filesize">69570</td>
1540 <td class="filedate">14/07/2012 14:40:47 +0000</td>
1541 <td class="comment">MiniUPnP client source code</td>
1542 <td></td>
1543</tr>
1544<tr>
1545 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120711.tar.gz'>miniupnpc-1.7.20120711.tar.gz</a></td>
1546 <td class="filesize">69580</td>
1547 <td class="filedate">10/07/2012 22:27:05 +0000</td>
1548 <td class="comment">MiniUPnP client source code</td>
1549 <td></td>
1550</tr>
1551<tr>
1552 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120711.tar.gz'>miniupnpd-1.7.20120711.tar.gz</a></td>
1553 <td class="filesize">141380</td>
1554 <td class="filedate">10/07/2012 22:26:58 +0000</td>
1555 <td class="comment">MiniUPnP daemon source code</td>
1556 <td></td>
1557</tr>
1558<tr>
1559 <td class="filename"><a href='download.php?file=miniupnpd-1.7.tar.gz'>miniupnpd-1.7.tar.gz</a></td>
1560 <td class="filesize">138047</td>
1561 <td class="filedate">27/05/2012 23:13:30 +0000</td>
1562 <td class="comment">MiniUPnP daemon release source code</td>
1563 <td></td>
1564</tr>
1565<tr>
1566 <td class="filename"><a href='download.php?file=miniupnpc-1.7.tar.gz'>miniupnpc-1.7.tar.gz</a></td>
1567 <td class="filesize">68327</td>
1568 <td class="filedate">24/05/2012 18:17:48 +0000</td>
1569 <td class="comment">MiniUPnP client release source code</td>
1570 <td></td>
1571</tr>
1572<tr>
1573 <td class="filename"><a href='download.php?file=minissdpd-1.2.tar.gz'>minissdpd-1.2.tar.gz</a></td>
1574 <td class="filesize">19874</td>
1575 <td class="filedate">24/05/2012 18:06:24 +0000</td>
1576 <td class="comment">MiniSSDPd release source code</td>
1577 <td></td>
1578</tr>
1579<tr>
1580 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120509.tar.gz'>miniupnpd-1.6.20120509.tar.gz</a></td>
1581 <td class="filesize">137147</td>
1582 <td class="filedate">09/05/2012 10:45:44 +0000</td>
1583 <td class="comment">MiniUPnP daemon source code</td>
1584 <td></td>
1585</tr>
1586<tr>
1587 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120509.tar.gz'>miniupnpc-1.6.20120509.tar.gz</a></td>
1588 <td class="filesize">68205</td>
1589 <td class="filedate">09/05/2012 10:45:41 +0000</td>
1590 <td class="comment">MiniUPnP client source code</td>
1591 <td></td>
1592</tr>
1593<tr>
1594 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120509.tar.gz'>minissdpd-1.1.20120509.tar.gz</a></td>
1595 <td class="filesize">18123</td>
1596 <td class="filedate">09/05/2012 10:45:39 +0000</td>
1597 <td class="comment">MiniSSDPd source code</td>
1598 <td></td>
1599</tr>
1600<tr>
1601 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120502.tar.gz'>miniupnpd-1.6.20120502.tar.gz</a></td>
1602 <td class="filesize">136688</td>
1603 <td class="filedate">01/05/2012 22:51:18 +0000</td>
1604 <td class="comment">MiniUPnP daemon source code</td>
1605 <td></td>
1606</tr>
1607<tr>
1608 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120502.tar.gz'>miniupnpc-1.6.20120502.tar.gz</a></td>
1609 <td class="filesize">68170</td>
1610 <td class="filedate">01/05/2012 22:51:11 +0000</td>
1611 <td class="comment">MiniUPnP client source code</td>
1612 <td></td>
1613</tr>
1614<tr>
1615 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120426.tar.gz'>miniupnpd-1.6.20120426.tar.gz</a></td>
1616 <td class="filesize">134764</td>
1617 <td class="filedate">26/04/2012 16:24:29 +0000</td>
1618 <td class="comment">MiniUPnP daemon source code</td>
1619 <td></td>
1620</tr>
1621<tr>
1622 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120424.tar.gz'>miniupnpd-1.6.20120424.tar.gz</a></td>
1623 <td class="filesize">132522</td>
1624 <td class="filedate">23/04/2012 22:43:17 +0000</td>
1625 <td class="comment">MiniUPnP daemon source code</td>
1626 <td></td>
1627</tr>
1628<tr>
1629 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120424.tar.gz'>miniupnpc-1.6.20120424.tar.gz</a></td>
1630 <td class="filesize">68067</td>
1631 <td class="filedate">23/04/2012 22:43:10 +0000</td>
1632 <td class="comment">MiniUPnP client source code</td>
1633 <td></td>
1634</tr>
1635<tr>
1636 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120420.tar.gz'>miniupnpd-1.6.20120420.tar.gz</a></td>
1637 <td class="filesize">131972</td>
1638 <td class="filedate">20/04/2012 14:58:57 +0000</td>
1639 <td class="comment">MiniUPnP daemon source code</td>
1640 <td></td>
1641</tr>
1642<tr>
1643 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120420.tar.gz'>miniupnpc-1.6.20120420.tar.gz</a></td>
1644 <td class="filesize">68068</td>
1645 <td class="filedate">20/04/2012 14:58:39 +0000</td>
1646 <td class="comment">MiniUPnP client source code</td>
1647 <td></td>
1648</tr>
1649<tr>
1650 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120419.tar.gz'>miniupnpd-1.6.20120419.tar.gz</a></td>
1651 <td class="filesize">131088</td>
1652 <td class="filedate">18/04/2012 23:41:36 +0000</td>
1653 <td class="comment">MiniUPnP daemon source code</td>
1654 <td></td>
1655</tr>
1656<tr>
1657 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120418.tar.gz'>miniupnpd-1.6.20120418.tar.gz</a></td>
1658 <td class="filesize">130879</td>
1659 <td class="filedate">18/04/2012 21:01:10 +0000</td>
1660 <td class="comment">MiniUPnP daemon source code</td>
1661 <td></td>
1662</tr>
1663<tr>
1664 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120410.tar.gz'>minissdpd-1.1.20120410.tar.gz</a></td>
1665 <td class="filesize">18059</td>
1666 <td class="filedate">09/04/2012 22:45:38 +0000</td>
1667 <td class="comment">MiniSSDPd source code</td>
1668 <td></td>
1669</tr>
1670<tr>
1671 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120410.tar.gz'>miniupnpc-1.6.20120410.tar.gz</a></td>
1672 <td class="filesize">67934</td>
1673 <td class="filedate">09/04/2012 22:45:10 +0000</td>
1674 <td class="comment">MiniUPnP client source code</td>
1675 <td></td>
1676</tr>
1677<tr>
1678 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120406.tar.gz'>miniupnpd-1.6.20120406.tar.gz</a></td>
1679 <td class="filesize">128992</td>
1680 <td class="filedate">06/04/2012 17:52:57 +0000</td>
1681 <td class="comment">MiniUPnP daemon source code</td>
1682 <td></td>
1683</tr>
1684<tr>
1685 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120320.tar.gz'>miniupnpc-1.6.20120320.tar.gz</a></td>
1686 <td class="filesize">67374</td>
1687 <td class="filedate">20/03/2012 16:55:48 +0000</td>
1688 <td class="comment">MiniUPnP client source code</td>
1689 <td></td>
1690</tr>
1691<tr>
1692 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120320.tar.gz'>miniupnpd-1.6.20120320.tar.gz</a></td>
1693 <td class="filesize">127968</td>
1694 <td class="filedate">20/03/2012 16:46:07 +0000</td>
1695 <td class="comment">MiniUPnP daemon source code</td>
1696 <td></td>
1697</tr>
1698<tr>
1699 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120305.tar.gz'>miniupnpd-1.6.20120305.tar.gz</a></td>
1700 <td class="filesize">126985</td>
1701 <td class="filedate">05/03/2012 20:42:01 +0000</td>
1702 <td class="comment">MiniUPnP daemon source code</td>
1703 <td></td>
1704</tr>
1705<tr>
1706 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120207.tar.gz'>miniupnpd-1.6.20120207.tar.gz</a></td>
1707 <td class="filesize">127425</td>
1708 <td class="filedate">07/02/2012 10:21:16 +0000</td>
1709 <td class="comment">MiniUPnP daemon source code</td>
1710 <td></td>
1711</tr>
1712<tr>
1713 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120203.tar.gz'>miniupnpd-1.6.20120203.tar.gz</a></td>
1714 <td class="filesize">126599</td>
1715 <td class="filedate">03/02/2012 15:14:13 +0000</td>
1716 <td class="comment">MiniUPnP daemon source code</td>
1717 <td></td>
1718</tr>
1719<tr>
1720 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120125.tar.gz'>miniupnpc-1.6.20120125.tar.gz</a></td>
1721 <td class="filesize">67354</td>
1722 <td class="filedate">25/01/2012 21:12:28 +0000</td>
1723 <td class="comment">MiniUPnP client source code</td>
1724 <td></td>
1725</tr>
1726<tr>
1727 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120121.tar.gz'>miniupnpc-1.6.20120121.tar.gz</a></td>
1728 <td class="filesize">67347</td>
1729 <td class="filedate">21/01/2012 14:07:41 +0000</td>
1730 <td class="comment">MiniUPnP client source code</td>
1731 <td></td>
1732</tr>
1733<tr>
1734 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120121.tar.gz'>miniupnpd-1.6.20120121.tar.gz</a></td>
1735 <td class="filesize">126021</td>
1736 <td class="filedate">21/01/2012 14:07:33 +0000</td>
1737 <td class="comment">MiniUPnP daemon source code</td>
1738 <td></td>
1739</tr>
1740<tr>
1741 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120121.tar.gz'>minissdpd-1.1.20120121.tar.gz</a></td>
1742 <td class="filesize">17762</td>
1743 <td class="filedate">21/01/2012 14:07:16 +0000</td>
1744 <td class="comment">MiniSSDPd source code</td>
1745 <td></td>
1746</tr>
1747<tr>
1748 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20120121.zip'>upnpc-exe-win32-20120121.zip</a></td>
1749 <td class="filesize">94575</td>
1750 <td class="filedate">21/01/2012 13:59:11 +0000</td>
1751 <td class="comment">Windows executable</td>
1752 <td></td>
1753</tr>
1754<tr>
1755 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20111212.zip'>upnpc-exe-win32-20111212.zip</a></td>
1756 <td class="filesize">94507</td>
1757 <td class="filedate">12/12/2011 12:33:48 +0000</td>
1758 <td class="comment">Windows executable</td>
1759 <td></td>
1760</tr>
1761<tr>
1762 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20111118.tar.gz'>miniupnpd-1.6.20111118.tar.gz</a></td>
1763 <td class="filesize">125683</td>
1764 <td class="filedate">18/11/2011 11:26:12 +0000</td>
1765 <td class="comment">MiniUPnP daemon source code</td>
1766 <td></td>
1767</tr>
1768<tr>
1769 <td class="filename"><a href='download.php?file=minissdpd-1.1.20111007.tar.gz'>minissdpd-1.1.20111007.tar.gz</a></td>
1770 <td class="filesize">17611</td>
1771 <td class="filedate">07/10/2011 09:47:51 +0000</td>
1772 <td class="comment">MiniSSDPd source code</td>
1773 <td></td>
1774</tr>
1775<tr>
1776 <td class="filename"><a href='download.php?file=xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
1777 <td class="filesize">10329</td>
1778 <td class="filedate">11/08/2011 15:18:25 +0000</td>
1779 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1780 <td></td>
1781</tr>
1782<tr>
1783 <td class="filename"><a href='download.php?file=xchat-upnp20110811-2.8.8.patch'>xchat-upnp20110811-2.8.8.patch</a></td>
1784 <td class="filesize">11529</td>
1785 <td class="filedate">11/08/2011 15:18:23 +0000</td>
1786 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1787 <td></td>
1788</tr>
1789<tr>
1790 <td class="filename"><a href='download.php?file=libnatpmp-20110808.tar.gz'>libnatpmp-20110808.tar.gz</a></td>
1791 <td class="filesize">17762</td>
1792 <td class="filedate">08/08/2011 21:21:34 +0000</td>
1793 <td class="comment">libnatpmp source code</td>
1794 <td></td>
1795</tr>
1796<tr>
1797 <td class="filename"><a href='download.php?file=libnatpmp-20110730.tar.gz'>libnatpmp-20110730.tar.gz</a></td>
1798 <td class="filesize">17687</td>
1799 <td class="filedate">30/07/2011 13:19:31 +0000</td>
1800 <td class="comment">libnatpmp source code</td>
1801 <td></td>
1802</tr>
1803<tr>
1804 <td class="filename"><a href='download.php?file=minissdpd-1.1.tar.gz'>minissdpd-1.1.tar.gz</a></td>
1805 <td class="filesize">17481</td>
1806 <td class="filedate">30/07/2011 13:17:30 +0000</td>
1807 <td class="comment">MiniSSDPd release source code</td>
1808 <td></td>
1809</tr>
1810<tr>
1811 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20110730.tar.gz'>miniupnpd-1.6.20110730.tar.gz</a></td>
1812 <td class="filesize">125583</td>
1813 <td class="filedate">30/07/2011 13:17:09 +0000</td>
1814 <td class="comment">MiniUPnP daemon source code</td>
1815 <td></td>
1816</tr>
1817<tr>
1818 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110729.tar.gz'>minissdpd-1.0.20110729.tar.gz</a></td>
1819 <td class="filesize">15898</td>
1820 <td class="filedate">29/07/2011 08:47:26 +0000</td>
1821 <td class="comment">MiniSSDPd source code</td>
1822 <td></td>
1823</tr>
1824<tr>
1825 <td class="filename"><a href='download.php?file=miniupnpc-1.6.tar.gz'>miniupnpc-1.6.tar.gz</a></td>
1826 <td class="filesize">66454</td>
1827 <td class="filedate">25/07/2011 18:03:09 +0000</td>
1828 <td class="comment">MiniUPnP client release source code</td>
1829 <td></td>
1830</tr>
1831<tr>
1832 <td class="filename"><a href='download.php?file=miniupnpd-1.6.tar.gz'>miniupnpd-1.6.tar.gz</a></td>
1833 <td class="filesize">124917</td>
1834 <td class="filedate">25/07/2011 16:37:57 +0000</td>
1835 <td class="comment">MiniUPnP daemon release source code</td>
1836 <td></td>
1837</tr>
1838<tr>
1839 <td class="filename"><a href='download.php?file=minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
1840 <td class="filesize">7598</td>
1841 <td class="filedate">25/07/2011 14:57:50 +0000</td>
1842 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1843 <td></td>
1844</tr>
1845<tr>
1846 <td class="filename"><a href='download.php?file=libnatpmp-20110715.tar.gz'>libnatpmp-20110715.tar.gz</a></td>
1847 <td class="filesize">17943</td>
1848 <td class="filedate">15/07/2011 08:31:40 +0000</td>
1849 <td class="comment">libnatpmp source code</td>
1850 <td></td>
1851</tr>
1852<tr>
1853 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110715.tar.gz'>miniupnpd-1.5.20110715.tar.gz</a></td>
1854 <td class="filesize">124519</td>
1855 <td class="filedate">15/07/2011 07:55:17 +0000</td>
1856 <td class="comment">MiniUPnP daemon source code</td>
1857 <td></td>
1858</tr>
1859<tr>
1860 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110714.zip'>upnpc-exe-win32-20110714.zip</a></td>
1861 <td class="filesize">94236</td>
1862 <td class="filedate">13/07/2011 23:16:01 +0000</td>
1863 <td class="comment">Windows executable</td>
1864 <td></td>
1865</tr>
1866<tr>
1867 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110623.tar.gz'>miniupnpd-1.5.20110623.tar.gz</a></td>
1868 <td class="filesize">123529</td>
1869 <td class="filedate">22/06/2011 22:29:15 +0000</td>
1870 <td class="comment">MiniUPnP daemon source code</td>
1871 <td></td>
1872</tr>
1873<tr>
1874 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110620.tar.gz'>miniupnpd-1.5.20110620.tar.gz</a></td>
1875 <td class="filesize">123221</td>
1876 <td class="filedate">20/06/2011 14:11:11 +0000</td>
1877 <td class="comment">MiniUPnP daemon source code</td>
1878 <td></td>
1879</tr>
1880<tr>
1881 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110618.tar.gz'>miniupnpd-1.5.20110618.tar.gz</a></td>
1882 <td class="filesize">123176</td>
1883 <td class="filedate">17/06/2011 23:29:18 +0000</td>
1884 <td class="comment">MiniUPnP daemon source code</td>
1885 <td></td>
1886</tr>
1887<tr>
1888 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110618.tar.gz'>miniupnpc-1.5.20110618.tar.gz</a></td>
1889 <td class="filesize">66401</td>
1890 <td class="filedate">17/06/2011 23:29:17 +0000</td>
1891 <td class="comment">MiniUPnP client source code</td>
1892 <td></td>
1893</tr>
1894<tr>
1895 <td class="filename"><a href='download.php?file=libnatpmp-20110618.tar.gz'>libnatpmp-20110618.tar.gz</a></td>
1896 <td class="filesize">17901</td>
1897 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1898 <td class="comment">libnatpmp source code</td>
1899 <td></td>
1900</tr>
1901<tr>
1902 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110618.tar.gz'>minissdpd-1.0.20110618.tar.gz</a></td>
1903 <td class="filesize">15193</td>
1904 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1905 <td class="comment">MiniSSDPd source code</td>
1906 <td></td>
1907</tr>
1908<tr>
1909 <td class="filename" colspan="2"><a href='download.php?file=minidlna_cvs20110529_minissdp1.patch'>minidlna_cvs20110529_minissdp1.patch</a></td>
1910 <td class="filedate">29/05/2011 21:19:09 +0000</td>
1911 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1912 <td></td>
1913</tr>
1914<tr>
1915 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110528.tar.gz'>miniupnpd-1.5.20110528.tar.gz</a></td>
1916 <td class="filesize">121985</td>
1917 <td class="filedate">28/05/2011 09:39:04 +0000</td>
1918 <td class="comment">MiniUPnP daemon source code</td>
1919 <td></td>
1920</tr>
1921<tr>
1922 <td class="filename"><a href='download.php?file=minidlna_1.0.19_minissdp1.patch'>minidlna_1.0.19_minissdp1.patch</a></td>
1923 <td class="filesize">9080</td>
1924 <td class="filedate">27/05/2011 09:55:04 +0000</td>
1925 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1926 <td></td>
1927</tr>
1928<tr>
1929 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110527.tar.gz'>miniupnpd-1.5.20110527.tar.gz</a></td>
1930 <td class="filesize">120896</td>
1931 <td class="filedate">27/05/2011 08:28:35 +0000</td>
1932 <td class="comment">MiniUPnP daemon source code</td>
1933 <td></td>
1934</tr>
1935<tr>
1936 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110527.tar.gz'>miniupnpc-1.5.20110527.tar.gz</a></td>
1937 <td class="filesize">66279</td>
1938 <td class="filedate">27/05/2011 08:28:34 +0000</td>
1939 <td class="comment">MiniUPnP client source code</td>
1940 <td></td>
1941</tr>
1942<tr>
1943 <td class="filename"><a href='download.php?file=libnatpmp-20110527.tar.gz'>libnatpmp-20110527.tar.gz</a></td>
1944 <td class="filesize">17627</td>
1945 <td class="filedate">27/05/2011 08:28:33 +0000</td>
1946 <td class="comment">libnatpmp source code</td>
1947 <td></td>
1948</tr>
1949<tr>
1950 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110523.tar.gz'>minissdpd-1.0.20110523.tar.gz</a></td>
1951 <td class="filesize">15024</td>
1952 <td class="filedate">23/05/2011 12:55:31 +0000</td>
1953 <td class="comment">MiniSSDPd source code</td>
1954 <td></td>
1955</tr>
1956<tr>
1957 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110520.tar.gz'>miniupnpd-1.5.20110520.tar.gz</a></td>
1958 <td class="filesize">119227</td>
1959 <td class="filedate">20/05/2011 18:00:41 +0000</td>
1960 <td class="comment">MiniUPnP daemon source code</td>
1961 <td></td>
1962</tr>
1963<tr>
1964 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110519.tar.gz'>miniupnpd-1.5.20110519.tar.gz</a></td>
1965 <td class="filesize">114735</td>
1966 <td class="filedate">18/05/2011 22:29:06 +0000</td>
1967 <td class="comment">MiniUPnP daemon source code</td>
1968 <td></td>
1969</tr>
1970<tr>
1971 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110516.tar.gz'>miniupnpd-1.5.20110516.tar.gz</a></td>
1972 <td class="filesize">113348</td>
1973 <td class="filedate">16/05/2011 09:32:51 +0000</td>
1974 <td class="comment">MiniUPnP daemon source code</td>
1975 <td></td>
1976</tr>
1977<tr>
1978 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110515.tar.gz'>miniupnpd-1.5.20110515.tar.gz</a></td>
1979 <td class="filesize">113135</td>
1980 <td class="filedate">15/05/2011 21:51:29 +0000</td>
1981 <td class="comment">MiniUPnP daemon source code</td>
1982 <td></td>
1983</tr>
1984<tr>
1985 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110515.tar.gz'>miniupnpc-1.5.20110515.tar.gz</a></td>
1986 <td class="filesize">66112</td>
1987 <td class="filedate">15/05/2011 21:51:28 +0000</td>
1988 <td class="comment">MiniUPnP client source code</td>
1989 <td></td>
1990</tr>
1991<tr>
1992 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110513.tar.gz'>miniupnpd-1.5.20110513.tar.gz</a></td>
1993 <td class="filesize">111029</td>
1994 <td class="filedate">13/05/2011 14:03:12 +0000</td>
1995 <td class="comment">MiniUPnP daemon source code</td>
1996 <td></td>
1997</tr>
1998<tr>
1999 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110506.tar.gz'>miniupnpc-1.5.20110506.tar.gz</a></td>
2000 <td class="filesize">65536</td>
2001 <td class="filedate">06/05/2011 16:35:38 +0000</td>
2002 <td class="comment">MiniUPnP client source code</td>
2003 <td></td>
2004</tr>
2005<tr>
2006 <td class="filename"><a href='download.php?file=miniupnpc-1.4-v6.20100505.zip'>miniupnpc-1.4-v6.20100505.zip</a></td>
2007 <td class="filesize">91833</td>
2008 <td class="filedate">18/04/2011 20:14:11 +0000</td>
2009 <td class="comment"></td>
2010 <td></td>
2011</tr>
2012<tr>
2013 <td class="filename"><a href='download.php?file=miniupnpd-1.4-v6.20100823.zip'>miniupnpd-1.4-v6.20100823.zip</a></td>
2014 <td class="filesize">222235</td>
2015 <td class="filedate">18/04/2011 20:14:07 +0000</td>
2016 <td class="comment"></td>
2017 <td></td>
2018</tr>
2019<tr>
2020 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110418.tar.gz'>miniupnpc-1.5.20110418.tar.gz</a></td>
2021 <td class="filesize">61820</td>
2022 <td class="filedate">18/04/2011 20:09:22 +0000</td>
2023 <td class="comment">MiniUPnP client source code</td>
2024 <td></td>
2025</tr>
2026<tr>
2027 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110418.zip'>upnpc-exe-win32-20110418.zip</a></td>
2028 <td class="filesize">94183</td>
2029 <td class="filedate">18/04/2011 17:53:26 +0000</td>
2030 <td class="comment">Windows executable</td>
2031 <td></td>
2032</tr>
2033<tr>
2034 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110314.tar.gz'>miniupnpc-1.5.20110314.tar.gz</a></td>
2035 <td class="filesize">57210</td>
2036 <td class="filedate">14/03/2011 14:27:29 +0000</td>
2037 <td class="comment">MiniUPnP client source code</td>
2038 <td></td>
2039</tr>
2040<tr>
2041 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110309.tar.gz'>miniupnpd-1.5.20110309.tar.gz</a></td>
2042 <td class="filesize">100073</td>
2043 <td class="filedate">09/03/2011 15:36:12 +0000</td>
2044 <td class="comment">MiniUPnP daemon source code</td>
2045 <td></td>
2046</tr>
2047<tr>
2048 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110302.tar.gz'>miniupnpd-1.5.20110302.tar.gz</a></td>
2049 <td class="filesize">100756</td>
2050 <td class="filedate">02/03/2011 16:17:44 +0000</td>
2051 <td class="comment">MiniUPnP daemon source code</td>
2052 <td></td>
2053</tr>
2054<tr>
2055 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110221.tar.gz'>miniupnpd-1.5.20110221.tar.gz</a></td>
2056 <td class="filesize">100092</td>
2057 <td class="filedate">20/02/2011 23:48:17 +0000</td>
2058 <td class="comment">MiniUPnP daemon source code</td>
2059 <td></td>
2060</tr>
2061<tr>
2062 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110215.zip'>upnpc-exe-win32-20110215.zip</a></td>
2063 <td class="filesize">55409</td>
2064 <td class="filedate">15/02/2011 23:05:00 +0000</td>
2065 <td class="comment">Windows executable</td>
2066 <td></td>
2067</tr>
2068<tr>
2069 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110215.tar.gz'>miniupnpc-1.5.20110215.tar.gz</a></td>
2070 <td class="filesize">54880</td>
2071 <td class="filedate">15/02/2011 11:16:04 +0000</td>
2072 <td class="comment">MiniUPnP client source code</td>
2073 <td></td>
2074</tr>
2075<tr>
2076 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110214.tar.gz'>miniupnpd-1.5.20110214.tar.gz</a></td>
2077 <td class="filesize">99629</td>
2078 <td class="filedate">14/02/2011 18:00:43 +0000</td>
2079 <td class="comment">MiniUPnP daemon source code</td>
2080 <td></td>
2081</tr>
2082<tr>
2083 <td class="filename"><a href='download.php?file=minidlna_1.0.18_minissdp1.patch'>minidlna_1.0.18_minissdp1.patch</a></td>
2084 <td class="filesize">9747</td>
2085 <td class="filedate">02/02/2011 15:12:19 +0000</td>
2086 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
2087 <td></td>
2088</tr>
2089<tr>
2090 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110127.tar.gz'>miniupnpd-1.5.20110127.tar.gz</a></td>
2091 <td class="filesize">97421</td>
2092 <td class="filedate">27/01/2011 17:51:25 +0000</td>
2093 <td class="comment">MiniUPnP daemon source code</td>
2094 <td></td>
2095</tr>
2096<tr>
2097 <td class="filename"><a href='download.php?file=miniupnpd-1.5.tar.gz'>miniupnpd-1.5.tar.gz</a></td>
2098 <td class="filesize">98993</td>
2099 <td class="filedate">04/01/2011 09:45:10 +0000</td>
2100 <td class="comment">MiniUPnP daemon release source code</td>
2101 <td></td>
2102</tr>
2103<tr>
2104 <td class="filename"><a href='download.php?file=miniupnpc-1.5.tar.gz'>miniupnpc-1.5.tar.gz</a></td>
2105 <td class="filesize">53309</td>
2106 <td class="filedate">04/01/2011 09:45:06 +0000</td>
2107 <td class="comment">MiniUPnP client release source code</td>
2108 <td></td>
2109</tr>
2110<tr>
2111 <td class="filename"><a href='download.php?file=libnatpmp-20110103.tar.gz'>libnatpmp-20110103.tar.gz</a></td>
2112 <td class="filesize">17529</td>
2113 <td class="filedate">03/01/2011 17:33:16 +0000</td>
2114 <td class="comment">libnatpmp source code</td>
2115 <td></td>
2116</tr>
2117<tr>
2118 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101221.tar.gz'>miniupnpc-1.4.20101221.tar.gz</a></td>
2119 <td class="filesize">52342</td>
2120 <td class="filedate">21/12/2010 16:15:38 +0000</td>
2121 <td class="comment">MiniUPnP client source code</td>
2122 <td></td>
2123</tr>
2124<tr>
2125 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20101213.zip'>upnpc-exe-win32-20101213.zip</a></td>
2126 <td class="filesize">52359</td>
2127 <td class="filedate">12/12/2010 23:44:01 +0000</td>
2128 <td class="comment">Windows executable</td>
2129 <td></td>
2130</tr>
2131<tr>
2132 <td class="filename"><a href='download.php?file=libnatpmp-20101211.tar.gz'>libnatpmp-20101211.tar.gz</a></td>
2133 <td class="filesize">17324</td>
2134 <td class="filedate">11/12/2010 17:20:36 +0000</td>
2135 <td class="comment">libnatpmp source code</td>
2136 <td></td>
2137</tr>
2138<tr>
2139 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101209.tar.gz'>miniupnpc-1.4.20101209.tar.gz</a></td>
2140 <td class="filesize">51900</td>
2141 <td class="filedate">09/12/2010 16:17:30 +0000</td>
2142 <td class="comment">MiniUPnP client source code</td>
2143 <td></td>
2144</tr>
2145<tr>
2146 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100921.tar.gz'>miniupnpd-1.4.20100921.tar.gz</a></td>
2147 <td class="filesize">95483</td>
2148 <td class="filedate">21/09/2010 15:50:00 +0000</td>
2149 <td class="comment">MiniUPnP daemon source code</td>
2150 <td></td>
2151</tr>
2152<tr>
2153 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100825.zip'>upnpc-exe-win32-20100825.zip</a></td>
2154 <td class="filesize">50636</td>
2155 <td class="filedate">25/08/2010 08:42:59 +0000</td>
2156 <td class="comment">Windows executable</td>
2157 <td></td>
2158</tr>
2159<tr>
2160 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100609.tar.gz'>miniupnpc-1.4.20100609.tar.gz</a></td>
2161 <td class="filesize">50390</td>
2162 <td class="filedate">09/06/2010 11:03:11 +0000</td>
2163 <td class="comment">MiniUPnP client source code</td>
2164 <td></td>
2165</tr>
2166<tr>
2167 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100513.zip'>upnpc-exe-win32-20100513.zip</a></td>
2168 <td class="filesize">50950</td>
2169 <td class="filedate">13/05/2010 16:54:33 +0000</td>
2170 <td class="comment">Windows executable</td>
2171 <td></td>
2172</tr>
2173<tr>
2174 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100511.tar.gz'>miniupnpd-1.4.20100511.tar.gz</a></td>
2175 <td class="filesize">93281</td>
2176 <td class="filedate">11/05/2010 16:22:33 +0000</td>
2177 <td class="comment">MiniUPnP daemon source code</td>
2178 <td></td>
2179</tr>
2180<tr>
2181 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100418.zip'>upnpc-exe-win32-20100418.zip</a></td>
2182 <td class="filesize">40758</td>
2183 <td class="filedate">17/04/2010 23:00:37 +0000</td>
2184 <td class="comment">Windows executable</td>
2185 <td></td>
2186</tr>
2187<tr>
2188 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100418.tar.gz'>miniupnpc-1.4.20100418.tar.gz</a></td>
2189 <td class="filesize">50245</td>
2190 <td class="filedate">17/04/2010 22:18:31 +0000</td>
2191 <td class="comment">MiniUPnP client source code</td>
2192 <td></td>
2193</tr>
2194<tr>
2195 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100412.tar.gz'>miniupnpc-1.4.20100412.tar.gz</a></td>
2196 <td class="filesize">50145</td>
2197 <td class="filedate">12/04/2010 20:42:53 +0000</td>
2198 <td class="comment">MiniUPnP client source code</td>
2199 <td></td>
2200</tr>
2201<tr>
2202 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100407.tar.gz'>miniupnpc-1.4.20100407.tar.gz</a></td>
2203 <td class="filesize">49756</td>
2204 <td class="filedate">07/04/2010 10:05:08 +0000</td>
2205 <td class="comment">MiniUPnP client source code</td>
2206 <td></td>
2207</tr>
2208<tr>
2209 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100405.tar.gz'>miniupnpc-1.4.20100405.tar.gz</a></td>
2210 <td class="filesize">49549</td>
2211 <td class="filedate">05/04/2010 14:34:38 +0000</td>
2212 <td class="comment">MiniUPnP client source code</td>
2213 <td></td>
2214</tr>
2215<tr>
2216 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100308.tar.gz'>miniupnpd-1.4.20100308.tar.gz</a></td>
2217 <td class="filesize">92889</td>
2218 <td class="filedate">08/03/2010 17:18:00 +0000</td>
2219 <td class="comment">MiniUPnP daemon source code</td>
2220 <td></td>
2221</tr>
2222<tr>
2223 <td class="filename"><a href='download.php?file=libnatpmp-20100202.tar.gz'>libnatpmp-20100202.tar.gz</a></td>
2224 <td class="filesize">17231</td>
2225 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2226 <td class="comment">libnatpmp source code</td>
2227 <td></td>
2228</tr>
2229<tr>
2230 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100202.tar.gz'>miniupnpc-1.4.20100202.tar.gz</a></td>
2231 <td class="filesize">46710</td>
2232 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2233 <td class="comment">MiniUPnP client source code</td>
2234 <td></td>
2235</tr>
2236<tr>
2237 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100106.tar.gz'>miniupnpc-1.4.20100106.tar.gz</a></td>
2238 <td class="filesize">46659</td>
2239 <td class="filedate">06/01/2010 10:08:21 +0000</td>
2240 <td class="comment">MiniUPnP client source code</td>
2241 <td></td>
2242</tr>
2243<tr>
2244 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091222.tar.gz'>miniupnpd-1.4.20091222.tar.gz</a></td>
2245 <td class="filesize">90993</td>
2246 <td class="filedate">22/12/2009 17:23:48 +0000</td>
2247 <td class="comment">MiniUPnP daemon source code</td>
2248 <td></td>
2249</tr>
2250<tr>
2251 <td class="filename"><a href='download.php?file=libnatpmp-20091219.tar.gz'>libnatpmp-20091219.tar.gz</a></td>
2252 <td class="filesize">16839</td>
2253 <td class="filedate">19/12/2009 14:35:22 +0000</td>
2254 <td class="comment">libnatpmp source code</td>
2255 <td></td>
2256</tr>
2257<tr>
2258 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091213.tar.gz'>miniupnpc-1.4.20091213.tar.gz</a></td>
2259 <td class="filesize">46510</td>
2260 <td class="filedate">12/12/2009 23:05:40 +0000</td>
2261 <td class="comment">MiniUPnP client source code</td>
2262 <td></td>
2263</tr>
2264<tr>
2265 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091211.tar.gz'>miniupnpc-1.4.20091211.tar.gz</a></td>
2266 <td class="filesize">45852</td>
2267 <td class="filedate">11/12/2009 16:43:01 +0000</td>
2268 <td class="comment">MiniUPnP client source code</td>
2269 <td></td>
2270</tr>
2271<tr>
2272 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20091210.zip'>upnpc-exe-win32-20091210.zip</a></td>
2273 <td class="filesize">38666</td>
2274 <td class="filedate">10/12/2009 18:50:27 +0000</td>
2275 <td class="comment">Windows executable</td>
2276 <td></td>
2277</tr>
2278<tr>
2279 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091208.tar.gz'>miniupnpc-1.4.20091208.tar.gz</a></td>
2280 <td class="filesize">43392</td>
2281 <td class="filedate">08/12/2009 10:58:26 +0000</td>
2282 <td class="comment">MiniUPnP client source code</td>
2283 <td></td>
2284</tr>
2285<tr>
2286 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091203.tar.gz'>miniupnpc-1.4.20091203.tar.gz</a></td>
2287 <td class="filesize">42040</td>
2288 <td class="filedate">03/12/2009 13:56:28 +0000</td>
2289 <td class="comment">MiniUPnP client source code</td>
2290 <td></td>
2291</tr>
2292<tr>
2293 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091106.tar.gz'>miniupnpd-1.4.20091106.tar.gz</a></td>
2294 <td class="filesize">90787</td>
2295 <td class="filedate">06/11/2009 21:18:50 +0000</td>
2296 <td class="comment">MiniUPnP daemon source code</td>
2297 <td></td>
2298</tr>
2299<tr>
2300 <td class="filename"><a href='download.php?file=miniupnpd-1.4.tar.gz'>miniupnpd-1.4.tar.gz</a></td>
2301 <td class="filesize">90071</td>
2302 <td class="filedate">30/10/2009 09:20:05 +0000</td>
2303 <td class="comment">MiniUPnP daemon release source code</td>
2304 <td></td>
2305</tr>
2306<tr>
2307 <td class="filename"><a href='download.php?file=miniupnpc-1.4.tar.gz'>miniupnpc-1.4.tar.gz</a></td>
2308 <td class="filesize">41790</td>
2309 <td class="filedate">30/10/2009 09:20:04 +0000</td>
2310 <td class="comment">MiniUPnP client release source code</td>
2311 <td></td>
2312</tr>
2313<tr>
2314 <td class="filename"><a href='download.php?file=miniupnpc-20091016.tar.gz'>miniupnpc-20091016.tar.gz</a></td>
2315 <td class="filesize">41792</td>
2316 <td class="filedate">16/10/2009 09:04:35 +0000</td>
2317 <td class="comment">MiniUPnP client source code</td>
2318 <td></td>
2319</tr>
2320<tr>
2321 <td class="filename"><a href='download.php?file=miniupnpd-20091010.tar.gz'>miniupnpd-20091010.tar.gz</a></td>
2322 <td class="filesize">90043</td>
2323 <td class="filedate">10/10/2009 19:21:30 +0000</td>
2324 <td class="comment">MiniUPnP daemon source code</td>
2325 <td></td>
2326</tr>
2327<tr>
2328 <td class="filename"><a href='download.php?file=miniupnpc-20091010.tar.gz'>miniupnpc-20091010.tar.gz</a></td>
2329 <td class="filesize">41671</td>
2330 <td class="filedate">10/10/2009 19:21:28 +0000</td>
2331 <td class="comment">MiniUPnP client source code</td>
2332 <td></td>
2333</tr>
2334<tr>
2335 <td class="filename"><a href='download.php?file=miniupnpd-20090921.tar.gz'>miniupnpd-20090921.tar.gz</a></td>
2336 <td class="filesize">89476</td>
2337 <td class="filedate">21/09/2009 13:00:04 +0000</td>
2338 <td class="comment">MiniUPnP daemon source code</td>
2339 <td></td>
2340</tr>
2341<tr>
2342 <td class="filename"><a href='download.php?file=miniupnpc-20090921.tar.gz'>miniupnpc-20090921.tar.gz</a></td>
2343 <td class="filesize">41630</td>
2344 <td class="filedate">21/09/2009 13:00:03 +0000</td>
2345 <td class="comment">MiniUPnP client source code</td>
2346 <td></td>
2347</tr>
2348<tr>
2349 <td class="filename"><a href='download.php?file=miniupnpd-20090904.tar.gz'>miniupnpd-20090904.tar.gz</a></td>
2350 <td class="filesize">89344</td>
2351 <td class="filedate">04/09/2009 16:24:26 +0000</td>
2352 <td class="comment">MiniUPnP daemon source code</td>
2353 <td></td>
2354</tr>
2355<tr>
2356 <td class="filename"><a href='download.php?file=miniupnpd-20090820.tar.gz'>miniupnpd-20090820.tar.gz</a></td>
2357 <td class="filesize">89149</td>
2358 <td class="filedate">20/08/2009 09:35:58 +0000</td>
2359 <td class="comment">MiniUPnP daemon source code</td>
2360 <td></td>
2361</tr>
2362<tr>
2363 <td class="filename"><a href='download.php?file=miniupnpc-20090807.tar.gz'>miniupnpc-20090807.tar.gz</a></td>
2364 <td class="filesize">41288</td>
2365 <td class="filedate">07/08/2009 14:46:11 +0000</td>
2366 <td class="comment">MiniUPnP client source code</td>
2367 <td></td>
2368</tr>
2369<tr>
2370 <td class="filename"><a href='download.php?file=miniupnpc-20090729.tar.gz'>miniupnpc-20090729.tar.gz</a></td>
2371 <td class="filesize">40588</td>
2372 <td class="filedate">29/07/2009 08:47:43 +0000</td>
2373 <td class="comment">MiniUPnP client source code</td>
2374 <td></td>
2375</tr>
2376<tr>
2377 <td class="filename"><a href='download.php?file=xchat-upnp20061022.patch'>xchat-upnp20061022.patch</a></td>
2378 <td class="filesize">10258</td>
2379 <td class="filedate">17/07/2009 15:49:46 +0000</td>
2380 <td class="comment">Patch to add UPnP capabilities to xchat</td>
2381 <td></td>
2382</tr>
2383<tr>
2384 <td class="filename"><a href='download.php?file=miniupnpc-20090713.tar.gz'>miniupnpc-20090713.tar.gz</a></td>
2385 <td class="filesize">40206</td>
2386 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2387 <td class="comment">MiniUPnP client source code</td>
2388 <td></td>
2389</tr>
2390<tr>
2391 <td class="filename"><a href='download.php?file=libnatpmp-20090713.tar.gz'>libnatpmp-20090713.tar.gz</a></td>
2392 <td class="filesize">14262</td>
2393 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2394 <td class="comment">libnatpmp source code</td>
2395 <td></td>
2396</tr>
2397<tr>
2398 <td class="filename"><a href='download.php?file=miniupnpd-20090605.tar.gz'>miniupnpd-20090605.tar.gz</a></td>
2399 <td class="filesize">83774</td>
2400 <td class="filedate">04/06/2009 23:32:20 +0000</td>
2401 <td class="comment">MiniUPnP daemon source code</td>
2402 <td></td>
2403</tr>
2404<tr>
2405 <td class="filename"><a href='download.php?file=miniupnpc-20090605.tar.gz'>miniupnpc-20090605.tar.gz</a></td>
2406 <td class="filesize">40077</td>
2407 <td class="filedate">04/06/2009 23:32:16 +0000</td>
2408 <td class="comment">MiniUPnP client source code</td>
2409 <td></td>
2410</tr>
2411<tr>
2412 <td class="filename"><a href='download.php?file=libnatpmp-20090605.tar.gz'>libnatpmp-20090605.tar.gz</a></td>
2413 <td class="filesize">13817</td>
2414 <td class="filedate">04/06/2009 23:32:15 +0000</td>
2415 <td class="comment">libnatpmp source code</td>
2416 <td></td>
2417</tr>
2418<tr>
2419 <td class="filename"><a href='download.php?file=miniupnpd-20090516.tar.gz'>miniupnpd-20090516.tar.gz</a></td>
2420 <td class="filesize">83689</td>
2421 <td class="filedate">16/05/2009 08:47:31 +0000</td>
2422 <td class="comment">MiniUPnP daemon source code</td>
2423 <td></td>
2424</tr>
2425<tr>
2426 <td class="filename"><a href='download.php?file=miniupnpc-1.3.tar.gz'>miniupnpc-1.3.tar.gz</a></td>
2427 <td class="filesize">40058</td>
2428 <td class="filedate">17/04/2009 21:27:55 +0000</td>
2429 <td class="comment">MiniUPnP client release source code</td>
2430 <td></td>
2431</tr>
2432<tr>
2433 <td class="filename"><a href='download.php?file=miniupnpd-1.3.tar.gz'>miniupnpd-1.3.tar.gz</a></td>
2434 <td class="filesize">83464</td>
2435 <td class="filedate">17/04/2009 20:11:21 +0000</td>
2436 <td class="comment">MiniUPnP daemon release source code</td>
2437 <td></td>
2438</tr>
2439<tr>
2440 <td class="filename"><a href='download.php?file=libnatpmp-20090310.tar.gz'>libnatpmp-20090310.tar.gz</a></td>
2441 <td class="filesize">11847</td>
2442 <td class="filedate">10/03/2009 10:19:45 +0000</td>
2443 <td class="comment">libnatpmp source code</td>
2444 <td></td>
2445</tr>
2446<tr>
2447 <td class="filename"><a href='download.php?file=miniupnpd-20090214.tar.gz'>miniupnpd-20090214.tar.gz</a></td>
2448 <td class="filesize">82921</td>
2449 <td class="filedate">14/02/2009 11:27:03 +0000</td>
2450 <td class="comment">MiniUPnP daemon source code</td>
2451 <td></td>
2452</tr>
2453<tr>
2454 <td class="filename"><a href='download.php?file=miniupnpd-20090213.tar.gz'>miniupnpd-20090213.tar.gz</a></td>
2455 <td class="filesize">82594</td>
2456 <td class="filedate">13/02/2009 19:48:01 +0000</td>
2457 <td class="comment">MiniUPnP daemon source code</td>
2458 <td></td>
2459</tr>
2460<tr>
2461 <td class="filename"><a href='download.php?file=libnatpmp-20090129.tar.gz'>libnatpmp-20090129.tar.gz</a></td>
2462 <td class="filesize">11748</td>
2463 <td class="filedate">29/01/2009 21:50:31 +0000</td>
2464 <td class="comment">libnatpmp source code</td>
2465 <td></td>
2466</tr>
2467<tr>
2468 <td class="filename"><a href='download.php?file=miniupnpc-20090129.tar.gz'>miniupnpc-20090129.tar.gz</a></td>
2469 <td class="filesize">39976</td>
2470 <td class="filedate">29/01/2009 21:50:30 +0000</td>
2471 <td class="comment">MiniUPnP client source code</td>
2472 <td></td>
2473</tr>
2474<tr>
2475 <td class="filename"><a href='download.php?file=miniupnpd-20090129.tar.gz'>miniupnpd-20090129.tar.gz</a></td>
2476 <td class="filesize">82487</td>
2477 <td class="filedate">29/01/2009 21:50:27 +0000</td>
2478 <td class="comment">MiniUPnP daemon source code</td>
2479 <td></td>
2480</tr>
2481<tr>
2482 <td class="filename"><a href='download.php?file=miniupnpd-20081009.tar.gz'>miniupnpd-20081009.tar.gz</a></td>
2483 <td class="filesize">81732</td>
2484 <td class="filedate">09/10/2008 12:53:02 +0000</td>
2485 <td class="comment">MiniUPnP daemon source code</td>
2486 <td></td>
2487</tr>
2488<tr>
2489 <td class="filename"><a href='download.php?file=minissdpd-1.0.tar.gz'>minissdpd-1.0.tar.gz</a></td>
2490 <td class="filesize">12996</td>
2491 <td class="filedate">07/10/2008 14:03:49 +0000</td>
2492 <td class="comment">MiniSSDPd release source code</td>
2493 <td></td>
2494</tr>
2495<tr>
2496 <td class="filename"><a href='download.php?file=miniupnpc-1.2.tar.gz'>miniupnpc-1.2.tar.gz</a></td>
2497 <td class="filesize">38787</td>
2498 <td class="filedate">07/10/2008 14:03:47 +0000</td>
2499 <td class="comment">MiniUPnP client release source code</td>
2500 <td></td>
2501</tr>
2502<tr>
2503 <td class="filename"><a href='download.php?file=miniupnpd-1.2.tar.gz'>miniupnpd-1.2.tar.gz</a></td>
2504 <td class="filesize">81025</td>
2505 <td class="filedate">07/10/2008 14:03:45 +0000</td>
2506 <td class="comment">MiniUPnP daemon release source code</td>
2507 <td></td>
2508</tr>
2509<tr>
2510 <td class="filename"><a href='download.php?file=miniupnpd-20081006.tar.gz'>miniupnpd-20081006.tar.gz</a></td>
2511 <td class="filesize">80510</td>
2512 <td class="filedate">06/10/2008 15:50:34 +0000</td>
2513 <td class="comment">MiniUPnP daemon source code</td>
2514 <td></td>
2515</tr>
2516<tr>
2517 <td class="filename"><a href='download.php?file=minissdpd-20081006.tar.gz'>minissdpd-20081006.tar.gz</a></td>
2518 <td class="filesize">12230</td>
2519 <td class="filedate">06/10/2008 15:50:33 +0000</td>
2520 <td class="comment">MiniSSDPd source code</td>
2521 <td></td>
2522</tr>
2523<tr>
2524 <td class="filename"><a href='download.php?file=libnatpmp-20081006.tar.gz'>libnatpmp-20081006.tar.gz</a></td>
2525 <td class="filesize">11710</td>
2526 <td class="filedate">06/10/2008 15:50:31 +0000</td>
2527 <td class="comment">libnatpmp source code</td>
2528 <td></td>
2529</tr>
2530<tr>
2531 <td class="filename" colspan="2"><a href='download.php?file=mediatomb_minissdp-20081006.patch'>mediatomb_minissdp-20081006.patch</a></td>
2532 <td class="filedate">06/10/2008 15:48:18 +0000</td>
2533 <td class="comment"></td>
2534 <td></td>
2535</tr>
2536<tr>
2537 <td class="filename"><a href='download.php?file=miniupnpc-20081002.tar.gz'>miniupnpc-20081002.tar.gz</a></td>
2538 <td class="filesize">38291</td>
2539 <td class="filedate">02/10/2008 09:20:18 +0000</td>
2540 <td class="comment">MiniUPnP client source code</td>
2541 <td></td>
2542</tr>
2543<tr>
2544 <td class="filename"><a href='download.php?file=miniupnpd-20081001.tar.gz'>miniupnpd-20081001.tar.gz</a></td>
2545 <td class="filesize">79696</td>
2546 <td class="filedate">01/10/2008 13:11:20 +0000</td>
2547 <td class="comment">MiniUPnP daemon source code</td>
2548 <td></td>
2549</tr>
2550<tr>
2551 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080925.zip'>upnpc-exe-win32-20080925.zip</a></td>
2552 <td class="filesize">36602</td>
2553 <td class="filedate">25/09/2008 06:59:33 +0000</td>
2554 <td class="comment">Windows executable</td>
2555 <td></td>
2556</tr>
2557<tr>
2558 <td class="filename"><a href='download.php?file=miniupnpd-20080710.tar.gz'>miniupnpd-20080710.tar.gz</a></td>
2559 <td class="filesize">78898</td>
2560 <td class="filedate">10/07/2008 09:38:41 +0000</td>
2561 <td class="comment">MiniUPnP daemon source code</td>
2562 <td></td>
2563</tr>
2564<tr>
2565 <td class="filename"><a href='download.php?file=libnatpmp-20080707.tar.gz'>libnatpmp-20080707.tar.gz</a></td>
2566 <td class="filesize">11679</td>
2567 <td class="filedate">06/07/2008 22:05:23 +0000</td>
2568 <td class="comment">libnatpmp source code</td>
2569 <td></td>
2570</tr>
2571<tr>
2572 <td class="filename"><a href='download.php?file=miniupnpc-1.1.tar.gz'>miniupnpc-1.1.tar.gz</a></td>
2573 <td class="filesize">38235</td>
2574 <td class="filedate">04/07/2008 16:45:24 +0000</td>
2575 <td class="comment">MiniUPnP client release source code</td>
2576 <td></td>
2577</tr>
2578<tr>
2579 <td class="filename"><a href='download.php?file=miniupnpc-20080703.tar.gz'>miniupnpc-20080703.tar.gz</a></td>
2580 <td class="filesize">38204</td>
2581 <td class="filedate">03/07/2008 15:47:37 +0000</td>
2582 <td class="comment">MiniUPnP client source code</td>
2583 <td></td>
2584</tr>
2585<tr>
2586 <td class="filename"><a href='download.php?file=libnatpmp-20080703.tar.gz'>libnatpmp-20080703.tar.gz</a></td>
2587 <td class="filesize">11570</td>
2588 <td class="filedate">03/07/2008 15:47:25 +0000</td>
2589 <td class="comment">libnatpmp source code</td>
2590 <td></td>
2591</tr>
2592<tr>
2593 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080703.zip'>upnpc-exe-win32-20080703.zip</a></td>
2594 <td class="filesize">36137</td>
2595 <td class="filedate">02/07/2008 23:35:14 +0000</td>
2596 <td class="comment">Windows executable</td>
2597 <td></td>
2598</tr>
2599<tr>
2600 <td class="filename"><a href='download.php?file=libnatpmp-20080702.tar.gz'>libnatpmp-20080702.tar.gz</a></td>
2601 <td class="filesize">8873</td>
2602 <td class="filedate">02/07/2008 17:32:35 +0000</td>
2603 <td class="comment">libnatpmp source code</td>
2604 <td></td>
2605</tr>
2606<tr>
2607 <td class="filename"><a href='download.php?file=libnatpmp-20080630.tar.gz'>libnatpmp-20080630.tar.gz</a></td>
2608 <td class="filesize">8864</td>
2609 <td class="filedate">30/06/2008 14:20:16 +0000</td>
2610 <td class="comment">libnatpmp source code</td>
2611 <td></td>
2612</tr>
2613<tr>
2614 <td class="filename"><a href='download.php?file=libnatpmp-20080529.tar.gz'>libnatpmp-20080529.tar.gz</a></td>
2615 <td class="filesize">7397</td>
2616 <td class="filedate">29/05/2008 09:06:25 +0000</td>
2617 <td class="comment">libnatpmp source code</td>
2618 <td></td>
2619</tr>
2620<tr>
2621 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080514.zip'>upnpc-exe-win32-20080514.zip</a></td>
2622 <td class="filesize">14227</td>
2623 <td class="filedate">14/05/2008 20:23:19 +0000</td>
2624 <td class="comment">Windows executable</td>
2625 <td></td>
2626</tr>
2627<tr>
2628 <td class="filename"><a href='download.php?file=libnatpmp-20080428.tar.gz'>libnatpmp-20080428.tar.gz</a></td>
2629 <td class="filesize">7295</td>
2630 <td class="filedate">28/04/2008 03:09:14 +0000</td>
2631 <td class="comment">libnatpmp source code</td>
2632 <td></td>
2633</tr>
2634<tr>
2635 <td class="filename"><a href='download.php?file=miniupnpd-20080427.tar.gz'>miniupnpd-20080427.tar.gz</a></td>
2636 <td class="filesize">78765</td>
2637 <td class="filedate">27/04/2008 18:16:36 +0000</td>
2638 <td class="comment">MiniUPnP daemon source code</td>
2639 <td></td>
2640</tr>
2641<tr>
2642 <td class="filename"><a href='download.php?file=miniupnpc-20080427.tar.gz'>miniupnpc-20080427.tar.gz</a></td>
2643 <td class="filesize">37610</td>
2644 <td class="filedate">27/04/2008 18:16:35 +0000</td>
2645 <td class="comment">MiniUPnP client source code</td>
2646 <td></td>
2647</tr>
2648<tr>
2649 <td class="filename"><a href='download.php?file=miniupnpd-1.1.tar.gz'>miniupnpd-1.1.tar.gz</a></td>
2650 <td class="filesize">78594</td>
2651 <td class="filedate">25/04/2008 17:38:05 +0000</td>
2652 <td class="comment">MiniUPnP daemon release source code</td>
2653 <td></td>
2654</tr>
2655<tr>
2656 <td class="filename"><a href='download.php?file=miniupnpc-20080423.tar.gz'>miniupnpc-20080423.tar.gz</a></td>
2657 <td class="filesize">36818</td>
2658 <td class="filedate">23/04/2008 11:57:36 +0000</td>
2659 <td class="comment">MiniUPnP client source code</td>
2660 <td></td>
2661</tr>
2662<tr>
2663 <td class="filename"><a href='download.php?file=miniupnpd-20080308.tar.gz'>miniupnpd-20080308.tar.gz</a></td>
2664 <td class="filesize">75679</td>
2665 <td class="filedate">08/03/2008 11:13:29 +0000</td>
2666 <td class="comment">MiniUPnP daemon source code</td>
2667 <td></td>
2668</tr>
2669<tr>
2670 <td class="filename"><a href='download.php?file=miniupnpd-20080303.tar.gz'>miniupnpd-20080303.tar.gz</a></td>
2671 <td class="filesize">74202</td>
2672 <td class="filedate">03/03/2008 01:43:16 +0000</td>
2673 <td class="comment">MiniUPnP daemon source code</td>
2674 <td></td>
2675</tr>
2676<tr>
2677 <td class="filename"><a href='download.php?file=miniupnpd-20080224.tar.gz'>miniupnpd-20080224.tar.gz</a></td>
2678 <td class="filesize">72773</td>
2679 <td class="filedate">24/02/2008 11:23:17 +0000</td>
2680 <td class="comment">MiniUPnP daemon source code</td>
2681 <td></td>
2682</tr>
2683<tr>
2684 <td class="filename"><a href='download.php?file=miniupnpc-1.0.tar.gz'>miniupnpc-1.0.tar.gz</a></td>
2685 <td class="filesize">36223</td>
2686 <td class="filedate">21/02/2008 13:26:46 +0000</td>
2687 <td class="comment">MiniUPnP client release source code</td>
2688 <td></td>
2689</tr>
2690<tr>
2691 <td class="filename"><a href='download.php?file=miniupnpd-20080221.tar.gz'>miniupnpd-20080221.tar.gz</a></td>
2692 <td class="filesize">70823</td>
2693 <td class="filedate">21/02/2008 10:23:46 +0000</td>
2694 <td class="comment">MiniUPnP daemon source code</td>
2695 <td></td>
2696</tr>
2697<tr>
2698 <td class="filename"><a href='download.php?file=miniupnpc-20080217.tar.gz'>miniupnpc-20080217.tar.gz</a></td>
2699 <td class="filesize">35243</td>
2700 <td class="filedate">16/02/2008 23:47:59 +0000</td>
2701 <td class="comment">MiniUPnP client source code</td>
2702 <td></td>
2703</tr>
2704<tr>
2705 <td class="filename"><a href='download.php?file=miniupnpd-20080207.tar.gz'>miniupnpd-20080207.tar.gz</a></td>
2706 <td class="filesize">70647</td>
2707 <td class="filedate">07/02/2008 21:21:00 +0000</td>
2708 <td class="comment">MiniUPnP daemon source code</td>
2709 <td></td>
2710</tr>
2711<tr>
2712 <td class="filename"><a href='download.php?file=miniupnpc-20080203.tar.gz'>miniupnpc-20080203.tar.gz</a></td>
2713 <td class="filesize">34921</td>
2714 <td class="filedate">03/02/2008 22:28:11 +0000</td>
2715 <td class="comment">MiniUPnP client source code</td>
2716 <td></td>
2717</tr>
2718<tr>
2719 <td class="filename"><a href='download.php?file=miniupnpd-1.0.tar.gz'>miniupnpd-1.0.tar.gz</a></td>
2720 <td class="filesize">69427</td>
2721 <td class="filedate">27/01/2008 22:41:25 +0000</td>
2722 <td class="comment">MiniUPnP daemon release source code</td>
2723 <td></td>
2724</tr>
2725<tr>
2726 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080118.zip'>upnpc-exe-win32-20080118.zip</a></td>
2727 <td class="filesize">13582</td>
2728 <td class="filedate">18/01/2008 11:42:16 +0000</td>
2729 <td class="comment">Windows executable</td>
2730 <td></td>
2731</tr>
2732<tr>
2733 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC13.tar.gz'>miniupnpd-1.0-RC13.tar.gz</a></td>
2734 <td class="filesize">67892</td>
2735 <td class="filedate">03/01/2008 16:50:21 +0000</td>
2736 <td class="comment">MiniUPnP daemon release source code</td>
2737 <td></td>
2738</tr>
2739<tr>
2740 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC13.tar.gz'>miniupnpc-1.0-RC13.tar.gz</a></td>
2741 <td class="filesize">34820</td>
2742 <td class="filedate">03/01/2008 16:50:20 +0000</td>
2743 <td class="comment">MiniUPnP client release source code</td>
2744 <td></td>
2745</tr>
2746<tr>
2747 <td class="filename"><a href='download.php?file=miniupnpd-20071220.tar.gz'>miniupnpd-20071220.tar.gz</a></td>
2748 <td class="filesize">67211</td>
2749 <td class="filedate">20/12/2007 12:08:34 +0000</td>
2750 <td class="comment">MiniUPnP daemon source code</td>
2751 <td></td>
2752</tr>
2753<tr>
2754 <td class="filename"><a href='download.php?file=miniupnpc-20071219.tar.gz'>miniupnpc-20071219.tar.gz</a></td>
2755 <td class="filesize">34290</td>
2756 <td class="filedate">19/12/2007 18:31:47 +0000</td>
2757 <td class="comment">MiniUPnP client source code</td>
2758 <td></td>
2759</tr>
2760<tr>
2761 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC12.tar.gz'>minissdpd-1.0-RC12.tar.gz</a></td>
2762 <td class="filesize">9956</td>
2763 <td class="filedate">19/12/2007 18:30:12 +0000</td>
2764 <td class="comment">MiniSSDPd release source code</td>
2765 <td></td>
2766</tr>
2767<tr>
2768 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC12.tar.gz'>miniupnpd-1.0-RC12.tar.gz</a></td>
2769 <td class="filesize">66911</td>
2770 <td class="filedate">14/12/2007 17:39:20 +0000</td>
2771 <td class="comment">MiniUPnP daemon release source code</td>
2772 <td></td>
2773</tr>
2774<tr>
2775 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC12.tar.gz'>miniupnpc-1.0-RC12.tar.gz</a></td>
2776 <td class="filesize">32543</td>
2777 <td class="filedate">14/12/2007 17:39:19 +0000</td>
2778 <td class="comment">MiniUPnP client release source code</td>
2779 <td></td>
2780</tr>
2781<tr>
2782 <td class="filename"><a href='download.php?file=miniupnpc-20071213.tar.gz'>miniupnpc-20071213.tar.gz</a></td>
2783 <td class="filesize">32541</td>
2784 <td class="filedate">13/12/2007 17:09:51 +0000</td>
2785 <td class="comment">MiniUPnP client source code</td>
2786 <td></td>
2787</tr>
2788<tr>
2789 <td class="filename"><a href='download.php?file=miniupnpd-20071213.tar.gz'>miniupnpd-20071213.tar.gz</a></td>
2790 <td class="filesize">66826</td>
2791 <td class="filedate">13/12/2007 16:42:50 +0000</td>
2792 <td class="comment">MiniUPnP daemon source code</td>
2793 <td></td>
2794</tr>
2795<tr>
2796 <td class="filename"><a href='download.php?file=libnatpmp-20071213.tar.gz'>libnatpmp-20071213.tar.gz</a></td>
2797 <td class="filesize">5997</td>
2798 <td class="filedate">13/12/2007 14:56:30 +0000</td>
2799 <td class="comment">libnatpmp source code</td>
2800 <td></td>
2801</tr>
2802<tr>
2803 <td class="filename"><a href='download.php?file=libnatpmp-20071202.tar.gz'>libnatpmp-20071202.tar.gz</a></td>
2804 <td class="filesize">5664</td>
2805 <td class="filedate">02/12/2007 00:15:28 +0000</td>
2806 <td class="comment">libnatpmp source code</td>
2807 <td></td>
2808</tr>
2809<tr>
2810 <td class="filename"><a href='download.php?file=miniupnpd-20071103.tar.gz'>miniupnpd-20071103.tar.gz</a></td>
2811 <td class="filesize">65740</td>
2812 <td class="filedate">02/11/2007 23:58:38 +0000</td>
2813 <td class="comment">MiniUPnP daemon source code</td>
2814 <td></td>
2815</tr>
2816<tr>
2817 <td class="filename"><a href='download.php?file=miniupnpd-20071102.tar.gz'>miniupnpd-20071102.tar.gz</a></td>
2818 <td class="filesize">65733</td>
2819 <td class="filedate">02/11/2007 23:05:44 +0000</td>
2820 <td class="comment">MiniUPnP daemon source code</td>
2821 <td></td>
2822</tr>
2823<tr>
2824 <td class="filename"><a href='download.php?file=miniupnpc-20071103.tar.gz'>miniupnpc-20071103.tar.gz</a></td>
2825 <td class="filesize">32239</td>
2826 <td class="filedate">02/11/2007 23:05:34 +0000</td>
2827 <td class="comment">MiniUPnP client source code</td>
2828 <td></td>
2829</tr>
2830<tr>
2831 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC11.tar.gz'>miniupnpd-1.0-RC11.tar.gz</a></td>
2832 <td class="filesize">64828</td>
2833 <td class="filedate">25/10/2007 13:27:18 +0000</td>
2834 <td class="comment">MiniUPnP daemon release source code</td>
2835 <td></td>
2836</tr>
2837<tr>
2838 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC11.tar.gz'>miniupnpc-1.0-RC11.tar.gz</a></td>
2839 <td class="filesize">32161</td>
2840 <td class="filedate">25/10/2007 13:27:17 +0000</td>
2841 <td class="comment">MiniUPnP client release source code</td>
2842 <td></td>
2843</tr>
2844<tr>
2845 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071025.zip'>upnpc-exe-win32-20071025.zip</a></td>
2846 <td class="filesize">12809</td>
2847 <td class="filedate">24/10/2007 23:15:55 +0000</td>
2848 <td class="comment">Windows executable</td>
2849 <td></td>
2850</tr>
2851<tr>
2852 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC10.tar.gz'>miniupnpd-1.0-RC10.tar.gz</a></td>
2853 <td class="filesize">62674</td>
2854 <td class="filedate">12/10/2007 08:38:33 +0000</td>
2855 <td class="comment">MiniUPnP daemon release source code</td>
2856 <td></td>
2857</tr>
2858<tr>
2859 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC10.tar.gz'>miniupnpc-1.0-RC10.tar.gz</a></td>
2860 <td class="filesize">31962</td>
2861 <td class="filedate">12/10/2007 08:38:31 +0000</td>
2862 <td class="comment">MiniUPnP client release source code</td>
2863 <td></td>
2864</tr>
2865<tr>
2866 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC10.tar.gz'>minissdpd-1.0-RC10.tar.gz</a></td>
2867 <td class="filesize">9517</td>
2868 <td class="filedate">12/10/2007 08:38:30 +0000</td>
2869 <td class="comment">MiniSSDPd release source code</td>
2870 <td></td>
2871</tr>
2872<tr>
2873 <td class="filename"><a href='download.php?file=miniupnpc-20071003.tar.gz'>miniupnpc-20071003.tar.gz</a></td>
2874 <td class="filesize">31199</td>
2875 <td class="filedate">03/10/2007 15:30:13 +0000</td>
2876 <td class="comment">MiniUPnP client source code</td>
2877 <td></td>
2878</tr>
2879<tr>
2880 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071001.zip'>upnpc-exe-win32-20071001.zip</a></td>
2881 <td class="filesize">12604</td>
2882 <td class="filedate">01/10/2007 17:09:22 +0000</td>
2883 <td class="comment">Windows executable</td>
2884 <td></td>
2885</tr>
2886<tr>
2887 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC9.tar.gz'>miniupnpd-1.0-RC9.tar.gz</a></td>
2888 <td class="filesize">54778</td>
2889 <td class="filedate">27/09/2007 19:38:36 +0000</td>
2890 <td class="comment">MiniUPnP daemon release source code</td>
2891 <td></td>
2892</tr>
2893<tr>
2894 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC9.tar.gz'>minissdpd-1.0-RC9.tar.gz</a></td>
2895 <td class="filesize">9163</td>
2896 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2897 <td class="comment">MiniSSDPd release source code</td>
2898 <td></td>
2899</tr>
2900<tr>
2901 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC9.tar.gz'>miniupnpc-1.0-RC9.tar.gz</a></td>
2902 <td class="filesize">30538</td>
2903 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2904 <td class="comment">MiniUPnP client release source code</td>
2905 <td></td>
2906</tr>
2907<tr>
2908 <td class="filename"><a href='download.php?file=miniupnpd-20070924.tar.gz'>miniupnpd-20070924.tar.gz</a></td>
2909 <td class="filesize">52338</td>
2910 <td class="filedate">24/09/2007 20:26:05 +0000</td>
2911 <td class="comment">MiniUPnP daemon source code</td>
2912 <td></td>
2913</tr>
2914<tr>
2915 <td class="filename"><a href='download.php?file=miniupnpd-20070923.tar.gz'>miniupnpd-20070923.tar.gz</a></td>
2916 <td class="filesize">51060</td>
2917 <td class="filedate">23/09/2007 21:13:34 +0000</td>
2918 <td class="comment">MiniUPnP daemon source code</td>
2919 <td></td>
2920</tr>
2921<tr>
2922 <td class="filename"><a href='download.php?file=miniupnpc-20070923.tar.gz'>miniupnpc-20070923.tar.gz</a></td>
2923 <td class="filesize">30246</td>
2924 <td class="filedate">23/09/2007 21:13:33 +0000</td>
2925 <td class="comment">MiniUPnP client source code</td>
2926 <td></td>
2927</tr>
2928<tr>
2929 <td class="filename"><a href='download.php?file=minissdpd-20070923.tar.gz'>minissdpd-20070923.tar.gz</a></td>
2930 <td class="filesize">8978</td>
2931 <td class="filedate">23/09/2007 21:13:32 +0000</td>
2932 <td class="comment">MiniSSDPd source code</td>
2933 <td></td>
2934</tr>
2935<tr>
2936 <td class="filename"><a href='download.php?file=miniupnpc-20070902.tar.gz'>miniupnpc-20070902.tar.gz</a></td>
2937 <td class="filesize">30205</td>
2938 <td class="filedate">01/09/2007 23:47:23 +0000</td>
2939 <td class="comment">MiniUPnP client source code</td>
2940 <td></td>
2941</tr>
2942<tr>
2943 <td class="filename"><a href='download.php?file=minissdpd-20070902.tar.gz'>minissdpd-20070902.tar.gz</a></td>
2944 <td class="filesize">6539</td>
2945 <td class="filedate">01/09/2007 23:47:20 +0000</td>
2946 <td class="comment">MiniSSDPd source code</td>
2947 <td></td>
2948</tr>
2949<tr>
2950 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC8.tar.gz'>miniupnpd-1.0-RC8.tar.gz</a></td>
2951 <td class="filesize">50952</td>
2952 <td class="filedate">29/08/2007 10:56:09 +0000</td>
2953 <td class="comment">MiniUPnP daemon release source code</td>
2954 <td></td>
2955</tr>
2956<tr>
2957 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC8.tar.gz'>miniupnpc-1.0-RC8.tar.gz</a></td>
2958 <td class="filesize">29312</td>
2959 <td class="filedate">29/08/2007 10:56:08 +0000</td>
2960 <td class="comment">MiniUPnP client release source code</td>
2961 <td></td>
2962</tr>
2963<tr>
2964 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC7.tar.gz'>miniupnpd-1.0-RC7.tar.gz</a></td>
2965 <td class="filesize">50613</td>
2966 <td class="filedate">20/07/2007 00:15:45 +0000</td>
2967 <td class="comment">MiniUPnP daemon release source code</td>
2968 <td></td>
2969</tr>
2970<tr>
2971 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC6.tar.gz'>miniupnpd-1.0-RC6.tar.gz</a></td>
2972 <td class="filesize">49986</td>
2973 <td class="filedate">12/06/2007 17:12:07 +0000</td>
2974 <td class="comment">MiniUPnP daemon release source code</td>
2975 <td></td>
2976</tr>
2977<tr>
2978 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC6.tar.gz'>miniupnpc-1.0-RC6.tar.gz</a></td>
2979 <td class="filesize">29032</td>
2980 <td class="filedate">12/06/2007 17:12:06 +0000</td>
2981 <td class="comment">MiniUPnP client release source code</td>
2982 <td></td>
2983</tr>
2984<tr>
2985 <td class="filename"><a href='download.php?file=miniupnpd-20070607.tar.gz'>miniupnpd-20070607.tar.gz</a></td>
2986 <td class="filesize">49768</td>
2987 <td class="filedate">06/06/2007 23:12:00 +0000</td>
2988 <td class="comment">MiniUPnP daemon source code</td>
2989 <td></td>
2990</tr>
2991<tr>
2992 <td class="filename"><a href='download.php?file=miniupnpd-20070605.tar.gz'>miniupnpd-20070605.tar.gz</a></td>
2993 <td class="filesize">49710</td>
2994 <td class="filedate">05/06/2007 21:01:53 +0000</td>
2995 <td class="comment">MiniUPnP daemon source code</td>
2996 <td></td>
2997</tr>
2998<tr>
2999 <td class="filename"><a href='download.php?file=miniupnpd-20070521.tar.gz'>miniupnpd-20070521.tar.gz</a></td>
3000 <td class="filesize">48374</td>
3001 <td class="filedate">21/05/2007 13:07:43 +0000</td>
3002 <td class="comment">MiniUPnP daemon source code</td>
3003 <td></td>
3004</tr>
3005<tr>
3006 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20070519.zip'>upnpc-exe-win32-20070519.zip</a></td>
3007 <td class="filesize">10836</td>
3008 <td class="filedate">19/05/2007 13:14:15 +0000</td>
3009 <td class="comment">Windows executable</td>
3010 <td></td>
3011</tr>
3012<tr>
3013 <td class="filename"><a href='download.php?file=miniupnpc-20070515.tar.gz'>miniupnpc-20070515.tar.gz</a></td>
3014 <td class="filesize">25802</td>
3015 <td class="filedate">15/05/2007 18:15:25 +0000</td>
3016 <td class="comment">MiniUPnP client source code</td>
3017 <td></td>
3018</tr>
3019<tr>
3020 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC5.tar.gz'>miniupnpd-1.0-RC5.tar.gz</a></td>
3021 <td class="filesize">48064</td>
3022 <td class="filedate">10/05/2007 20:22:48 +0000</td>
3023 <td class="comment">MiniUPnP daemon release source code</td>
3024 <td></td>
3025</tr>
3026<tr>
3027 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC5.tar.gz'>miniupnpc-1.0-RC5.tar.gz</a></td>
3028 <td class="filesize">25242</td>
3029 <td class="filedate">10/05/2007 20:22:46 +0000</td>
3030 <td class="comment">MiniUPnP client release source code</td>
3031 <td></td>
3032</tr>
3033<tr>
3034 <td class="filename"><a href='download.php?file=miniupnpd-20070412.tar.gz'>miniupnpd-20070412.tar.gz</a></td>
3035 <td class="filesize">47807</td>
3036 <td class="filedate">12/04/2007 20:21:48 +0000</td>
3037 <td class="comment">MiniUPnP daemon source code</td>
3038 <td></td>
3039</tr>
3040<tr>
3041 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC4.tar.gz'>miniupnpd-1.0-RC4.tar.gz</a></td>
3042 <td class="filesize">47687</td>
3043 <td class="filedate">17/03/2007 11:43:13 +0000</td>
3044 <td class="comment">MiniUPnP daemon release source code</td>
3045 <td></td>
3046</tr>
3047<tr>
3048 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC4.tar.gz'>miniupnpc-1.0-RC4.tar.gz</a></td>
3049 <td class="filesize">25085</td>
3050 <td class="filedate">17/03/2007 11:43:10 +0000</td>
3051 <td class="comment">MiniUPnP client release source code</td>
3052 <td></td>
3053</tr>
3054<tr>
3055 <td class="filename"><a href='download.php?file=miniupnpd-20070311.tar.gz'>miniupnpd-20070311.tar.gz</a></td>
3056 <td class="filesize">47599</td>
3057 <td class="filedate">11/03/2007 00:25:26 +0000</td>
3058 <td class="comment">MiniUPnP daemon source code</td>
3059 <td></td>
3060</tr>
3061<tr>
3062 <td class="filename"><a href='download.php?file=miniupnpd-20070208.tar.gz'>miniupnpd-20070208.tar.gz</a></td>
3063 <td class="filesize">45084</td>
3064 <td class="filedate">07/02/2007 23:04:06 +0000</td>
3065 <td class="comment">MiniUPnP daemon source code</td>
3066 <td></td>
3067</tr>
3068<tr>
3069 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC3.tar.gz'>miniupnpd-1.0-RC3.tar.gz</a></td>
3070 <td class="filesize">44683</td>
3071 <td class="filedate">30/01/2007 23:00:44 +0000</td>
3072 <td class="comment">MiniUPnP daemon release source code</td>
3073 <td></td>
3074</tr>
3075<tr>
3076 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC3.tar.gz'>miniupnpc-1.0-RC3.tar.gz</a></td>
3077 <td class="filesize">25055</td>
3078 <td class="filedate">30/01/2007 23:00:42 +0000</td>
3079 <td class="comment">MiniUPnP client release source code</td>
3080 <td></td>
3081</tr>
3082<tr>
3083 <td class="filename"><a href='download.php?file=miniupnpd-20070130.tar.gz'>miniupnpd-20070130.tar.gz</a></td>
3084 <td class="filesize">43735</td>
3085 <td class="filedate">29/01/2007 23:26:16 +0000</td>
3086 <td class="comment">MiniUPnP daemon source code</td>
3087 <td></td>
3088</tr>
3089<tr>
3090 <td class="filename"><a href='download.php?file=miniupnpc-20070130.tar.gz'>miniupnpc-20070130.tar.gz</a></td>
3091 <td class="filesize">24466</td>
3092 <td class="filedate">29/01/2007 23:26:13 +0000</td>
3093 <td class="comment">MiniUPnP client source code</td>
3094 <td></td>
3095</tr>
3096<tr>
3097 <td class="filename"><a href='download.php?file=miniupnpd-20070127.tar.gz'>miniupnpd-20070127.tar.gz</a></td>
3098 <td class="filesize">42643</td>
3099 <td class="filedate">27/01/2007 16:02:35 +0000</td>
3100 <td class="comment">MiniUPnP daemon source code</td>
3101 <td></td>
3102</tr>
3103<tr>
3104 <td class="filename"><a href='download.php?file=miniupnpc-20070127.tar.gz'>miniupnpc-20070127.tar.gz</a></td>
3105 <td class="filesize">24241</td>
3106 <td class="filedate">27/01/2007 16:02:33 +0000</td>
3107 <td class="comment">MiniUPnP client source code</td>
3108 <td></td>
3109</tr>
3110<tr>
3111 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC2.tar.gz'>miniupnpd-1.0-RC2.tar.gz</a></td>
3112 <td class="filesize">40424</td>
3113 <td class="filedate">17/01/2007 16:13:05 +0000</td>
3114 <td class="comment">MiniUPnP daemon release source code</td>
3115 <td></td>
3116</tr>
3117<tr>
3118 <td class="filename"><a href='download.php?file=miniupnpd-20070112.tar.gz'>miniupnpd-20070112.tar.gz</a></td>
3119 <td class="filesize">40708</td>
3120 <td class="filedate">12/01/2007 13:40:50 +0000</td>
3121 <td class="comment">MiniUPnP daemon source code</td>
3122 <td></td>
3123</tr>
3124<tr>
3125 <td class="filename"><a href='download.php?file=miniupnpd-20070111.tar.gz'>miniupnpd-20070111.tar.gz</a></td>
3126 <td class="filesize">40651</td>
3127 <td class="filedate">11/01/2007 18:50:21 +0000</td>
3128 <td class="comment">MiniUPnP daemon source code</td>
3129 <td></td>
3130</tr>
3131<tr>
3132 <td class="filename"><a href='download.php?file=miniupnpd-20070108.tar.gz'>miniupnpd-20070108.tar.gz</a></td>
3133 <td class="filesize">40025</td>
3134 <td class="filedate">08/01/2007 10:02:14 +0000</td>
3135 <td class="comment">MiniUPnP daemon source code</td>
3136 <td></td>
3137</tr>
3138<tr>
3139 <td class="filename"><a href='download.php?file=miniupnpd-20070103.tar.gz'>miniupnpd-20070103.tar.gz</a></td>
3140 <td class="filesize">40065</td>
3141 <td class="filedate">03/01/2007 14:39:11 +0000</td>
3142 <td class="comment">MiniUPnP daemon source code</td>
3143 <td></td>
3144</tr>
3145<tr>
3146 <td class="filename"><a href='download.php?file=miniupnpc-20061214.tar.gz'>miniupnpc-20061214.tar.gz</a></td>
3147 <td class="filesize">24106</td>
3148 <td class="filedate">14/12/2006 15:43:54 +0000</td>
3149 <td class="comment">MiniUPnP client source code</td>
3150 <td></td>
3151</tr>
3152<tr>
3153 <td class="filename"><a href='download.php?file=miniupnpd-20061214.tar.gz'>miniupnpd-20061214.tar.gz</a></td>
3154 <td class="filesize">39750</td>
3155 <td class="filedate">14/12/2006 13:44:51 +0000</td>
3156 <td class="comment">MiniUPnP daemon source code</td>
3157 <td></td>
3158</tr>
3159<tr>
3160 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC1.tar.gz'>miniupnpd-1.0-RC1.tar.gz</a></td>
3161 <td class="filesize">39572</td>
3162 <td class="filedate">07/12/2006 10:55:31 +0000</td>
3163 <td class="comment">MiniUPnP daemon release source code</td>
3164 <td></td>
3165</tr>
3166<tr>
3167 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC1.tar.gz'>miniupnpc-1.0-RC1.tar.gz</a></td>
3168 <td class="filesize">23582</td>
3169 <td class="filedate">07/12/2006 10:55:30 +0000</td>
3170 <td class="comment">MiniUPnP client release source code</td>
3171 <td></td>
3172</tr>
3173<tr>
3174 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061201.zip'>upnpc-exe-win32-20061201.zip</a></td>
3175 <td class="filesize">10378</td>
3176 <td class="filedate">01/12/2006 00:33:08 +0000</td>
3177 <td class="comment">Windows executable</td>
3178 <td></td>
3179</tr>
3180<tr>
3181 <td class="filename"><a href='download.php?file=miniupnpd20061130.tar.gz'>miniupnpd20061130.tar.gz</a></td>
3182 <td class="filesize">37184</td>
3183 <td class="filedate">30/11/2006 12:25:25 +0000</td>
3184 <td class="comment">MiniUPnP daemon source code</td>
3185 <td></td>
3186</tr>
3187<tr>
3188 <td class="filename"><a href='download.php?file=miniupnpd20061129.tar.gz'>miniupnpd20061129.tar.gz</a></td>
3189 <td class="filesize">36045</td>
3190 <td class="filedate">29/11/2006 00:10:49 +0000</td>
3191 <td class="comment">MiniUPnP daemon source code</td>
3192 <td></td>
3193</tr>
3194<tr>
3195 <td class="filename"><a href='download.php?file=miniupnpd20061127.tar.gz'>miniupnpd20061127.tar.gz</a></td>
3196 <td class="filesize">34155</td>
3197 <td class="filedate">26/11/2006 23:15:28 +0000</td>
3198 <td class="comment">MiniUPnP daemon source code</td>
3199 <td></td>
3200</tr>
3201<tr>
3202 <td class="filename"><a href='download.php?file=miniupnpc20061123.tar.gz'>miniupnpc20061123.tar.gz</a></td>
3203 <td class="filesize">21004</td>
3204 <td class="filedate">23/11/2006 22:41:46 +0000</td>
3205 <td class="comment">MiniUPnP client source code</td>
3206 <td></td>
3207</tr>
3208<tr>
3209 <td class="filename" colspan="2"><a href='download.php?file=miniupnpd-bin-openwrt20061123.tar.gz'>miniupnpd-bin-openwrt20061123.tar.gz</a></td>
3210 <td class="filedate">23/11/2006 22:41:44 +0000</td>
3211 <td class="comment">Precompiled binaries for openwrt</td>
3212 <td></td>
3213</tr>
3214<tr>
3215 <td class="filename"><a href='download.php?file=miniupnpd20061123.tar.gz'>miniupnpd20061123.tar.gz</a></td>
3216 <td class="filesize">33809</td>
3217 <td class="filedate">23/11/2006 22:28:29 +0000</td>
3218 <td class="comment">MiniUPnP daemon source code</td>
3219 <td></td>
3220</tr>
3221<tr>
3222 <td class="filename"><a href='download.php?file=miniupnpc20061119.tar.gz'>miniupnpc20061119.tar.gz</a></td>
3223 <td class="filesize">20897</td>
3224 <td class="filedate">19/11/2006 22:50:37 +0000</td>
3225 <td class="comment">MiniUPnP client source code</td>
3226 <td></td>
3227</tr>
3228<tr>
3229 <td class="filename"><a href='download.php?file=miniupnpd20061119.tar.gz'>miniupnpd20061119.tar.gz</a></td>
3230 <td class="filesize">32580</td>
3231 <td class="filedate">19/11/2006 22:50:36 +0000</td>
3232 <td class="comment">MiniUPnP daemon source code</td>
3233 <td></td>
3234</tr>
3235<tr>
3236 <td class="filename"><a href='download.php?file=miniupnpd20061117.tar.gz'>miniupnpd20061117.tar.gz</a></td>
3237 <td class="filesize">32646</td>
3238 <td class="filedate">17/11/2006 13:29:33 +0000</td>
3239 <td class="comment">MiniUPnP daemon source code</td>
3240 <td></td>
3241</tr>
3242<tr>
3243 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061112.zip'>upnpc-exe-win32-20061112.zip</a></td>
3244 <td class="filesize">10262</td>
3245 <td class="filedate">12/11/2006 22:41:25 +0000</td>
3246 <td class="comment">Windows executable</td>
3247 <td></td>
3248</tr>
3249<tr>
3250 <td class="filename"><a href='download.php?file=miniupnpd20061112.tar.gz'>miniupnpd20061112.tar.gz</a></td>
3251 <td class="filesize">32023</td>
3252 <td class="filedate">12/11/2006 21:30:32 +0000</td>
3253 <td class="comment">MiniUPnP daemon source code</td>
3254 <td></td>
3255</tr>
3256<tr>
3257 <td class="filename"><a href='download.php?file=miniupnpc20061112.tar.gz'>miniupnpc20061112.tar.gz</a></td>
3258 <td class="filesize">21047</td>
3259 <td class="filedate">12/11/2006 21:30:31 +0000</td>
3260 <td class="comment">MiniUPnP client source code</td>
3261 <td></td>
3262</tr>
3263<tr>
3264 <td class="filename"><a href='download.php?file=miniupnpd20061110.tar.gz'>miniupnpd20061110.tar.gz</a></td>
3265 <td class="filesize">27926</td>
3266 <td class="filedate">09/11/2006 23:35:02 +0000</td>
3267 <td class="comment">MiniUPnP daemon source code</td>
3268 <td></td>
3269</tr>
3270<tr>
3271 <td class="filename"><a href='download.php?file=miniupnpc20061110.tar.gz'>miniupnpc20061110.tar.gz</a></td>
3272 <td class="filesize">21009</td>
3273 <td class="filedate">09/11/2006 23:32:19 +0000</td>
3274 <td class="comment">MiniUPnP client source code</td>
3275 <td></td>
3276</tr>
3277<tr>
3278 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061101.zip'>upnpc-exe-win32-20061101.zip</a></td>
3279 <td class="filesize">10089</td>
3280 <td class="filedate">08/11/2006 20:35:09 +0000</td>
3281 <td class="comment">Windows executable</td>
3282 <td></td>
3283</tr>
3284<tr>
3285 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061020.zip'>upnpc-exe-win32-20061020.zip</a></td>
3286 <td class="filesize">9183</td>
3287 <td class="filedate">08/11/2006 20:35:08 +0000</td>
3288 <td class="comment">Windows executable</td>
3289 <td></td>
3290</tr>
3291<tr>
3292 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060909.zip'>upnpc-exe-win32-20060909.zip</a></td>
3293 <td class="filesize">9994</td>
3294 <td class="filedate">08/11/2006 20:35:07 +0000</td>
3295 <td class="comment">Windows executable</td>
3296 <td></td>
3297</tr>
3298<tr>
3299 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060801.zip'>upnpc-exe-win32-20060801.zip</a></td>
3300 <td class="filesize">10002</td>
3301 <td class="filedate">08/11/2006 20:35:06 +0000</td>
3302 <td class="comment">Windows executable</td>
3303 <td></td>
3304</tr>
3305<tr>
3306 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060711.zip'>upnpc-exe-win32-20060711.zip</a></td>
3307 <td class="filesize">13733</td>
3308 <td class="filedate">08/11/2006 20:35:05 +0000</td>
3309 <td class="comment">Windows executable</td>
3310 <td></td>
3311</tr>
3312<tr>
3313 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060709.zip'>upnpc-exe-win32-20060709.zip</a></td>
3314 <td class="filesize">13713</td>
3315 <td class="filedate">08/11/2006 20:35:04 +0000</td>
3316 <td class="comment">Windows executable</td>
3317 <td></td>
3318</tr>
3319<tr>
3320 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060704.zip'>upnpc-exe-win32-20060704.zip</a></td>
3321 <td class="filesize">13297</td>
3322 <td class="filedate">08/11/2006 20:35:03 +0000</td>
3323 <td class="comment">Windows executable</td>
3324 <td></td>
3325</tr>
3326<tr>
3327 <td class="filename"><a href='download.php?file=miniupnpc20061107.tar.gz'>miniupnpc20061107.tar.gz</a></td>
3328 <td class="filesize">20708</td>
3329 <td class="filedate">06/11/2006 23:36:57 +0000</td>
3330 <td class="comment">MiniUPnP client source code</td>
3331 <td></td>
3332</tr>
3333<tr>
3334 <td class="filename"><a href='download.php?file=miniupnpd20061107.tar.gz'>miniupnpd20061107.tar.gz</a></td>
3335 <td class="filesize">26992</td>
3336 <td class="filedate">06/11/2006 23:35:06 +0000</td>
3337 <td class="comment">MiniUPnP daemon source code</td>
3338 <td></td>
3339</tr>
3340<tr>
3341 <td class="filename"><a href='download.php?file=miniupnpc20061106.tar.gz'>miniupnpc20061106.tar.gz</a></td>
3342 <td class="filesize">20575</td>
3343 <td class="filedate">06/11/2006 17:02:15 +0000</td>
3344 <td class="comment">MiniUPnP client source code</td>
3345 <td></td>
3346</tr>
3347<tr>
3348 <td class="filename"><a href='download.php?file=miniupnpd20061106.tar.gz'>miniupnpd20061106.tar.gz</a></td>
3349 <td class="filesize">26597</td>
3350 <td class="filedate">06/11/2006 15:39:10 +0000</td>
3351 <td class="comment">MiniUPnP daemon source code</td>
3352 <td></td>
3353</tr>
3354<tr>
3355 <td class="filename"><a href='download.php?file=miniupnpc20061101.tar.gz'>miniupnpc20061101.tar.gz</a></td>
3356 <td class="filesize">20395</td>
3357 <td class="filedate">04/11/2006 18:16:15 +0000</td>
3358 <td class="comment">MiniUPnP client source code</td>
3359 <td></td>
3360</tr>
3361<tr>
3362 <td class="filename"><a href='download.php?file=miniupnpc20061031.tar.gz'>miniupnpc20061031.tar.gz</a></td>
3363 <td class="filesize">20396</td>
3364 <td class="filedate">04/11/2006 18:16:13 +0000</td>
3365 <td class="comment">MiniUPnP client source code</td>
3366 <td></td>
3367</tr>
3368<tr>
3369 <td class="filename"><a href='download.php?file=miniupnpc20061023.tar.gz'>miniupnpc20061023.tar.gz</a></td>
3370 <td class="filesize">20109</td>
3371 <td class="filedate">04/11/2006 18:16:12 +0000</td>
3372 <td class="comment">MiniUPnP client source code</td>
3373 <td></td>
3374</tr>
3375<tr>
3376 <td class="filename"><a href='download.php?file=miniupnpc20061020.tar.gz'>miniupnpc20061020.tar.gz</a></td>
3377 <td class="filesize">19739</td>
3378 <td class="filedate">04/11/2006 18:16:10 +0000</td>
3379 <td class="comment">MiniUPnP client source code</td>
3380 <td></td>
3381</tr>
3382<tr>
3383 <td class="filename"><a href='download.php?file=miniupnpc20060909.tar.gz'>miniupnpc20060909.tar.gz</a></td>
3384 <td class="filesize">19285</td>
3385 <td class="filedate">04/11/2006 18:16:09 +0000</td>
3386 <td class="comment">MiniUPnP client source code</td>
3387 <td></td>
3388</tr>
3389<tr>
3390 <td class="filename"><a href='download.php?file=miniupnpc20060731.tar.gz'>miniupnpc20060731.tar.gz</a></td>
3391 <td class="filesize">19032</td>
3392 <td class="filedate">04/11/2006 18:16:07 +0000</td>
3393 <td class="comment">MiniUPnP client source code</td>
3394 <td></td>
3395</tr>
3396<tr>
3397 <td class="filename"><a href='download.php?file=miniupnpc20060711.tar.gz'>miniupnpc20060711.tar.gz</a></td>
3398 <td class="filesize">19151</td>
3399 <td class="filedate">04/11/2006 18:16:06 +0000</td>
3400 <td class="comment">MiniUPnP client source code</td>
3401 <td></td>
3402</tr>
3403<tr>
3404 <td class="filename"><a href='download.php?file=miniupnpc20060709.tar.gz'>miniupnpc20060709.tar.gz</a></td>
3405 <td class="filesize">19080</td>
3406 <td class="filedate">04/11/2006 18:16:04 +0000</td>
3407 <td class="comment">MiniUPnP client source code</td>
3408 <td></td>
3409</tr>
3410<tr>
3411 <td class="filename"><a href='download.php?file=miniupnpc20060703.tar.gz'>miniupnpc20060703.tar.gz</a></td>
3412 <td class="filesize">17906</td>
3413 <td class="filedate">04/11/2006 18:16:03 +0000</td>
3414 <td class="comment">MiniUPnP client source code</td>
3415 <td></td>
3416</tr>
3417<tr>
3418 <td class="filename"><a href='download.php?file=miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
3419 <td class="filesize">14840</td>
3420 <td class="filedate">04/11/2006 18:16:01 +0000</td>
3421 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
3422 <td></td>
3423</tr>
3424<tr>
3425 <td class="filename"><a href='download.php?file=miniupnpd20061029.tar.gz'>miniupnpd20061029.tar.gz</a></td>
3426 <td class="filesize">24197</td>
3427 <td class="filedate">03/11/2006 13:40:30 +0000</td>
3428 <td class="comment">MiniUPnP daemon source code</td>
3429 <td></td>
3430</tr>
3431<tr>
3432 <td class="filename"><a href='download.php?file=miniupnpd20061027.tar.gz'>miniupnpd20061027.tar.gz</a></td>
3433 <td class="filesize">23904</td>
3434 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3435 <td class="comment">MiniUPnP daemon source code</td>
3436 <td></td>
3437</tr>
3438<tr>
3439 <td class="filename"><a href='download.php?file=miniupnpd20061028.tar.gz'>miniupnpd20061028.tar.gz</a></td>
3440 <td class="filesize">24383</td>
3441 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3442 <td class="comment">MiniUPnP daemon source code</td>
3443 <td></td>
3444</tr>
3445<tr>
3446 <td class="filename"><a href='download.php?file=miniupnpd20061018.tar.gz'>miniupnpd20061018.tar.gz</a></td>
3447 <td class="filesize">23051</td>
3448 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3449 <td class="comment">MiniUPnP daemon source code</td>
3450 <td></td>
3451</tr>
3452<tr>
3453 <td class="filename"><a href='download.php?file=miniupnpd20061023.tar.gz'>miniupnpd20061023.tar.gz</a></td>
3454 <td class="filesize">23478</td>
3455 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3456 <td class="comment">MiniUPnP daemon source code</td>
3457 <td></td>
3458</tr>
3459<tr>
3460 <td class="filename"><a href='download.php?file=miniupnpd20060930.tar.gz'>miniupnpd20060930.tar.gz</a></td>
3461 <td class="filesize">22832</td>
3462 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3463 <td class="comment">MiniUPnP daemon source code</td>
3464 <td></td>
3465</tr>
3466<tr>
3467 <td class="filename"><a href='download.php?file=miniupnpd20060924.tar.gz'>miniupnpd20060924.tar.gz</a></td>
3468 <td class="filesize">22038</td>
3469 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3470 <td class="comment">MiniUPnP daemon source code</td>
3471 <td></td>
3472</tr>
3473<tr>
3474 <td class="filename"><a href='download.php?file=miniupnpd20060919.tar.gz'>miniupnpd20060919.tar.gz</a></td>
3475 <td class="filesize">21566</td>
3476 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3477 <td class="comment">MiniUPnP daemon source code</td>
3478 <td></td>
3479</tr>
3480<tr>
3481 <td class="filename"><a href='download.php?file=miniupnpd20060729.tar.gz'>miniupnpd20060729.tar.gz</a></td>
3482 <td class="filesize">19202</td>
3483 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3484 <td class="comment">MiniUPnP daemon source code</td>
3485 <td></td>
3486</tr>
3487<tr>
3488 <td class="filename"><a href='download.php?file=miniupnpd20060909.tar.gz'>miniupnpd20060909.tar.gz</a></td>
3489 <td class="filesize">19952</td>
3490 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3491 <td class="comment">MiniUPnP daemon source code</td>
3492 <td></td>
3493</tr>
3494</table>
3495
3496<p><a href="..">Home</a></p>
3497<p>Contact: miniupnp _AT_ free _DOT_ fr</p>
3498<p align="center">
3499<a href="https://validator.w3.org/check?uri=referer"><img src="https://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Transitional" height="31" width="88" /></a>
3500<a href="https://jigsaw.w3.org/css-validator/check/referer"><img style="border:0;width:88px;height:31px" src="https://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!" /></a>
3501<!--
3502 <a href="https://freshmeat.net/projects/miniupnp"><img src="https://s3.amazonaws.com/entp-tender-production/assets/bc5be96f147ec8db3c10fc017f1f53889904ef5b/fm_logo_white_150_normal.png" border="0" alt="freshmeat.net" /></a>
3503-->
3504<!-- https://futuresimple.github.com/images/github_logo.png -->
3505<!-- <a href="https://github.com/miniupnp/miniupnp"><img src="https://assets-cdn.github.com/images/modules/logos_page/GitHub-Logo.png" alt="github.com" height="31" /></a> -->
3506<a href="https://github.com/miniupnp/miniupnp"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://github.blog/wp-content/uploads/2008/12/forkme_left_green_007200.png" alt="Fork me on GitHub" /></a>
3507</p>
3508
3509<script type="text/javascript">
3510var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
3511document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
3512</script>
3513<script type="text/javascript">
3514try {
3515 var ua = 'UA-10295521';
3516 if(window.location.hostname == 'miniupnp.free.fr')
3517 ua += '-1';
3518 else if(window.location.hostname == 'miniupnp.tuxfamily.org')
3519 ua += '-2';
3520 else ua = '';
3521 if(ua != '') {
3522 var pageTracker = _gat._getTracker(ua);
3523 pageTracker._trackPageview();
3524 }
3525} catch(err) {}</script>
3526</body>
3527</html>
3528
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 85c1f79ff3..e027705bf4 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -7,7 +7,10 @@
7# 7#
8 8
9import contextlib 9import contextlib
10import shutil
10import unittest 11import unittest
12import unittest.mock
13import urllib.parse
11import hashlib 14import hashlib
12import tempfile 15import tempfile
13import collections 16import collections
@@ -17,6 +20,7 @@ import tarfile
17from bb.fetch2 import URI 20from bb.fetch2 import URI
18from bb.fetch2 import FetchMethod 21from bb.fetch2 import FetchMethod
19import bb 22import bb
23import bb.utils
20from bb.tests.support.httpserver import HTTPService 24from bb.tests.support.httpserver import HTTPService
21 25
22def skipIfNoNetwork(): 26def skipIfNoNetwork():
@@ -24,6 +28,18 @@ def skipIfNoNetwork():
24 return unittest.skip("network test") 28 return unittest.skip("network test")
25 return lambda f: f 29 return lambda f: f
26 30
31
32@contextlib.contextmanager
33def hide_directory(directory):
34 """Hide the given directory and restore it after the context is left"""
35 temp_name = directory + ".bak"
36 os.rename(directory, temp_name)
37 try:
38 yield
39 finally:
40 os.rename(temp_name, directory)
41
42
27class TestTimeout(Exception): 43class TestTimeout(Exception):
28 # Indicate to pytest that this is not a test suite 44 # Indicate to pytest that this is not a test suite
29 __test__ = False 45 __test__ = False
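
A minimal usage sketch for the hide_directory() helper added above, assuming
only the standard library and a scratch directory created for the occasion
(the directory name is illustrative, not from the patch):

    import os
    import tempfile

    scratch = tempfile.mkdtemp()

    with hide_directory(scratch):
        # Inside the context the directory has been renamed to scratch + ".bak",
        # so code under test sees it as missing.
        assert not os.path.exists(scratch)

    # The finally clause restores the original name even if the body raised.
    assert os.path.isdir(scratch)
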
@@ -323,6 +339,21 @@ class URITest(unittest.TestCase):
323 'params': {"downloadfilename" : "EGPL-T101.zip"}, 339 'params': {"downloadfilename" : "EGPL-T101.zip"},
324 'query': {"9BE0BF6657": None}, 340 'query': {"9BE0BF6657": None},
325 'relative': False 341 'relative': False
342 },
343 "file://example@.service": {
344 'uri': 'file:example%40.service',
345 'scheme': 'file',
346 'hostname': '',
347 'port': None,
348 'hostport': '',
349 'path': 'example@.service',
350 'userinfo': '',
352 'username': '',
353 'password': '',
354 'params': {},
355 'query': {},
356 'relative': True
326 } 357 }
327 358
328 } 359 }
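
The new datatable entry exercises a relative file: URL with an "@" in the
basename. A short sketch of what URITest asserts for it, assuming
bb.fetch2.URI behaves as the fields above describe:

    from bb.fetch2 import URI

    uri = URI("file://example@.service")
    assert uri.scheme == "file"
    assert uri.path == "example@.service"
    assert uri.relative                            # empty hostname, relative path
    assert str(uri) == "file:example%40.service"   # '@' is percent-encoded
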
@@ -459,16 +490,16 @@ class FetcherTest(unittest.TestCase):
459class MirrorUriTest(FetcherTest): 490class MirrorUriTest(FetcherTest):
460 491
461 replaceuris = { 492 replaceuris = {
462 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") 493 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "http://somewhere.org/somedir/")
463 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", 494 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
464 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 495 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
465 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 496 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
466 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 497 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
467 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 498 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
468 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") 499 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
469 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 500 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
470 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") 501 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
471 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", 502 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
472 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") 503 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
473 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", 504 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
474 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") 505 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
@@ -481,12 +512,12 @@ class MirrorUriTest(FetcherTest):
481 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", 512 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
482 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") 513 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
483 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", 514 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
484 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 515 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
485 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 516 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
486 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 517 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
487 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 518 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
488 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") 519 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
489 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 520 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
490 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") 521 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
491 : "http://somewhere2.org/somefile_1.2.3.tar.gz", 522 : "http://somewhere2.org/somefile_1.2.3.tar.gz",
492 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") 523 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
@@ -502,6 +533,10 @@ class MirrorUriTest(FetcherTest):
502 : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", 533 : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
503 ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz") 534 ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
504 : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", 535 : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
536 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
537 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
538 ("git://internal.git.server.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
539 : None,
505 540
506 #Renaming files doesn't work 541 #Renaming files doesn't work
507 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" 542 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
@@ -510,8 +545,8 @@ class MirrorUriTest(FetcherTest):
510 545
511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ 546 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ 547 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
513 "https://.*/.* file:///someotherpath/downloads/ " \ 548 "https?://.*/.* file:///someotherpath/downloads/ " \
514 "http://.*/.* file:///someotherpath/downloads/" 549 "svn://svn.server1.com/ svn://svn.server2.com/"
515 550
516 def test_urireplace(self): 551 def test_urireplace(self):
517 self.d.setVar("FILESPATH", ".") 552 self.d.setVar("FILESPATH", ".")
@@ -520,7 +555,7 @@ class MirrorUriTest(FetcherTest):
520 ud.setup_localpath(self.d) 555 ud.setup_localpath(self.d)
521 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2])) 556 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
522 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d) 557 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
523 self.assertEqual([v], newuris) 558 self.assertEqual([v] if v else [], newuris)
524 559
525 def test_urilist1(self): 560 def test_urilist1(self):
526 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 561 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
@@ -535,6 +570,13 @@ class MirrorUriTest(FetcherTest):
535 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 570 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
536 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) 571 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
537 572
573 def test_urilistsvn(self):
574 # Catch svn:// -> svn:// bug
575 fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
576 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
577 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
578 self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
579
538 def test_mirror_of_mirror(self): 580 def test_mirror_of_mirror(self):
539 # Test if mirror of a mirror works 581 # Test if mirror of a mirror works
540 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" 582 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
@@ -547,16 +589,16 @@ class MirrorUriTest(FetcherTest):
547 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', 589 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
548 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) 590 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
549 591
550 recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \ 592 recmirrorvar = "https://.*/[^/]* http://aaaa/A/A/A/ " \
551 "https://.*/[^/]* https://BBBB/B/B/B/" 593 "https://.*/[^/]* https://bbbb/B/B/B/"
552 594
553 def test_recursive(self): 595 def test_recursive(self):
554 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 596 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
555 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar) 597 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
556 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 598 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
557 self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz', 599 self.assertEqual(uris, ['http://aaaa/A/A/A/bitbake/bitbake-1.0.tar.gz',
558 'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz', 600 'https://bbbb/B/B/B/bitbake/bitbake-1.0.tar.gz',
559 'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz']) 601 'http://aaaa/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
560 602
561 603
562class GitDownloadDirectoryNamingTest(FetcherTest): 604class GitDownloadDirectoryNamingTest(FetcherTest):
@@ -679,7 +721,7 @@ class GitShallowTarballNamingTest(FetcherTest):
679class CleanTarballTest(FetcherTest): 721class CleanTarballTest(FetcherTest):
680 def setUp(self): 722 def setUp(self):
681 super(CleanTarballTest, self).setUp() 723 super(CleanTarballTest, self).setUp()
682 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https" 724 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https;branch=master"
683 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" 725 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
684 726
685 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') 727 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -720,6 +762,7 @@ class FetcherLocalTest(FetcherTest):
720 os.makedirs(self.localsrcdir) 762 os.makedirs(self.localsrcdir)
721 touch(os.path.join(self.localsrcdir, 'a')) 763 touch(os.path.join(self.localsrcdir, 'a'))
722 touch(os.path.join(self.localsrcdir, 'b')) 764 touch(os.path.join(self.localsrcdir, 'b'))
765 touch(os.path.join(self.localsrcdir, 'c@d'))
723 os.makedirs(os.path.join(self.localsrcdir, 'dir')) 766 os.makedirs(os.path.join(self.localsrcdir, 'dir'))
724 touch(os.path.join(self.localsrcdir, 'dir', 'c')) 767 touch(os.path.join(self.localsrcdir, 'dir', 'c'))
725 touch(os.path.join(self.localsrcdir, 'dir', 'd')) 768 touch(os.path.join(self.localsrcdir, 'dir', 'd'))
@@ -751,6 +794,10 @@ class FetcherLocalTest(FetcherTest):
751 tree = self.fetchUnpack(['file://a', 'file://dir/c']) 794 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
752 self.assertEqual(tree, ['a', 'dir/c']) 795 self.assertEqual(tree, ['a', 'dir/c'])
753 796
797 def test_local_at(self):
798 tree = self.fetchUnpack(['file://c@d'])
799 self.assertEqual(tree, ['c@d'])
800
754 def test_local_backslash(self): 801 def test_local_backslash(self):
755 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device']) 802 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device'])
756 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device']) 803 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device'])
@@ -1056,12 +1103,6 @@ class FetcherNetworkTest(FetcherTest):
1056 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1103 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
1057 1104
1058 @skipIfNoNetwork() 1105 @skipIfNoNetwork()
1059 def test_gitfetch_tagandrev(self):
1060 # SRCREV is set but does not match rev= parameter
1061 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https"
1062 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
1063
1064 @skipIfNoNetwork()
1065 def test_gitfetch_usehead(self): 1106 def test_gitfetch_usehead(self):
1066 # Since self.gitfetcher() sets SRCREV we expect this to override 1107 # Since self.gitfetcher() sets SRCREV we expect this to override
1067 # `usehead=1' and instead fetch the specified SRCREV. See 1108 # `usehead=1' and instead fetch the specified SRCREV. See
@@ -1095,7 +1136,7 @@ class FetcherNetworkTest(FetcherTest):
1095 @skipIfNoNetwork() 1136 @skipIfNoNetwork()
1096 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): 1137 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
1097 realurl = "https://git.openembedded.org/bitbake" 1138 realurl = "https://git.openembedded.org/bitbake"
1098 recipeurl = "git://someserver.org/bitbake;protocol=https" 1139 recipeurl = "git://someserver.org/bitbake;protocol=https;branch=master"
1099 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") 1140 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
1100 os.chdir(self.tempdir) 1141 os.chdir(self.tempdir)
1101 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir) 1142 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
@@ -1250,7 +1291,6 @@ class FetcherNetworkTest(FetcherTest):
1250 1291
1251class SVNTest(FetcherTest): 1292class SVNTest(FetcherTest):
1252 def skipIfNoSvn(): 1293 def skipIfNoSvn():
1253 import shutil
1254 if not shutil.which("svn"): 1294 if not shutil.which("svn"):
1255 return unittest.skip("svn not installed, tests being skipped") 1295 return unittest.skip("svn not installed, tests being skipped")
1256 1296
@@ -1373,15 +1413,17 @@ class TrustedNetworksTest(FetcherTest):
1373 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1413 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1374 1414
1375class URLHandle(unittest.TestCase): 1415class URLHandle(unittest.TestCase):
1376 1416 # Quote password as per RFC3986
1417 password = urllib.parse.quote(r"!#$%^&*()-_={}[]\|:?,.<>~`", r"!$&'/()*+,;=")
1377 datatable = { 1418 datatable = {
1378 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), 1419 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
1379 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), 1420 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
1380 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), 1421 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
1381 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}), 1422 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
1382 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), 1423 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
1424 "file://example@.service": ('file', '', 'example@.service', '', '', {}),
1383 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}), 1425 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
1384 r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}), 1426 'git://s.o-me_ONE:%s@git.openembedded.org/bitbake;branch=main;protocol=https' % password: ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', password, {'branch': 'main', 'protocol' : 'https'}),
1385 } 1427 }
1386 # we require a pathname to encodeurl but users can still pass such urls to 1428 # we require a pathname to encodeurl but users can still pass such urls to
1387 # decodeurl and we need to handle them 1429 # decodeurl and we need to handle them
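
A brief sketch of the quoting convention used for the password above, assuming
only the standard library; characters listed in the second argument of
urllib.parse.quote() are treated as safe and left unescaped, which keeps the
RFC 3986 sub-delims literal in the userinfo part:

    import urllib.parse

    raw = r"!#$%^&*()-_={}[]\|:?,.<>~`"
    quoted = urllib.parse.quote(raw, r"!$&'/()*+,;=")

    # Everything else is percent-encoded, and the round trip is lossless.
    assert urllib.parse.unquote(quoted) == raw
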
@@ -1399,6 +1441,8 @@ class URLHandle(unittest.TestCase):
1399 def test_encodeurl(self): 1441 def test_encodeurl(self):
1400 for k, v in self.datatable.items(): 1442 for k, v in self.datatable.items():
1401 result = bb.fetch.encodeurl(v) 1443 result = bb.fetch.encodeurl(v)
1444 if result.startswith("file:"):
1445 result = urllib.parse.unquote(result)
1402 self.assertEqual(result, k) 1446 self.assertEqual(result, k)
1403 1447
1404class FetchLatestVersionTest(FetcherTest): 1448class FetchLatestVersionTest(FetcherTest):
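
The new file: branch in test_encodeurl() exists because encodeurl()
percent-encodes the path, so the '@' comes back as %40; unquoting first lets
the result compare equal to the original key. A sketch of the round trip being
asserted, using the decoded tuple from the datatable above:

    import urllib.parse
    import bb.fetch2

    decoded = ('file', '', 'example@.service', '', '', {})
    result = bb.fetch2.encodeurl(decoded)
    assert urllib.parse.unquote(result) == "file://example@.service"
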
@@ -1419,12 +1463,12 @@ class FetchLatestVersionTest(FetcherTest):
1419 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "") 1463 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
1420 : "1.4.0", 1464 : "1.4.0",
1421 # combination version pattern 1465 # combination version pattern
1422 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") 1466 ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1423 : "1.2.0", 1467 : "1.2.0",
1424 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") 1468 ("u-boot-mkimage", "git://git.yoctoproject.org/bbfetchtests-u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1425 : "2014.01", 1469 : "2014.01",
1426 # version pattern "yyyymmdd" 1470 # version pattern "yyyymmdd"
1427 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") 1471 ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
1428 : "20120614", 1472 : "20120614",
1429 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1473 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1430 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1474 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
@@ -1440,98 +1484,126 @@ class FetchLatestVersionTest(FetcherTest):
1440 : "0.28.0", 1484 : "0.28.0",
1441 } 1485 }
1442 1486
1487 WgetTestData = collections.namedtuple("WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"], defaults=[None, None, None])
1443 test_wget_uris = { 1488 test_wget_uris = {
1444 # 1489 #
1445 # packages with versions inside directory name 1490 # packages with versions inside directory name
1446 # 1491 #
1447 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 1492 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2
1448 ("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "") 1493 WgetTestData("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2")
1449 : "2.24.2", 1494 : "2.24.2",
1450 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz 1495 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz
1451 ("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "") 1496 WgetTestData("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz")
1452 : "1.6.0", 1497 : "1.6.0",
1453 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 1498 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
1454 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") 1499 WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
1455 : "2.8.12.1", 1500 : "2.8.12.1",
1456 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz 1501 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
1457 ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "") 1502 WgetTestData("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz")
1458 : "2.10.3", 1503 : "2.10.3",
1459 # 1504 #
1460 # packages with versions only in current directory 1505 # packages with versions only in current directory
1461 # 1506 #
1462 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 1507 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
1463 ("eglibc", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") 1508 WgetTestData("eglibc", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2")
1464 : "2.19", 1509 : "2.19",
1465 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 1510 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
1466 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") 1511 WgetTestData("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2")
1467 : "20120814", 1512 : "20120814",
1468 # 1513 #
1469 # packages with "99" in the name of possible version 1514 # packages with "99" in the name of possible version
1470 # 1515 #
1471 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz 1516 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz
1472 ("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "") 1517 WgetTestData("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz")
1473 : "5.0", 1518 : "5.0",
1474 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 1519 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2
1475 ("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "") 1520 WgetTestData("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2")
1476 : "1.15.1", 1521 : "1.15.1",
1477 # 1522 #
1478 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX 1523 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
1479 # 1524 #
1480 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1525 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1481 # https://github.com/apple/cups/releases 1526 # https://github.com/apple/cups/releases
1482 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1527 WgetTestData("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", check_uri="/apple/cups/releases", check_regex=r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1483 : "2.0.0", 1528 : "2.0.0",
1484 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1529 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1485 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1530 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1486 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1531 WgetTestData("db", "/berkeley-db/db-5.3.21.tar.gz", check_uri="/debian/pool/main/d/db5.3/", check_regex=r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1487 : "5.3.10", 1532 : "5.3.10",
1488 # 1533 #
1489 # packages where the tarball compression changed in the new version 1534 # packages where the tarball compression changed in the new version
1490 # 1535 #
1491 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz 1536 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
1492 ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "") 1537 WgetTestData("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz")
1493 : "2.8", 1538 : "2.8",
1539
1540 #
1541 # packages where the path doesn't actually contain the filename, so downloadfilename should be respected
1542 #
1543 WgetTestData("miniupnpd", "/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz;downloadfilename=miniupnpd_2.1.20191006.tar.gz", pv="2.1.20191006", check_uri="/software/miniupnp/download.php", check_regex=r"miniupnpd-(?P<pver>\d+(\.\d+)+)\.tar")
1544 : "2.3.7",
1494 } 1545 }
1495 1546
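WgetTestData above relies on namedtuple field defaults (Python 3.7+). Defaults apply to the rightmost fields, so pv, check_uri and check_regex may all be omitted at call sites. A self-contained sketch of the same pattern:

    import collections

    WgetTestData = collections.namedtuple(
        "WgetTestData",
        ["pn", "path", "pv", "check_uri", "check_regex"],
        defaults=[None, None, None])

    # only pn and path are required; the trailing fields fall back to None
    entry = WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
    assert entry.pv is None and entry.check_regex is None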
1547 test_crate_uris = {
1548 # basic example; version pattern "A.B.C+cargo-D.E.F"
1549 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1550 : "0.9.29"
1551 }
1552
1496 @skipIfNoNetwork() 1553 @skipIfNoNetwork()
1497 def test_git_latest_versionstring(self): 1554 def test_git_latest_versionstring(self):
1498 for k, v in self.test_git_uris.items(): 1555 for k, v in self.test_git_uris.items():
1499 self.d.setVar("PN", k[0]) 1556 with self.subTest(pn=k[0]):
1500 self.d.setVar("SRCREV", k[2]) 1557 self.d.setVar("PN", k[0])
1501 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) 1558 self.d.setVar("SRCREV", k[2])
1502 ud = bb.fetch2.FetchData(k[1], self.d) 1559 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
1503 pupver= ud.method.latest_versionstring(ud, self.d) 1560 ud = bb.fetch2.FetchData(k[1], self.d)
1504 verstring = pupver[0] 1561 pupver= ud.method.latest_versionstring(ud, self.d)
1505 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1562 verstring = pupver[0]
1506 r = bb.utils.vercmp_string(v, verstring) 1563 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1507 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1564 r = bb.utils.vercmp_string(v, verstring)
1508 if k[4]: 1565 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1509 r = bb.utils.vercmp_string(verstring, k[4]) 1566 if k[4]:
1510 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4])) 1567 r = bb.utils.vercmp_string(verstring, k[4])
1568 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
1511 1569
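Wrapping each datatable entry in subTest() means one failing package no longer aborts the whole loop: unittest records and reports every failing entry individually. A minimal sketch of the pattern, using a hypothetical datatable:

    import unittest

    class DatatableTest(unittest.TestCase):
        datatable = {"pkg-a": "1.0", "pkg-b": "2.0"}  # hypothetical entries

        def test_entries(self):
            for pn, expected in self.datatable.items():
                # a failure here is recorded for this pn and the loop continues
                with self.subTest(pn=pn):
                    self.assertTrue(expected)

    if __name__ == "__main__":
        unittest.main()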
1512 def test_wget_latest_versionstring(self): 1570 def test_wget_latest_versionstring(self):
1513 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1571 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
1514 server = HTTPService(testdata) 1572 server = HTTPService(testdata, host="127.0.0.1")
1515 server.start() 1573 server.start()
1516 port = server.port 1574 port = server.port
1517 try: 1575 try:
1518 for k, v in self.test_wget_uris.items(): 1576 for data, v in self.test_wget_uris.items():
1577 with self.subTest(pn=data.pn):
1578 self.d.setVar("PN", data.pn)
1579 self.d.setVar("PV", data.pv)
1580 if data.check_uri:
1581 checkuri = "http://127.0.0.1:%s/%s" % (port, data.check_uri)
1582 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1583 if data.check_regex:
1584 self.d.setVar("UPSTREAM_CHECK_REGEX", data.check_regex)
1585
1586 url = "http://127.0.0.1:%s/%s" % (port, data.path)
1587 ud = bb.fetch2.FetchData(url, self.d)
1588 pupver = ud.method.latest_versionstring(ud, self.d)
1589 verstring = pupver[0]
1590 self.assertTrue(verstring, msg="Could not find upstream version for %s" % data.pn)
1591 r = bb.utils.vercmp_string(v, verstring)
1592 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (data.pn, v, verstring))
1593 finally:
1594 server.stop()
1595
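HTTPService is the test-support helper serving fetch-testdata; binding it to 127.0.0.1 and building every URL from the assigned port avoids both localhost/IPv6 ambiguity and fixed-port collisions. A stdlib-only stand-in showing the same idea (not the helper itself):

    import http.server
    import threading

    handler = http.server.SimpleHTTPRequestHandler
    # port 0 lets the OS pick a free port; the chosen one is read back below
    httpd = http.server.ThreadingHTTPServer(("127.0.0.1", 0), handler)
    threading.Thread(target=httpd.serve_forever, daemon=True).start()
    url = "http://127.0.0.1:%s/somefile" % httpd.server_port
    httpd.shutdown()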
1596 @skipIfNoNetwork()
1597 def test_crate_latest_versionstring(self):
1598 for k, v in self.test_crate_uris.items():
1599 with self.subTest(pn=k[0]):
1519 self.d.setVar("PN", k[0]) 1600 self.d.setVar("PN", k[0])
1520 checkuri = "" 1601 ud = bb.fetch2.FetchData(k[1], self.d)
1521 if k[2]:
1522 checkuri = "http://localhost:%s/" % port + k[2]
1523 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1524 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
1525 url = "http://localhost:%s/" % port + k[1]
1526 ud = bb.fetch2.FetchData(url, self.d)
1527 pupver = ud.method.latest_versionstring(ud, self.d) 1602 pupver = ud.method.latest_versionstring(ud, self.d)
1528 verstring = pupver[0] 1603 verstring = pupver[0]
1529 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1604 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1530 r = bb.utils.vercmp_string(v, verstring) 1605 r = bb.utils.vercmp_string(v, verstring)
1531 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1606 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1532 finally:
1533 server.stop()
1534
1535 1607
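The crate entry above uses the "A.B.C+cargo-D.E.F" pattern, where everything after "+" is semver build metadata rather than part of the comparable version. A small illustration of splitting that suffix off (plain Python, independent of the fetcher):

    version = "0.9.18+cargo-0.69"
    # build metadata after "+" is not part of the comparable version
    base, _, build = version.partition("+")
    assert base == "0.9.18" and build == "cargo-0.69"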
1536 class FetchCheckStatusTest(FetcherTest): 1608 class FetchCheckStatusTest(FetcherTest):
1537 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1609 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
@@ -1715,6 +1787,8 @@ class GitShallowTest(FetcherTest):
1715 if cwd is None: 1787 if cwd is None:
1716 cwd = self.gitdir 1788 cwd = self.gitdir
1717 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() 1789 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines()
1790 # Resolve references into the same format as the comparison (needed by git 2.48 onwards)
1791 actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines()
1718 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() 1792 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines()
1719 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) 1793 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs)))
1720 1794
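The extra rev-parse pass is needed because git 2.48 changed how for-each-ref spells some refs; pushing both the actual and expected lists through `git rev-parse --symbolic-full-name` puts them in one canonical form before comparing. A stand-alone sketch with subprocess (assumes a git checkout at cwd):

    import subprocess

    def canonical_refs(refs, cwd):
        # expands short spellings like "master" into "refs/heads/master"
        out = subprocess.check_output(
            ["git", "rev-parse", "--symbolic-full-name"] + list(refs),
            cwd=cwd, text=True)
        return sorted(set(out.splitlines()))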
@@ -1761,7 +1835,6 @@ class GitShallowTest(FetcherTest):
1761 def fetch_shallow(self, uri=None, disabled=False, keepclone=False): 1835 def fetch_shallow(self, uri=None, disabled=False, keepclone=False):
1762 """Fetch a uri, generating a shallow tarball, then unpack using it""" 1836 """Fetch a uri, generating a shallow tarball, then unpack using it"""
1763 fetcher, ud = self.fetch_and_unpack(uri) 1837 fetcher, ud = self.fetch_and_unpack(uri)
1764 assert os.path.exists(ud.clonedir), 'Git clone in DLDIR (%s) does not exist for uri %s' % (ud.clonedir, uri)
1765 1838
1766 # Confirm that the unpacked repo is unshallow 1839 # Confirm that the unpacked repo is unshallow
1767 if not disabled: 1840 if not disabled:
@@ -1769,9 +1842,10 @@ class GitShallowTest(FetcherTest):
1769 1842
1770 # fetch and unpack, from the shallow tarball 1843 # fetch and unpack, from the shallow tarball
1771 bb.utils.remove(self.gitdir, recurse=True) 1844 bb.utils.remove(self.gitdir, recurse=True)
1772 bb.process.run('chmod u+w -R "%s"' % ud.clonedir) 1845 if os.path.exists(ud.clonedir):
1773 bb.utils.remove(ud.clonedir, recurse=True) 1846 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1774 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1847 bb.utils.remove(ud.clonedir, recurse=True)
1848 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1775 1849
1776 # confirm that the unpacked repo is used when no git clone or git 1850 # confirm that the unpacked repo is used when no git clone or git
1777 # mirror tarball is available 1851 # mirror tarball is available
@@ -1854,7 +1928,12 @@ class GitShallowTest(FetcherTest):
1854 self.add_empty_file('c') 1928 self.add_empty_file('c')
1855 self.assertRevCount(3, cwd=self.srcdir) 1929 self.assertRevCount(3, cwd=self.srcdir)
1856 1930
1931 # Clone without tarball
1932 self.d.setVar('BB_GIT_SHALLOW', '0')
1933 fetcher, ud = self.fetch()
1934
1857 # Clone and generate mirror tarball 1935 # Clone and generate mirror tarball
1936 self.d.setVar('BB_GIT_SHALLOW', '1')
1858 fetcher, ud = self.fetch() 1937 fetcher, ud = self.fetch()
1859 1938
1860 # Ensure we have a current mirror tarball, but an out of date clone 1939 # Ensure we have a current mirror tarball, but an out of date clone
@@ -1866,6 +1945,7 @@ class GitShallowTest(FetcherTest):
1866 fetcher, ud = self.fetch() 1945 fetcher, ud = self.fetch()
1867 fetcher.unpack(self.d.getVar('WORKDIR')) 1946 fetcher.unpack(self.d.getVar('WORKDIR'))
1868 self.assertRevCount(1) 1947 self.assertRevCount(1)
1948 assert os.path.exists(os.path.join(self.d.getVar('WORKDIR'), 'git', 'c'))
1869 1949
1870 def test_shallow_single_branch_no_merge(self): 1950 def test_shallow_single_branch_no_merge(self):
1871 self.add_empty_file('a') 1951 self.add_empty_file('a')
@@ -1963,7 +2043,7 @@ class GitShallowTest(FetcherTest):
1963 self.git('submodule update', cwd=self.srcdir) 2043 self.git('submodule update', cwd=self.srcdir)
1964 self.git('commit -m submodule -a', cwd=self.srcdir) 2044 self.git('commit -m submodule -a', cwd=self.srcdir)
1965 2045
1966 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2046 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1967 2047
1968 # Fetch once to generate the shallow tarball 2048 # Fetch once to generate the shallow tarball
1969 fetcher, ud = self.fetch(uri) 2049 fetcher, ud = self.fetch(uri)
@@ -2004,70 +2084,17 @@ class GitShallowTest(FetcherTest):
2004 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] 2084 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0]
2005 assert os.path.exists(os.path.join(self.gitdir, 'c')) 2085 assert os.path.exists(os.path.join(self.gitdir, 'c'))
2006 2086
2007 def test_shallow_multi_one_uri(self):
2008 # Create initial git repo
2009 self.add_empty_file('a')
2010 self.add_empty_file('b')
2011 self.git('checkout -b a_branch', cwd=self.srcdir)
2012 self.add_empty_file('c')
2013 self.add_empty_file('d')
2014 self.git('checkout master', cwd=self.srcdir)
2015 self.git('tag v0.0 a_branch', cwd=self.srcdir)
2016 self.add_empty_file('e')
2017 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
2018 self.add_empty_file('f')
2019 self.assertRevCount(7, cwd=self.srcdir)
2020
2021 uri = self.d.getVar('SRC_URI').split()[0]
2022 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
2023
2024 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2025 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2026 self.d.setVar('SRCREV_master', '${AUTOREV}')
2027 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
2028
2029 self.fetch_shallow(uri)
2030
2031 self.assertRevCount(5)
2032 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
2033
2034 def test_shallow_multi_one_uri_depths(self):
2035 # Create initial git repo
2036 self.add_empty_file('a')
2037 self.add_empty_file('b')
2038 self.git('checkout -b a_branch', cwd=self.srcdir)
2039 self.add_empty_file('c')
2040 self.add_empty_file('d')
2041 self.git('checkout master', cwd=self.srcdir)
2042 self.add_empty_file('e')
2043 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
2044 self.add_empty_file('f')
2045 self.assertRevCount(7, cwd=self.srcdir)
2046
2047 uri = self.d.getVar('SRC_URI').split()[0]
2048 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
2049
2050 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2051 self.d.setVar('BB_GIT_SHALLOW_DEPTH_master', '3')
2052 self.d.setVar('BB_GIT_SHALLOW_DEPTH_a_branch', '1')
2053 self.d.setVar('SRCREV_master', '${AUTOREV}')
2054 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
2055
2056 self.fetch_shallow(uri)
2057
2058 self.assertRevCount(4, ['--all'])
2059 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
2060
2061 def test_shallow_clone_preferred_over_shallow(self): 2087 def test_shallow_clone_preferred_over_shallow(self):
2062 self.add_empty_file('a') 2088 self.add_empty_file('a')
2063 self.add_empty_file('b') 2089 self.add_empty_file('b')
2064 2090
2065 # Fetch once to generate the shallow tarball 2091 # Fetch once to generate the shallow tarball
2092 self.d.setVar('BB_GIT_SHALLOW', '0')
2066 fetcher, ud = self.fetch() 2093 fetcher, ud = self.fetch()
2067 assert os.path.exists(os.path.join(self.dldir, ud.mirrortarballs[0]))
2068 2094
2069 # Fetch and unpack with both the clonedir and shallow tarball available 2095 # Fetch and unpack with both the clonedir and shallow tarball available
2070 bb.utils.remove(self.gitdir, recurse=True) 2096 bb.utils.remove(self.gitdir, recurse=True)
2097 self.d.setVar('BB_GIT_SHALLOW', '1')
2071 fetcher, ud = self.fetch_and_unpack() 2098 fetcher, ud = self.fetch_and_unpack()
2072 2099
2073 # The unpacked tree should *not* be shallow 2100 # The unpacked tree should *not* be shallow
@@ -2175,7 +2202,7 @@ class GitShallowTest(FetcherTest):
2175 2202
2176 self.fetch_shallow() 2203 self.fetch_shallow()
2177 2204
2178 self.assertRevCount(5) 2205 self.assertRevCount(2)
2179 2206
2180 def test_shallow_invalid_revs(self): 2207 def test_shallow_invalid_revs(self):
2181 self.add_empty_file('a') 2208 self.add_empty_file('a')
@@ -2194,7 +2221,10 @@ class GitShallowTest(FetcherTest):
2194 self.git('tag v0.0 master', cwd=self.srcdir) 2221 self.git('tag v0.0 master', cwd=self.srcdir)
2195 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') 2222 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2196 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') 2223 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2197 self.fetch_shallow() 2224
2225 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm:
2226 self.fetch_shallow()
2227 self.assertIn("fatal: no commits selected for shallow requests", cm.output[0])
2198 2228
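Stacking assertRaises and assertLogs in a single with-statement asserts two things at once: the block raises, and an ERROR record is captured for inspection via cm.output. A self-contained sketch of the combination:

    import logging
    import unittest

    class CombinedTest(unittest.TestCase):
        def test_raises_and_logs(self):
            with self.assertRaises(ValueError), \
                    self.assertLogs("demo", level="ERROR") as cm:
                logging.getLogger("demo").error("fatal: no commits selected")
                raise ValueError("shallow fetch failed")
            self.assertIn("fatal: no commits selected", cm.output[0])

    if __name__ == "__main__":
        unittest.main()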
2199 def test_shallow_fetch_missing_revs_fails(self): 2229 def test_shallow_fetch_missing_revs_fails(self):
2200 self.add_empty_file('a') 2230 self.add_empty_file('a')
@@ -2208,6 +2238,33 @@ class GitShallowTest(FetcherTest):
2208 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) 2238 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0])
2209 2239
2210 @skipIfNoNetwork() 2240 @skipIfNoNetwork()
2241 def test_git_shallow_fetch_premirrors(self):
2242 url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
2243
2244 # Create a separate premirror directory within tempdir
2245 premirror = os.path.join(self.tempdir, "premirror")
2246 os.mkdir(premirror)
2247
2248 # Fetch a non-shallow clone into the premirror subdir
2249 self.d.setVar('BB_GIT_SHALLOW', '0')
2250 self.d.setVar("DL_DIR", premirror)
2251 fetcher, ud = self.fetch(url)
2252
2253 # Fetch a shallow clone from the premirror subdir with unpacking
2254 # using the original recipe URL and the premirror mapping
2255 self.d.setVar('BB_GIT_SHALLOW', '1')
2256 self.d.setVar("DL_DIR", self.dldir)
2257 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2258 self.d.setVar('BB_NO_NETWORK', '1')
2259 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
2260 self.d.setVar("PREMIRRORS", "git://.*/.* git://{0};protocol=file".format(premirror + "/git2/" + ud.host + ud.path.replace("/", ".")))
2261 fetcher = self.fetch_and_unpack(url)
2262
2263 # Verify that the unpacked sources are shallow clones
2264 self.assertRevCount(1)
2265 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2266
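The PREMIRRORS value above is a space-separated "regex replacement" pair, with the replacement pointing at the bare clone the first fetch left in the premirror's git2/ directory. How the string is assembled, with hypothetical values standing in for ud.host and ud.path:

    host = "git.openembedded.org"   # stands in for ud.host
    path = "/bitbake"               # stands in for ud.path
    premirror = "/tmp/premirror"    # stands in for the test's premirror dir
    mapping = "git://.*/.* git://{0};protocol=file".format(
        premirror + "/git2/" + host + path.replace("/", "."))
    # -> "git://.*/.* git:///tmp/premirror/git2/git.openembedded.org.bitbake;protocol=file"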
2267 @skipIfNoNetwork()
2211 def test_bitbake(self): 2268 def test_bitbake(self):
2212 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir) 2269 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
2213 self.git('config core.bare true', cwd=self.srcdir) 2270 self.git('config core.bare true', cwd=self.srcdir)
@@ -2225,7 +2282,7 @@ class GitShallowTest(FetcherTest):
2225 revs = len(self.git('rev-list master').splitlines()) 2282 revs = len(self.git('rev-list master').splitlines())
2226 self.assertNotEqual(orig_revs, revs) 2283 self.assertNotEqual(orig_revs, revs)
2227 self.assertRefs(['master', 'origin/master']) 2284 self.assertRefs(['master', 'origin/master'])
2228 self.assertRevCount(orig_revs - 1758) 2285 self.assertRevCount(orig_revs - 1760)
2229 2286
2230 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): 2287 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
2231 self.add_empty_file('a') 2288 self.add_empty_file('a')
@@ -2239,23 +2296,33 @@ class GitShallowTest(FetcherTest):
2239 self.assertIn("No up to date source found", context.exception.msg) 2296 self.assertIn("No up to date source found", context.exception.msg)
2240 self.assertIn("clone directory not available or not up to date", context.exception.msg) 2297 self.assertIn("clone directory not available or not up to date", context.exception.msg)
2241 2298
2242 @skipIfNoNetwork() 2299 def test_shallow_check_is_shallow(self):
2243 def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self): 2300 self.add_empty_file('a')
2244 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') 2301 self.add_empty_file('b')
2245 self.d.setVar('BB_GIT_SHALLOW', '1')
2246 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
2247 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d)
2248 fetcher.download()
2249 2302
2250 bb.utils.remove(self.dldir + "/*.tar.gz") 2303 # Fetch and unpack without the clonedir and *only* shallow tarball available
2251 fetcher.unpack(self.unpackdir) 2304 bb.utils.remove(self.gitdir, recurse=True)
2305 fetcher, ud = self.fetch_and_unpack()
2252 2306
2253 dir = os.listdir(self.unpackdir + "/git/") 2307 # The unpacked tree *should* be shallow
2254 self.assertIn("fstests.doap", dir) 2308 self.assertRevCount(1)
2309 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2310
2311 def test_shallow_succeeds_with_tag_containing_slash(self):
2312 self.add_empty_file('a')
2313 self.add_empty_file('b')
2314 self.git('tag t1/t2/t3', cwd=self.srcdir)
2315 self.assertRevCount(2, cwd=self.srcdir)
2316
2317 srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
2318 self.d.setVar('SRCREV', srcrev)
2319 uri = self.d.getVar('SRC_URI').split()[0]
2320 uri = '%s;tag=t1/t2/t3' % uri
2321 self.fetch_shallow(uri)
2322 self.assertRevCount(1)
2255 2323
2256 class GitLfsTest(FetcherTest): 2324 class GitLfsTest(FetcherTest):
2257 def skipIfNoGitLFS(): 2325 def skipIfNoGitLFS():
2258 import shutil
2259 if not shutil.which('git-lfs'): 2326 if not shutil.which('git-lfs'):
2260 return unittest.skip('git-lfs not installed') 2327 return unittest.skip('git-lfs not installed')
2261 return lambda f: f 2328 return lambda f: f
@@ -2279,12 +2346,18 @@ class GitLfsTest(FetcherTest):
2279 self.git_init(cwd=self.srcdir) 2346 self.git_init(cwd=self.srcdir)
2280 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text') 2347 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
2281 2348
2282 def commit_file(self, filename, content): 2349 def commit(self, *, cwd=None):
2283 with open(os.path.join(self.srcdir, filename), "w") as f: 2350 cwd = cwd or self.srcdir
2351 self.git(["commit", "-m", "Change"], cwd=cwd)
2352 return self.git(["rev-parse", "HEAD"], cwd=cwd).strip()
2353
2354 def commit_file(self, filename, content, *, cwd=None):
2355 cwd = cwd or self.srcdir
2356
2357 with open(os.path.join(cwd, filename), "w") as f:
2284 f.write(content) 2358 f.write(content)
2285 self.git(["add", filename], cwd=self.srcdir) 2359 self.git(["add", filename], cwd=cwd)
2286 self.git(["commit", "-m", "Change"], cwd=self.srcdir) 2360 return self.commit(cwd=cwd)
2287 return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip()
2288 2361
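The refactored helpers take cwd as a keyword-only argument (the bare "*" in the signature), so call sites must spell cwd= explicitly and the srcdir default stays in one place. A compact sketch of the same shape:

    import subprocess

    def commit(*, cwd):
        # "*" makes cwd keyword-only: commit(cwd=path) works, commit(path) raises
        subprocess.run(["git", "commit", "-m", "Change"], cwd=cwd, check=True)
        return subprocess.check_output(
            ["git", "rev-parse", "HEAD"], cwd=cwd, text=True).strip()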
2289 def fetch(self, uri=None, download=True): 2362 def fetch(self, uri=None, download=True):
2290 uris = self.d.getVar('SRC_URI').split() 2363 uris = self.d.getVar('SRC_URI').split()
@@ -2305,25 +2378,112 @@ class GitLfsTest(FetcherTest):
2305 return unpacked_lfs_file 2378 return unpacked_lfs_file
2306 2379
2307 @skipIfNoGitLFS() 2380 @skipIfNoGitLFS()
2381 def test_gitsm_lfs(self):
2382 """Test that the gitsm fetcher caches objects stored via LFS"""
2383 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2384
2385 def fetch_and_verify(revision, filename, content):
2386 self.d.setVar('SRCREV', revision)
2387 fetcher, ud = self.fetch()
2388
2389 with hide_directory(submoduledir), hide_directory(self.srcdir):
2390 workdir = self.d.getVar('WORKDIR')
2391 fetcher.unpack(workdir)
2392
2393 with open(os.path.join(workdir, "git", filename)) as f:
2394 self.assertEqual(f.read(), content)
2395
2396 # Create the git repository that will later be used as a submodule
2397 submoduledir = self.tempdir + "/submodule"
2398 bb.utils.mkdirhier(submoduledir)
2399 self.git_init(submoduledir)
2400 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2401 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2402
2403 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2404 _ = self.commit_file("a.mp3", "submodule version 2", cwd=submoduledir)
2405
2406 # Add the submodule to the repository at its current HEAD revision
2407 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2408 cwd=self.srcdir)
2409 base_commit_1 = self.commit()
2410
2411 # Let the submodule point at a different revision
2412 self.git(["checkout", submodule_commit_1], self.srcdir + "/submodule")
2413 self.git(["add", "submodule"], cwd=self.srcdir)
2414 base_commit_2 = self.commit()
2415
2416 # Add a LFS file to the repository
2417 base_commit_3 = self.commit_file("a.mp3", "version 1")
2418 # Update the added LFS file
2419 base_commit_4 = self.commit_file("a.mp3", "version 2")
2420
2421 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2422
2423 # Verify that LFS objects referenced from submodules are fetched and checked out
2424 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 2")
2425 # Verify that the repository inside the download cache of a submodule is extended with any
2426 # additional LFS objects needed when checking out a different revision.
2427 fetch_and_verify(base_commit_2, "submodule/a.mp3", "submodule version 1")
2428 # Verify that LFS objects referenced from the base repository are fetched and checked out
2429 fetch_and_verify(base_commit_3, "a.mp3", "version 1")
2430 # Verify that the cached repository is extended with any additional LFS objects required
2431 # when checking out a different revision.
2432 fetch_and_verify(base_commit_4, "a.mp3", "version 2")
2433
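hide_directory replaces the inline hide_upstream_repository helper removed further down; judging from that helper, it renames a directory aside for the duration of the block so the unpack step cannot silently re-fetch from it. A sketch reconstructing that behaviour (the real helper may differ in detail):

    import contextlib
    import os

    @contextlib.contextmanager
    def hide_directory(directory):
        # rename the directory away, restore it when the block exits
        temp_name = directory + ".bak"
        os.rename(directory, temp_name)
        try:
            yield
        finally:
            os.rename(temp_name, directory)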
2434 @skipIfNoGitLFS()
2435 def test_gitsm_lfs_disabled(self):
2436 """Test that the gitsm fetcher does not use LFS when explicitly disabled"""
2437 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2438
2439 def fetch_and_verify(revision, filename, content):
2440 self.d.setVar('SRCREV', revision)
2441 fetcher, ud = self.fetch()
2442
2443 with hide_directory(submoduledir), hide_directory(self.srcdir):
2444 workdir = self.d.getVar('WORKDIR')
2445 fetcher.unpack(workdir)
2446
2447 with open(os.path.join(workdir, "git", filename)) as f:
2448 # Assume that LFS did not perform smudging when the expected content is
2449 # missing.
2450 self.assertNotEqual(f.read(), content)
2451
2452 # Create the git repository that will later be used as a submodule
2453 submoduledir = self.tempdir + "/submodule"
2454 bb.utils.mkdirhier(submoduledir)
2455 self.git_init(submoduledir)
2456 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2457 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2458
2459 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2460
2461 # Add the submodule to the repository at its current HEAD revision
2462 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2463 cwd=self.srcdir)
2464 base_commit_1 = self.commit()
2465
2466 # Add a LFS file to the repository
2467 base_commit_2 = self.commit_file("a.mp3", "version 1")
2468
2469 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;branch=master;lfs=0" % self.srcdir)
2470
2471 # Verify that LFS objects referenced from submodules are not fetched nor checked out
2472 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 1")
2473 # Verify that the LFS objects referenced from the base repository are not fetched nor
2474 # checked out
2475 fetch_and_verify(base_commit_2, "a.mp3", "version 1")
2476
2477 @skipIfNoGitLFS()
2308 def test_fetch_lfs_on_srcrev_change(self): 2478 def test_fetch_lfs_on_srcrev_change(self):
2309 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested""" 2479 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
2310 self.git(["lfs", "install", "--local"], cwd=self.srcdir) 2480 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2311 2481
2312 @contextlib.contextmanager
2313 def hide_upstream_repository():
2314 """Hide the upstream repository to make sure that git lfs cannot pull from it"""
2315 temp_name = self.srcdir + ".bak"
2316 os.rename(self.srcdir, temp_name)
2317 try:
2318 yield
2319 finally:
2320 os.rename(temp_name, self.srcdir)
2321
2322 def fetch_and_verify(revision, filename, content): 2482 def fetch_and_verify(revision, filename, content):
2323 self.d.setVar('SRCREV', revision) 2483 self.d.setVar('SRCREV', revision)
2324 fetcher, ud = self.fetch() 2484 fetcher, ud = self.fetch()
2325 2485
2326 with hide_upstream_repository(): 2486 with hide_directory(self.srcdir):
2327 workdir = self.d.getVar('WORKDIR') 2487 workdir = self.d.getVar('WORKDIR')
2328 fetcher.unpack(workdir) 2488 fetcher.unpack(workdir)
2329 2489
@@ -2375,8 +2535,6 @@ class GitLfsTest(FetcherTest):
2375 2535
2376 @skipIfNoGitLFS() 2536 @skipIfNoGitLFS()
2377 def test_lfs_enabled(self): 2537 def test_lfs_enabled(self):
2378 import shutil
2379
2380 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir 2538 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2381 self.d.setVar('SRC_URI', uri) 2539 self.d.setVar('SRC_URI', uri)
2382 2540
@@ -2387,8 +2545,6 @@ class GitLfsTest(FetcherTest):
2387 2545
2388 @skipIfNoGitLFS() 2546 @skipIfNoGitLFS()
2389 def test_lfs_disabled(self): 2547 def test_lfs_disabled(self):
2390 import shutil
2391
2392 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir 2548 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2393 self.d.setVar('SRC_URI', uri) 2549 self.d.setVar('SRC_URI', uri)
2394 2550
@@ -2397,58 +2553,76 @@ class GitLfsTest(FetcherTest):
2397 fetcher, ud = self.fetch() 2553 fetcher, ud = self.fetch()
2398 fetcher.unpack(self.d.getVar('WORKDIR')) 2554 fetcher.unpack(self.d.getVar('WORKDIR'))
2399 2555
2400 def test_lfs_enabled_not_installed(self): 2556 @skipIfNoGitLFS()
2401 import shutil 2557 def test_lfs_enabled_not_installed_during_unpack(self):
2558 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2559 self.d.setVar('SRC_URI', uri)
2560
2561 # Careful: suppress initial attempt at downloading
2562 fetcher, ud = self.fetch(uri=None, download=False)
2563
2564 fetcher.download()
2565 # If git-lfs cannot be found, the unpack should throw an error
2566 with self.assertRaises(bb.fetch2.FetchError):
2567 with unittest.mock.patch("shutil.which", return_value=None):
2568 shutil.rmtree(self.gitdir, ignore_errors=True)
2569 fetcher.unpack(self.d.getVar('WORKDIR'))
2402 2570
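Rather than monkey-patching the fetcher's private _find_git_lfs as before, the tests now patch shutil.which to return None, which makes any "is git-lfs on PATH?" probe fail for the duration of the with-block and restores the real function on exit. The mechanism in isolation:

    import shutil
    import unittest.mock

    with unittest.mock.patch("shutil.which", return_value=None):
        # inside the block every lookup reports the tool as missing
        assert shutil.which("git-lfs") is None
    assert callable(shutil.which)  # the real function is restored on exit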
2571 def test_lfs_enabled_not_installed(self):
2403 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir 2572 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2404 self.d.setVar('SRC_URI', uri) 2573 self.d.setVar('SRC_URI', uri)
2405 2574
2406 # Careful: suppress initial attempt at downloading 2575 # Careful: suppress initial attempt at downloading
2407 fetcher, ud = self.fetch(uri=None, download=False) 2576 fetcher, ud = self.fetch(uri=None, download=False)
2408 2577
2409 # Artificially assert that git-lfs is not installed, so 2578 # If git-lfs cannot be found, the download should throw an error
2410 # we can verify a failure to unpack in it's absence. 2579 with unittest.mock.patch("shutil.which", return_value=None):
2411 old_find_git_lfs = ud.method._find_git_lfs
2412 try:
2413 # If git-lfs cannot be found, the unpack should throw an error
2414 with self.assertRaises(bb.fetch2.FetchError): 2580 with self.assertRaises(bb.fetch2.FetchError):
2415 fetcher.download() 2581 fetcher.download()
2416 ud.method._find_git_lfs = lambda d: False
2417 shutil.rmtree(self.gitdir, ignore_errors=True)
2418 fetcher.unpack(self.d.getVar('WORKDIR'))
2419 finally:
2420 ud.method._find_git_lfs = old_find_git_lfs
2421 2582
2422 def test_lfs_disabled_not_installed(self): 2583 def test_lfs_disabled_not_installed(self):
2423 import shutil
2424
2425 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir 2584 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2426 self.d.setVar('SRC_URI', uri) 2585 self.d.setVar('SRC_URI', uri)
2427 2586
2428 # Careful: suppress initial attempt at downloading 2587 # Careful: suppress initial attempt at downloading
2429 fetcher, ud = self.fetch(uri=None, download=False) 2588 fetcher, ud = self.fetch(uri=None, download=False)
2430 2589
2431 # Artificially assert that git-lfs is not installed, so 2590 # Even if git-lfs cannot be found, the download / unpack should be successful
2432 # we can verify a failure to unpack in it's absence. 2591 with unittest.mock.patch("shutil.which", return_value=None):
2433 old_find_git_lfs = ud.method._find_git_lfs 2592 fetcher.download()
2434 try: 2593 shutil.rmtree(self.gitdir, ignore_errors=True)
2435 # Even if git-lfs cannot be found, the unpack should be successful 2594 fetcher.unpack(self.d.getVar('WORKDIR'))
2595
2596 def test_lfs_enabled_not_installed_but_not_needed(self):
2597 srcdir = os.path.join(self.tempdir, "emptygit")
2598 bb.utils.mkdirhier(srcdir)
2599 self.git_init(srcdir)
2600 self.commit_file("test", "test content", cwd=srcdir)
2601
2602 uri = 'git://%s;protocol=file;lfs=1;branch=master' % srcdir
2603 self.d.setVar('SRC_URI', uri)
2604
2605 # Careful: suppress initial attempt at downloading
2606 fetcher, ud = self.fetch(uri=None, download=False)
2607
2608 # It shouldn't matter that git-lfs cannot be found as the repository configuration does not
2609 # specify any LFS filters.
2610 with unittest.mock.patch("shutil.which", return_value=None):
2436 fetcher.download() 2611 fetcher.download()
2437 ud.method._find_git_lfs = lambda d: False
2438 shutil.rmtree(self.gitdir, ignore_errors=True) 2612 shutil.rmtree(self.gitdir, ignore_errors=True)
2439 fetcher.unpack(self.d.getVar('WORKDIR')) 2613 fetcher.unpack(self.d.getVar('WORKDIR'))
2440 finally:
2441 ud.method._find_git_lfs = old_find_git_lfs
2442 2614
2443 class GitURLWithSpacesTest(FetcherTest): 2615 class GitURLWithSpacesTest(FetcherTest):
2444 test_git_urls = { 2616 test_git_urls = {
2445 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : { 2617 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
2446 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master', 2618 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
2619 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
2447 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', 2620 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
2448 'path': '/tfs/example path/example.git' 2621 'path': '/tfs/example path/example.git'
2449 }, 2622 },
2450 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : { 2623 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
2451 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master', 2624 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
2625 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
2452 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', 2626 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
2453 'path': '/tfs/example path/example repo.git' 2627 'path': '/tfs/example path/example repo.git'
2454 } 2628 }
@@ -2471,6 +2645,51 @@ class GitURLWithSpacesTest(FetcherTest):
2471 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) 2645 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock'))
2472 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) 2646 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
2473 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) 2647 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
2648 self.assertEqual(ud.method._get_repo_url(ud), ref['repo_url'])
2649
2650
2651class FetchLocallyMissingTagFromRemote(FetcherTest):
2652 def setUp(self):
2653 FetcherTest.setUp(self)
2654 self.gitdir = os.path.join(self.tempdir, 'git')
2655 self.srcdir = os.path.join(self.tempdir, 'gitsource')
2656
2657 bb.utils.mkdirhier(self.srcdir)
2658 self.git_init(cwd=self.srcdir)
2659 self.d.setVar('WORKDIR', self.tempdir)
2660 self.d.setVar('S', self.gitdir)
2661
2662 uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
2663 self.d.setVar('SRC_URI', uri)
2664
2665 open(os.path.join(self.srcdir, 'dummyfile'), 'w').close()
2666 self.git(['add', 'dummyfile'], self.srcdir)
2667 self.git(['commit', '-m', 'dummymsg', 'dummyfile'], self.srcdir)
2668
2669 def _fetch_and_unpack(self, uri_to_fetch):
2670 fetcher = bb.fetch2.Fetch([uri_to_fetch], self.d)
2671 fetcher.download()
2672 fetcher.unpack(self.d.getVar('WORKDIR'))
2673
2674 def test_tag_present_in_remote_but_not_local(self):
2675 # Fetch a repo that has no tag in it,
2676 # then add a tag to this repo and fetch it again, without
2677 # changing SRCREV but by adding ';tag=tag1' to SRC_URI;
2678 # the new tag should be fetched and unpacked
2679 srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
2680 self.d.setVar('SRCREV', srcrev)
2681 src_uri = self.d.getVar('SRC_URI')
2682 self._fetch_and_unpack(src_uri)
2683
2684 self.git('tag -a -m tag1 tag1', cwd=self.srcdir)
2685
2686 src_uri = '%s;tag=tag1' % self.d.getVar('SRC_URI').split()[0]
2687 self.d.setVar('SRC_URI', src_uri)
2688 self._fetch_and_unpack(src_uri)
2689
2690 output = self.git('log --pretty=oneline -n 1 refs/tags/tag1', cwd=self.gitdir)
2691 assert "fatal: ambiguous argument" not in output
2692
2474 2693
2475 class CrateTest(FetcherTest): 2694 class CrateTest(FetcherTest):
2476 @skipIfNoNetwork() 2695 @skipIfNoNetwork()
@@ -2592,7 +2811,6 @@ class CrateTest(FetcherTest):
2592 2811
2593 class NPMTest(FetcherTest): 2812 class NPMTest(FetcherTest):
2594 def skipIfNoNpm(): 2813 def skipIfNoNpm():
2595 import shutil
2596 if not shutil.which('npm'): 2814 if not shutil.which('npm'):
2597 return unittest.skip('npm not installed') 2815 return unittest.skip('npm not installed')
2598 return lambda f: f 2816 return lambda f: f
@@ -2600,8 +2818,8 @@ class NPMTest(FetcherTest):
2600 @skipIfNoNpm() 2818 @skipIfNoNpm()
2601 @skipIfNoNetwork() 2819 @skipIfNoNetwork()
2602 def test_npm(self): 2820 def test_npm(self):
2603 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2821 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2604 fetcher = bb.fetch.Fetch([url], self.d) 2822 fetcher = bb.fetch.Fetch(urls, self.d)
2605 ud = fetcher.ud[fetcher.urls[0]] 2823 ud = fetcher.ud[fetcher.urls[0]]
2606 fetcher.download() 2824 fetcher.download()
2607 self.assertTrue(os.path.exists(ud.localpath)) 2825 self.assertTrue(os.path.exists(ud.localpath))
@@ -2614,9 +2832,9 @@ class NPMTest(FetcherTest):
2614 @skipIfNoNpm() 2832 @skipIfNoNpm()
2615 @skipIfNoNetwork() 2833 @skipIfNoNetwork()
2616 def test_npm_bad_checksum(self): 2834 def test_npm_bad_checksum(self):
2617 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2835 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2618 # Fetch once to get a tarball 2836 # Fetch once to get a tarball
2619 fetcher = bb.fetch.Fetch([url], self.d) 2837 fetcher = bb.fetch.Fetch(urls, self.d)
2620 ud = fetcher.ud[fetcher.urls[0]] 2838 ud = fetcher.ud[fetcher.urls[0]]
2621 fetcher.download() 2839 fetcher.download()
2622 self.assertTrue(os.path.exists(ud.localpath)) 2840 self.assertTrue(os.path.exists(ud.localpath))
@@ -2633,9 +2851,9 @@ class NPMTest(FetcherTest):
2633 @skipIfNoNpm() 2851 @skipIfNoNpm()
2634 @skipIfNoNetwork() 2852 @skipIfNoNetwork()
2635 def test_npm_premirrors(self): 2853 def test_npm_premirrors(self):
2636 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2854 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2637 # Fetch once to get a tarball 2855 # Fetch once to get a tarball
2638 fetcher = bb.fetch.Fetch([url], self.d) 2856 fetcher = bb.fetch.Fetch(urls, self.d)
2639 ud = fetcher.ud[fetcher.urls[0]] 2857 ud = fetcher.ud[fetcher.urls[0]]
2640 fetcher.download() 2858 fetcher.download()
2641 self.assertTrue(os.path.exists(ud.localpath)) 2859 self.assertTrue(os.path.exists(ud.localpath))
@@ -2655,7 +2873,7 @@ class NPMTest(FetcherTest):
2655 # while the fetcher object exists, which it does when we rename the 2873 # while the fetcher object exists, which it does when we rename the
2656 # download directory to "mirror" above. Thus we need a new fetcher to go 2874 # download directory to "mirror" above. Thus we need a new fetcher to go
2657 # with the now empty download directory. 2875 # with the now empty download directory.
2658 fetcher = bb.fetch.Fetch([url], self.d) 2876 fetcher = bb.fetch.Fetch(urls, self.d)
2659 ud = fetcher.ud[fetcher.urls[0]] 2877 ud = fetcher.ud[fetcher.urls[0]]
2660 fetcher.download() 2878 fetcher.download()
2661 self.assertTrue(os.path.exists(ud.localpath)) 2879 self.assertTrue(os.path.exists(ud.localpath))
@@ -2663,9 +2881,9 @@ class NPMTest(FetcherTest):
2663 @skipIfNoNpm() 2881 @skipIfNoNpm()
2664 @skipIfNoNetwork() 2882 @skipIfNoNetwork()
2665 def test_npm_premirrors_with_specified_filename(self): 2883 def test_npm_premirrors_with_specified_filename(self):
2666 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2884 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2667 # Fetch once to get a tarball 2885 # Fetch once to get a tarball
2668 fetcher = bb.fetch.Fetch([url], self.d) 2886 fetcher = bb.fetch.Fetch(urls, self.d)
2669 ud = fetcher.ud[fetcher.urls[0]] 2887 ud = fetcher.ud[fetcher.urls[0]]
2670 fetcher.download() 2888 fetcher.download()
2671 self.assertTrue(os.path.exists(ud.localpath)) 2889 self.assertTrue(os.path.exists(ud.localpath))
@@ -2685,8 +2903,8 @@ class NPMTest(FetcherTest):
2685 @skipIfNoNetwork() 2903 @skipIfNoNetwork()
2686 def test_npm_mirrors(self): 2904 def test_npm_mirrors(self):
2687 # Fetch once to get a tarball 2905 # Fetch once to get a tarball
2688 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2906 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2689 fetcher = bb.fetch.Fetch([url], self.d) 2907 fetcher = bb.fetch.Fetch(urls, self.d)
2690 ud = fetcher.ud[fetcher.urls[0]] 2908 ud = fetcher.ud[fetcher.urls[0]]
2691 fetcher.download() 2909 fetcher.download()
2692 self.assertTrue(os.path.exists(ud.localpath)) 2910 self.assertTrue(os.path.exists(ud.localpath))
@@ -2710,8 +2928,8 @@ class NPMTest(FetcherTest):
2710 @skipIfNoNpm() 2928 @skipIfNoNpm()
2711 @skipIfNoNetwork() 2929 @skipIfNoNetwork()
2712 def test_npm_destsuffix_downloadfilename(self): 2930 def test_npm_destsuffix_downloadfilename(self):
2713 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' 2931 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz']
2714 fetcher = bb.fetch.Fetch([url], self.d) 2932 fetcher = bb.fetch.Fetch(urls, self.d)
2715 fetcher.download() 2933 fetcher.download()
2716 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz'))) 2934 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
2717 fetcher.unpack(self.unpackdir) 2935 fetcher.unpack(self.unpackdir)
@@ -2719,18 +2937,18 @@ class NPMTest(FetcherTest):
2719 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) 2937 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))
2720 2938
2721 def test_npm_no_network_no_tarball(self): 2939 def test_npm_no_network_no_tarball(self):
2722 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2940 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2723 self.d.setVar('BB_NO_NETWORK', '1') 2941 self.d.setVar('BB_NO_NETWORK', '1')
2724 fetcher = bb.fetch.Fetch([url], self.d) 2942 fetcher = bb.fetch.Fetch(urls, self.d)
2725 with self.assertRaises(bb.fetch2.NetworkAccess): 2943 with self.assertRaises(bb.fetch2.NetworkAccess):
2726 fetcher.download() 2944 fetcher.download()
2727 2945
2728 @skipIfNoNpm() 2946 @skipIfNoNpm()
2729 @skipIfNoNetwork() 2947 @skipIfNoNetwork()
2730 def test_npm_no_network_with_tarball(self): 2948 def test_npm_no_network_with_tarball(self):
2731 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2949 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2732 # Fetch once to get a tarball 2950 # Fetch once to get a tarball
2733 fetcher = bb.fetch.Fetch([url], self.d) 2951 fetcher = bb.fetch.Fetch(urls, self.d)
2734 fetcher.download() 2952 fetcher.download()
2735 # Disable network access 2953 # Disable network access
2736 self.d.setVar('BB_NO_NETWORK', '1') 2954 self.d.setVar('BB_NO_NETWORK', '1')
@@ -2743,8 +2961,8 @@ class NPMTest(FetcherTest):
2743 @skipIfNoNpm() 2961 @skipIfNoNpm()
2744 @skipIfNoNetwork() 2962 @skipIfNoNetwork()
2745 def test_npm_registry_alternate(self): 2963 def test_npm_registry_alternate(self):
2746 url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0' 2964 urls = ['npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0']
2747 fetcher = bb.fetch.Fetch([url], self.d) 2965 fetcher = bb.fetch.Fetch(urls, self.d)
2748 fetcher.download() 2966 fetcher.download()
2749 fetcher.unpack(self.unpackdir) 2967 fetcher.unpack(self.unpackdir)
2750 unpackdir = os.path.join(self.unpackdir, 'npm') 2968 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2753,8 +2971,8 @@ class NPMTest(FetcherTest):
2753 @skipIfNoNpm() 2971 @skipIfNoNpm()
2754 @skipIfNoNetwork() 2972 @skipIfNoNetwork()
2755 def test_npm_version_latest(self): 2973 def test_npm_version_latest(self):
2756 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest' 2974 url = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest']
2757 fetcher = bb.fetch.Fetch([url], self.d) 2975 fetcher = bb.fetch.Fetch(url, self.d)
2758 fetcher.download() 2976 fetcher.download()
2759 fetcher.unpack(self.unpackdir) 2977 fetcher.unpack(self.unpackdir)
2760 unpackdir = os.path.join(self.unpackdir, 'npm') 2978 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2763,46 +2981,46 @@ class NPMTest(FetcherTest):
2763 @skipIfNoNpm() 2981 @skipIfNoNpm()
2764 @skipIfNoNetwork() 2982 @skipIfNoNetwork()
2765 def test_npm_registry_invalid(self): 2983 def test_npm_registry_invalid(self):
2766 url = 'npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2984 urls = ['npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2767 fetcher = bb.fetch.Fetch([url], self.d) 2985 fetcher = bb.fetch.Fetch(urls, self.d)
2768 with self.assertRaises(bb.fetch2.FetchError): 2986 with self.assertRaises(bb.fetch2.FetchError):
2769 fetcher.download() 2987 fetcher.download()
2770 2988
2771 @skipIfNoNpm() 2989 @skipIfNoNpm()
2772 @skipIfNoNetwork() 2990 @skipIfNoNetwork()
2773 def test_npm_package_invalid(self): 2991 def test_npm_package_invalid(self):
2774 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0' 2992 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0']
2775 fetcher = bb.fetch.Fetch([url], self.d) 2993 fetcher = bb.fetch.Fetch(urls, self.d)
2776 with self.assertRaises(bb.fetch2.FetchError): 2994 with self.assertRaises(bb.fetch2.FetchError):
2777 fetcher.download() 2995 fetcher.download()
2778 2996
2779 @skipIfNoNpm() 2997 @skipIfNoNpm()
2780 @skipIfNoNetwork() 2998 @skipIfNoNetwork()
2781 def test_npm_version_invalid(self): 2999 def test_npm_version_invalid(self):
2782 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid' 3000 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid']
2783 with self.assertRaises(bb.fetch2.ParameterError): 3001 with self.assertRaises(bb.fetch2.ParameterError):
2784 fetcher = bb.fetch.Fetch([url], self.d) 3002 fetcher = bb.fetch.Fetch(urls, self.d)
2785 3003
2786 @skipIfNoNpm() 3004 @skipIfNoNpm()
2787 @skipIfNoNetwork() 3005 @skipIfNoNetwork()
2788 def test_npm_registry_none(self): 3006 def test_npm_registry_none(self):
2789 url = 'npm://;package=@savoirfairelinux/node-server-example;version=1.0.0' 3007 urls = ['npm://;package=@savoirfairelinux/node-server-example;version=1.0.0']
2790 with self.assertRaises(bb.fetch2.MalformedUrl): 3008 with self.assertRaises(bb.fetch2.MalformedUrl):
2791 fetcher = bb.fetch.Fetch([url], self.d) 3009 fetcher = bb.fetch.Fetch(urls, self.d)
2792 3010
2793 @skipIfNoNpm() 3011 @skipIfNoNpm()
2794 @skipIfNoNetwork() 3012 @skipIfNoNetwork()
2795 def test_npm_package_none(self): 3013 def test_npm_package_none(self):
2796 url = 'npm://registry.npmjs.org;version=1.0.0' 3014 urls = ['npm://registry.npmjs.org;version=1.0.0']
2797 with self.assertRaises(bb.fetch2.MissingParameterError): 3015 with self.assertRaises(bb.fetch2.MissingParameterError):
2798 fetcher = bb.fetch.Fetch([url], self.d) 3016 fetcher = bb.fetch.Fetch(urls, self.d)
2799 3017
2800 @skipIfNoNpm() 3018 @skipIfNoNpm()
2801 @skipIfNoNetwork() 3019 @skipIfNoNetwork()
2802 def test_npm_version_none(self): 3020 def test_npm_version_none(self):
2803 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example' 3021 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example']
2804 with self.assertRaises(bb.fetch2.MissingParameterError): 3022 with self.assertRaises(bb.fetch2.MissingParameterError):
2805 fetcher = bb.fetch.Fetch([url], self.d) 3023 fetcher = bb.fetch.Fetch(urls, self.d)
2806 3024
2807 def create_shrinkwrap_file(self, data): 3025 def create_shrinkwrap_file(self, data):
2808 import json 3026 import json
@@ -2811,32 +3029,30 @@ class NPMTest(FetcherTest):
2811 bb.utils.mkdirhier(datadir) 3029 bb.utils.mkdirhier(datadir)
2812 with open(swfile, 'w') as f: 3030 with open(swfile, 'w') as f:
2813 json.dump(data, f) 3031 json.dump(data, f)
2814 # Also configure the S directory
2815 self.sdir = os.path.join(self.unpackdir, 'S')
2816 self.d.setVar('S', self.sdir)
2817 return swfile 3032 return swfile
2818 3033
2819 @skipIfNoNpm()
2820 @skipIfNoNetwork() 3034 @skipIfNoNetwork()
2821 def test_npmsw(self): 3035 def test_npmsw(self):
2822 swfile = self.create_shrinkwrap_file({ 3036 swfile = self.create_shrinkwrap_file({
2823 'dependencies': { 3037 'packages': {
2824 'array-flatten': { 3038 'node_modules/array-flatten': {
2825 'version': '1.1.1', 3039 'version': '1.1.1',
2826 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3040 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2827 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=', 3041 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=',
2828 'dependencies': { 3042 'dependencies': {
2829 'content-type': { 3043 'content-type': "1.0.4"
2830 'version': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2831 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
2832 'dependencies': {
2833 'cookie': {
2834 'version': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
2835 'from': 'git+https://github.com/jshttp/cookie.git'
2836 }
2837 }
2838 }
2839 } 3044 }
3045 },
3046 'node_modules/array-flatten/node_modules/content-type': {
3047 'version': '1.0.4',
3048 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3049 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3050 'dependencies': {
3051 'cookie': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
3052 }
3053 },
3054 'node_modules/array-flatten/node_modules/content-type/node_modules/cookie': {
3055 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2840 } 3056 }
2841 } 3057 }
2842 }) 3058 })
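The rewritten fixtures use the npm lockfile v2/v3 layout: instead of a nested 'dependencies' tree, 'packages' is a flat map keyed by node_modules/ paths, and git dependencies live in 'resolved'. Both shapes side by side (trimmed, illustrative data only):

    legacy = {  # lockfile v1: nested tree
        "dependencies": {
            "array-flatten": {"version": "1.1.1"},
        }
    }
    modern = {  # lockfile v2/v3: flat map keyed by install path
        "packages": {
            "node_modules/array-flatten": {"version": "1.1.1"},
            "node_modules/array-flatten/node_modules/content-type": {
                "version": "1.0.4",
            },
        }
    }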
@@ -2846,31 +3062,17 @@ class NPMTest(FetcherTest):
2846 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3062 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2847 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) 3063 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2848 fetcher.unpack(self.unpackdir) 3064 fetcher.unpack(self.unpackdir)
2849 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'npm-shrinkwrap.json'))) 3065 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm-shrinkwrap.json')))
2850 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3066 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2851 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json'))) 3067 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json')))
2852 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json'))) 3068 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json')))
2853 3069
2854 @skipIfNoNpm()
2855 @skipIfNoNetwork() 3070 @skipIfNoNetwork()
2856 def test_npmsw_git(self): 3071 def test_npmsw_git(self):
2857 swfile = self.create_shrinkwrap_file({ 3072 swfile = self.create_shrinkwrap_file({
2858 'dependencies': { 3073 'packages': {
2859 'cookie': { 3074 'node_modules/cookie': {
2860 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', 3075 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2861 'from': 'github:jshttp/cookie.git'
2862 }
2863 }
2864 })
2865 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2866 fetcher.download()
2867 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2868
2869 swfile = self.create_shrinkwrap_file({
2870 'dependencies': {
2871 'cookie': {
2872 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
2873 'from': 'jshttp/cookie.git'
2874 } 3076 }
2875 } 3077 }
2876 }) 3078 })
@@ -2878,29 +3080,16 @@ class NPMTest(FetcherTest):
2878 fetcher.download() 3080 fetcher.download()
2879 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) 3081 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2880 3082
2881 swfile = self.create_shrinkwrap_file({
2882 'dependencies': {
2883 'nodejs': {
2884 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
2885 'from': 'gitlab:gitlab-examples/nodejs'
2886 }
2887 }
2888 })
2889 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2890 fetcher.download()
2891 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
2892
2893 @skipIfNoNpm()
2894 @skipIfNoNetwork() 3083 @skipIfNoNetwork()
2895 def test_npmsw_dev(self): 3084 def test_npmsw_dev(self):
2896 swfile = self.create_shrinkwrap_file({ 3085 swfile = self.create_shrinkwrap_file({
2897 'dependencies': { 3086 'packages': {
2898 'array-flatten': { 3087 'node_modules/array-flatten': {
2899 'version': '1.1.1', 3088 'version': '1.1.1',
2900 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3089 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2901 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3090 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
2902 }, 3091 },
2903 'content-type': { 3092 'node_modules/content-type': {
2904 'version': '1.0.4', 3093 'version': '1.0.4',
2905 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', 3094 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2906 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', 3095 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
@@ -2919,12 +3108,11 @@ class NPMTest(FetcherTest):
2919 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) 3108 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
2920 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3109 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2921 3110
2922 @skipIfNoNpm()
2923 @skipIfNoNetwork() 3111 @skipIfNoNetwork()
2924 def test_npmsw_destsuffix(self): 3112 def test_npmsw_destsuffix(self):
2925 swfile = self.create_shrinkwrap_file({ 3113 swfile = self.create_shrinkwrap_file({
2926 'dependencies': { 3114 'packages': {
2927 'array-flatten': { 3115 'node_modules/array-flatten': {
2928 'version': '1.1.1', 3116 'version': '1.1.1',
2929 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3117 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2930 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3118 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2938,8 +3126,8 @@ class NPMTest(FetcherTest):
2938 3126
2939 def test_npmsw_no_network_no_tarball(self): 3127 def test_npmsw_no_network_no_tarball(self):
2940 swfile = self.create_shrinkwrap_file({ 3128 swfile = self.create_shrinkwrap_file({
2941 'dependencies': { 3129 'packages': {
2942 'array-flatten': { 3130 'node_modules/array-flatten': {
2943 'version': '1.1.1', 3131 'version': '1.1.1',
2944 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3132 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2945 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3133 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2961,8 +3149,8 @@ class NPMTest(FetcherTest):
2961 self.d.setVar('BB_NO_NETWORK', '1') 3149 self.d.setVar('BB_NO_NETWORK', '1')
2962 # Fetch again 3150 # Fetch again
2963 swfile = self.create_shrinkwrap_file({ 3151 swfile = self.create_shrinkwrap_file({
2964 'dependencies': { 3152 'packages': {
2965 'array-flatten': { 3153 'node_modules/array-flatten': {
2966 'version': '1.1.1', 3154 'version': '1.1.1',
2967 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3155 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2968 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3156 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2972,15 +3160,14 @@ class NPMTest(FetcherTest):
2972 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) 3160 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2973 fetcher.download() 3161 fetcher.download()
2974 fetcher.unpack(self.unpackdir) 3162 fetcher.unpack(self.unpackdir)
2975 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3163 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2976 3164
2977 @skipIfNoNpm()
2978 @skipIfNoNetwork() 3165 @skipIfNoNetwork()
2979 def test_npmsw_npm_reusability(self): 3166 def test_npmsw_npm_reusability(self):
2980 # Fetch once with npmsw 3167 # Fetch once with npmsw
2981 swfile = self.create_shrinkwrap_file({ 3168 swfile = self.create_shrinkwrap_file({
2982 'dependencies': { 3169 'packages': {
2983 'array-flatten': { 3170 'node_modules/array-flatten': {
2984 'version': '1.1.1', 3171 'version': '1.1.1',
2985 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3172 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2986 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3173 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2997,13 +3184,12 @@ class NPMTest(FetcherTest):
2997 fetcher.unpack(self.unpackdir) 3184 fetcher.unpack(self.unpackdir)
2998 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json'))) 3185 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json')))
2999 3186
3000 @skipIfNoNpm()
3001 @skipIfNoNetwork() 3187 @skipIfNoNetwork()
3002 def test_npmsw_bad_checksum(self): 3188 def test_npmsw_bad_checksum(self):
3003 # Try to fetch with bad checksum 3189 # Try to fetch with bad checksum
3004 swfile = self.create_shrinkwrap_file({ 3190 swfile = self.create_shrinkwrap_file({
3005 'dependencies': { 3191 'packages': {
3006 'array-flatten': { 3192 'node_modules/array-flatten': {
3007 'version': '1.1.1', 3193 'version': '1.1.1',
3008 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3194 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3009 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg=' 3195 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg='
@@ -3015,8 +3201,8 @@ class NPMTest(FetcherTest):
3015 fetcher.download() 3201 fetcher.download()
3016 # Fetch correctly to get a tarball 3202 # Fetch correctly to get a tarball
3017 swfile = self.create_shrinkwrap_file({ 3203 swfile = self.create_shrinkwrap_file({
3018 'dependencies': { 3204 'packages': {
3019 'array-flatten': { 3205 'node_modules/array-flatten': {
3020 'version': '1.1.1', 3206 'version': '1.1.1',
3021 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3207 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3022 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3208 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3054,8 +3240,8 @@ class NPMTest(FetcherTest):
3054 # Fetch again 3240 # Fetch again
3055 self.assertFalse(os.path.exists(ud.localpath)) 3241 self.assertFalse(os.path.exists(ud.localpath))
3056 swfile = self.create_shrinkwrap_file({ 3242 swfile = self.create_shrinkwrap_file({
3057 'dependencies': { 3243 'packages': {
3058 'array-flatten': { 3244 'node_modules/array-flatten': {
3059 'version': '1.1.1', 3245 'version': '1.1.1',
3060 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3246 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3061 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3247 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3082,8 +3268,8 @@ class NPMTest(FetcherTest):
3082 # Fetch again with invalid url 3268 # Fetch again with invalid url
3083 self.assertFalse(os.path.exists(ud.localpath)) 3269 self.assertFalse(os.path.exists(ud.localpath))
3084 swfile = self.create_shrinkwrap_file({ 3270 swfile = self.create_shrinkwrap_file({
3085 'dependencies': { 3271 'packages': {
3086 'array-flatten': { 3272 'node_modules/array-flatten': {
3087 'version': '1.1.1', 3273 'version': '1.1.1',
3088 'resolved': 'https://invalid', 3274 'resolved': 'https://invalid',
3089 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3275 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3094,6 +3280,28 @@ class NPMTest(FetcherTest):
3094 fetcher.download() 3280 fetcher.download()
3095 self.assertTrue(os.path.exists(ud.localpath)) 3281 self.assertTrue(os.path.exists(ud.localpath))
3096 3282
3283 @skipIfNoNetwork()
3284 def test_npmsw_bundled(self):
3285 swfile = self.create_shrinkwrap_file({
3286 'packages': {
3287 'node_modules/array-flatten': {
3288 'version': '1.1.1',
3289 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3290 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
3291 },
3292 'node_modules/content-type': {
3293 'version': '1.0.4',
3294 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3295 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3296 'inBundle': True
3297 }
3298 }
3299 })
3300 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
3301 fetcher.download()
3302 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
3303 self.assertFalse(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
3304
3097class GitSharedTest(FetcherTest): 3305class GitSharedTest(FetcherTest):
3098 def setUp(self): 3306 def setUp(self):
3099 super(GitSharedTest, self).setUp() 3307 super(GitSharedTest, self).setUp()
@@ -3121,6 +3329,72 @@ class GitSharedTest(FetcherTest):
3121 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') 3329 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3122 self.assertFalse(os.path.exists(alt)) 3330 self.assertFalse(os.path.exists(alt))
3123 3331
3332class GitTagVerificationTests(FetcherTest):
3333
3334 @skipIfNoNetwork()
3335 def test_tag_rev_match(self):
3336 # Test that a url with rev= and tag= set works
3337 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.12.0"], self.d)
3338 fetcher.download()
3339 fetcher.unpack(self.unpackdir)
3340
3341 def test_annotated_tag_rev_match(self):
3342 # Test that a url with rev= and tag= set works
3343 # rev is the annotated tag revision in this case
3344 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=fa30183549bd09f33fd4eebf56771ca5393526a6;tag=2.12.0"], self.d)
3345 fetcher.download()
3346 fetcher.unpack(self.unpackdir)
3347
3348 @skipIfNoNetwork()
3349 def test_tag_rev_match2(self):
3350 # Test that a url with SRCREV and tag= set works
3351 self.d.setVar('SRCREV', '5b4e20377eea8d428edf1aeb2187c18f82ca6757')
3352 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;tag=2.12.0"], self.d)
3353 fetcher.download()
3354 fetcher.unpack(self.unpackdir)
3355
3356 @skipIfNoNetwork()
3357 def test_tag_rev_match3(self):
3358 # Test that a url with SRCREV, rev= and tag= set works
3359 self.d.setVar('SRCREV', '5b4e20377eea8d428edf1aeb2187c18f82ca6757')
3360 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.12.0"], self.d)
3361 fetcher.download()
3362 fetcher.unpack(self.unpackdir)
3363
3364 @skipIfNoNetwork()
3365 def test_tag_rev_match4(self):
3366 # Test that a url with mismatching SRCREV and rev= raises an error
3367 self.d.setVar('SRCREV', 'bade540fc31a1c26839efd2c7785a751ce24ebfb')
3368 with self.assertRaises(bb.fetch2.FetchError):
3369 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.12.0"], self.d)
3370
3371 @skipIfNoNetwork()
3372 def test_tag_rev_match5(self):
3373 # Test that a url with SRCREV, rev= and tag= set works when using shallow clones
3374 self.d.setVar('BB_GIT_SHALLOW', '1')
3375 self.d.setVar('SRCREV', '5b4e20377eea8d428edf1aeb2187c18f82ca6757')
3376 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.12.0"], self.d)
3377 fetcher.download()
3378 fetcher.unpack(self.unpackdir)
3379
3380 @skipIfNoNetwork()
3381 def test_tag_rev_match6(self):
3382 # Test that a url with rev= and a mismatched tag= fails at unpack when using shallow clones
3383 self.d.setVar('BB_GIT_SHALLOW', '1')
3384 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.8.0"], self.d)
3385 fetcher.download()
3386 with self.assertRaises(bb.fetch2.FetchError):
3387 fetcher.unpack(self.unpackdir)
3388
3389 @skipIfNoNetwork()
3390 def test_tag_rev_match7(self):
3391 # Test that a url with SRCREV, rev= and a mismatched tag= fails at unpack
3392 self.d.setVar('SRCREV', '5b4e20377eea8d428edf1aeb2187c18f82ca6757')
3393 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.12;protocol=https;rev=5b4e20377eea8d428edf1aeb2187c18f82ca6757;tag=2.8.0"], self.d)
3394 fetcher.download()
3395 with self.assertRaises(bb.fetch2.FetchError):
3396 fetcher.unpack(self.unpackdir)
3397
3124 3398
3125class FetchPremirroronlyLocalTest(FetcherTest): 3399class FetchPremirroronlyLocalTest(FetcherTest):
3126 3400
@@ -3203,58 +3477,6 @@ class FetchPremirroronlyLocalTest(FetcherTest):
3203 with self.assertRaises(bb.fetch2.NetworkAccess): 3477 with self.assertRaises(bb.fetch2.NetworkAccess):
3204 fetcher.download() 3478 fetcher.download()
3205 3479
3206 def test_mirror_tarball_multiple_branches(self):
3207 """
3208 test if PREMIRRORS can handle multiple name/branches correctly
3209 both branches have required revisions
3210 """
3211 self.make_git_repo()
3212 branch1rev = self.git_new_branch("testbranch1")
3213 branch2rev = self.git_new_branch("testbranch2")
3214 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
3215 self.d.setVar("SRCREV_branch1", branch1rev)
3216 self.d.setVar("SRCREV_branch2", branch2rev)
3217 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3218 self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
3219 fetcher.download()
3220 fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
3221 unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
3222 self.assertTrue(os.path.exists(unpacked), "Repo has not been unpackaged properly!")
3223 with open(unpacked, 'r') as f:
3224 content = f.read()
3225 ## We expect to see testbranch1 in the file, not master, not testbranch2
3226 self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!")
3227
3228 def test_mirror_tarball_multiple_branches_nobranch(self):
3229 """
3230 test if PREMIRRORS can handle multiple name/branches correctly
3231 Unbalanced name/branches raises ParameterError
3232 """
3233 self.make_git_repo()
3234 branch1rev = self.git_new_branch("testbranch1")
3235 branch2rev = self.git_new_branch("testbranch2")
3236 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
3237 self.d.setVar("SRCREV_branch1", branch1rev)
3238 self.d.setVar("SRCREV_branch2", branch2rev)
3239 with self.assertRaises(bb.fetch2.ParameterError):
3240 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3241
3242 def test_mirror_tarball_multiple_branches_norev(self):
3243 """
3244 test if PREMIRRORS can handle multiple name/branches correctly
3245 one of the branches specifies non existing SRCREV
3246 """
3247 self.make_git_repo()
3248 branch1rev = self.git_new_branch("testbranch1")
3249 branch2rev = self.git_new_branch("testbranch2")
3250 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
3251 self.d.setVar("SRCREV_branch1", branch1rev)
3252 self.d.setVar("SRCREV_branch2", "0"*40)
3253 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3254 self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
3255 with self.assertRaises(bb.fetch2.NetworkAccess):
3256 fetcher.download()
3257
3258 3480
3259class FetchPremirroronlyNetworkTest(FetcherTest): 3481class FetchPremirroronlyNetworkTest(FetcherTest):
3260 3482
@@ -3265,16 +3487,16 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3265 self.reponame = "fstests" 3487 self.reponame = "fstests"
3266 self.clonedir = os.path.join(self.tempdir, "git") 3488 self.clonedir = os.path.join(self.tempdir, "git")
3267 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame)) 3489 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
3268 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https" 3490 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https;branch=master"
3269 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") 3491 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3270 self.d.setVar("BB_NO_NETWORK", "0") 3492 self.d.setVar("BB_NO_NETWORK", "0")
3271 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") 3493 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3272 3494
3273 def make_git_repo(self): 3495 def make_git_repo(self):
3274 import shutil
3275 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz" 3496 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
3276 os.makedirs(self.clonedir) 3497 os.makedirs(self.clonedir)
3277 self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir) 3498 self.git("clone --bare {}".format(self.recipe_url), self.clonedir)
3499 self.git("update-ref HEAD 15413486df1f5a5b5af699b6f3ba5f0984e52a9f", self.gitdir)
3278 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) 3500 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
3279 shutil.rmtree(self.clonedir) 3501 shutil.rmtree(self.clonedir)
3280 3502
@@ -3282,7 +3504,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3282 def test_mirror_tarball_updated(self): 3504 def test_mirror_tarball_updated(self):
3283 self.make_git_repo() 3505 self.make_git_repo()
3284 ## Upstream commit is in the mirror 3506 ## Upstream commit is in the mirror
3285 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec") 3507 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
3286 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3508 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3287 fetcher.download() 3509 fetcher.download()
3288 3510
@@ -3290,7 +3512,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3290 def test_mirror_tarball_outdated(self): 3512 def test_mirror_tarball_outdated(self):
3291 self.make_git_repo() 3513 self.make_git_repo()
3292 ## Upstream commit not in the mirror 3514 ## Upstream commit not in the mirror
3293 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f") 3515 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
3294 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3516 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3295 with self.assertRaises(bb.fetch2.NetworkAccess): 3517 with self.assertRaises(bb.fetch2.NetworkAccess):
3296 fetcher.download() 3518 fetcher.download()
@@ -3300,7 +3522,6 @@ class FetchPremirroronlyMercurialTest(FetcherTest):
3300 the test covers also basic hg:// clone (see fetch_and_create_tarball 3522 the test covers also basic hg:// clone (see fetch_and_create_tarball
3301 """ 3523 """
3302 def skipIfNoHg(): 3524 def skipIfNoHg():
3303 import shutil
3304 if not shutil.which('hg'): 3525 if not shutil.which('hg'):
3305 return unittest.skip('Mercurial not installed') 3526 return unittest.skip('Mercurial not installed')
3306 return lambda f: f 3527 return lambda f: f
@@ -3347,7 +3568,7 @@ class FetchPremirroronlyBrokenTarball(FetcherTest):
3347 os.mkdir(self.mirrordir) 3568 os.mkdir(self.mirrordir)
3348 self.reponame = "bitbake" 3569 self.reponame = "bitbake"
3349 self.gitdir = os.path.join(self.tempdir, "git", self.reponame) 3570 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3350 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https" 3571 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https;branch=master"
3351 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") 3572 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3352 self.d.setVar("BB_NO_NETWORK", "1") 3573 self.d.setVar("BB_NO_NETWORK", "1")
3353 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") 3574 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
@@ -3356,10 +3577,223 @@ class FetchPremirroronlyBrokenTarball(FetcherTest):
3356 targz.write("This is not tar.gz file!") 3577 targz.write("This is not tar.gz file!")
3357 3578
3358 def test_mirror_broken_download(self): 3579 def test_mirror_broken_download(self):
3359 import sys
3360 self.d.setVar("SRCREV", "0"*40) 3580 self.d.setVar("SRCREV", "0"*40)
3361 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3581 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3362 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs: 3582 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
3363 fetcher.download() 3583 fetcher.download()
3364 output = "".join(logs.output) 3584 output = "".join(logs.output)
3365 self.assertFalse(" not a git repository (or any parent up to mount point /)" in output) 3585 self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
3586
3587class GoModTest(FetcherTest):
3588
3589 @skipIfNoNetwork()
3590 def test_gomod_url(self):
3591 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3592 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad']
3593
3594 fetcher = bb.fetch2.Fetch(urls, self.d)
3595 ud = fetcher.ud[urls[0]]
3596 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip')
3597 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.zip')
3598 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3599
3600 fetcher.download()
3601 fetcher.unpack(self.unpackdir)
3602 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3603 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3604 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3605 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3606 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3607
3608 @skipIfNoNetwork()
3609 def test_gomod_url_go_mod_only(self):
3610 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;'
3611 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873']
3612
3613 fetcher = bb.fetch2.Fetch(urls, self.d)
3614 ud = fetcher.ud[urls[0]]
3615 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod')
3616 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.mod')
3617 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3618
3619 fetcher.download()
3620 fetcher.unpack(self.unpackdir)
3621 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3622 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3623
3624 @skipIfNoNetwork()
3625 def test_gomod_url_sha256sum_varflag(self):
3626 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0']
3627 self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6')
3628
3629 fetcher = bb.fetch2.Fetch(urls, self.d)
3630 ud = fetcher.ud[urls[0]]
3631 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3632 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3633 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3634
3635 fetcher.download()
3636 fetcher.unpack(self.unpackdir)
3637 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3638 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3639 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3640 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3641 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3642
3643 @skipIfNoNetwork()
3644 def test_gomod_url_no_go_mod_in_module(self):
3645 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
3646 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
3647
3648 fetcher = bb.fetch2.Fetch(urls, self.d)
3649 ud = fetcher.ud[urls[0]]
3650 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3651 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3652 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3653
3654 fetcher.download()
3655 fetcher.unpack(self.unpackdir)
3656 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3657 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3658 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3659 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3660 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3661
3662 @skipIfNoNetwork()
3663 def test_gomod_url_host_only(self):
3664 urls = ['gomod://go.opencensus.io;version=v0.24.0;'
3665 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c']
3666
3667 fetcher = bb.fetch2.Fetch(urls, self.d)
3668 ud = fetcher.ud[urls[0]]
3669 self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip')
3670 self.assertEqual(ud.parm['downloadfilename'], 'go.opencensus.io@v0.24.0.zip')
3671 self.assertEqual(ud.parm['name'], 'go.opencensus.io@v0.24.0')
3672
3673 fetcher.download()
3674 fetcher.unpack(self.unpackdir)
3675 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3676 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3677 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3678 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3679 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
3680
3681class GoModGitTest(FetcherTest):
3682
3683 @skipIfNoNetwork()
3684 def test_gomodgit_url_repo(self):
3685 urls = ['gomodgit://golang.org/x/net;version=v0.9.0;'
3686 'repo=go.googlesource.com/net;'
3687 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1']
3688
3689 fetcher = bb.fetch2.Fetch(urls, self.d)
3690 ud = fetcher.ud[urls[0]]
3691 self.assertEqual(ud.host, 'go.googlesource.com')
3692 self.assertEqual(ud.path, '/net')
3693 self.assertEqual(ud.name, 'golang.org/x/net@v0.9.0')
3694 self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1')
3695
3696 fetcher.download()
3697 self.assertTrue(os.path.exists(ud.localpath))
3698
3699 fetcher.unpack(self.unpackdir)
3700 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3701 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca')))
3702 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3703 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip')))
3704 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')))
3705 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')),
3706 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e')
3707
3708 @skipIfNoNetwork()
3709 def test_gomodgit_url_subdir(self):
3710 urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3711 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;'
3712 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3']
3713
3714 fetcher = bb.fetch2.Fetch(urls, self.d)
3715 ud = fetcher.ud[urls[0]]
3716 self.assertEqual(ud.host, 'github.com')
3717 self.assertEqual(ud.path, '/Azure/azure-sdk-for-go')
3718 self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob')
3719 self.assertEqual(ud.name, 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3720 self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3')
3721
3722 fetcher.download()
3723 self.assertTrue(os.path.exists(ud.localpath))
3724
3725 fetcher.unpack(self.unpackdir)
3726 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3727 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3')))
3728 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3729 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3730 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3731 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3732 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3733
3734 @skipIfNoNetwork()
3735 def test_gomodgit_url_srcrev_var(self):
3736 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0']
3737 self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3738
3739 fetcher = bb.fetch2.Fetch(urls, self.d)
3740 ud = fetcher.ud[urls[0]]
3741 self.assertEqual(ud.host, 'gopkg.in')
3742 self.assertEqual(ud.path, '/ini.v1')
3743 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3744 self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3745
3746 fetcher.download()
3747 fetcher.unpack(self.unpackdir)
3748 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3749 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3750 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3751 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3752 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3753 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3754 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3755
3756 @skipIfNoNetwork()
3757 def test_gomodgit_url_no_go_mod_in_module(self):
3758 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;'
3759 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951']
3760
3761 fetcher = bb.fetch2.Fetch(urls, self.d)
3762 ud = fetcher.ud[urls[0]]
3763 self.assertEqual(ud.host, 'gopkg.in')
3764 self.assertEqual(ud.path, '/ini.v1')
3765 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3766 self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3767
3768 fetcher.download()
3769 fetcher.unpack(self.unpackdir)
3770 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3771 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3772 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3773 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3774 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3775 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3776 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3777
3778 @skipIfNoNetwork()
3779 def test_gomodgit_url_host_only(self):
3780 urls = ['gomodgit://go.opencensus.io;version=v0.24.0;'
3781 'repo=github.com/census-instrumentation/opencensus-go;'
3782 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5']
3783
3784 fetcher = bb.fetch2.Fetch(urls, self.d)
3785 ud = fetcher.ud[urls[0]]
3786 self.assertEqual(ud.host, 'github.com')
3787 self.assertEqual(ud.path, '/census-instrumentation/opencensus-go')
3788 self.assertEqual(ud.name, 'go.opencensus.io@v0.24.0')
3789 self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5')
3790
3791 fetcher.download()
3792 fetcher.unpack(self.unpackdir)
3793 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3794 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1')))
3795 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3796 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3797 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3798 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3799 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
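The GoModTest and GoModGitTest cases above pin down the behaviour of the new gomod:// and gomodgit:// fetchers end to end. A minimal sketch of the same flow outside the test harness, reusing the gopkg.in/ini.v1 module and checksum from the tests; here d (a pre-configured datastore, as set up by FetcherTest) and unpackdir are assumptions:

    import bb.fetch2

    urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
            'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
    fetcher = bb.fetch2.Fetch(urls, d)  # d: assumed datastore, as in FetcherTest.setUp
    fetcher.download()                  # fetches the module zip and .mod via proxy.golang.org
    fetcher.unpack(unpackdir)           # populates pkg/mod/cache/download under unpackdir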
diff --git a/bitbake/lib/bb/tests/filter.py b/bitbake/lib/bb/tests/filter.py
new file mode 100644
index 0000000000..245df7b22b
--- /dev/null
+++ b/bitbake/lib/bb/tests/filter.py
@@ -0,0 +1,88 @@
1#
2# Copyright (C) 2025 Garmin Ltd. or its subsidiaries
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import unittest
8import bb.filter
9
10
11class BuiltinFilterTest(unittest.TestCase):
12 def test_disallowed_builtins(self):
13 with self.assertRaises(NameError):
14 val = bb.filter.apply_filters("1", ["open('foo.txt', 'rb')"])
15
16 def test_prefix(self):
17 val = bb.filter.apply_filters("1 2 3", ["prefix(val, 'a')"])
18 self.assertEqual(val, "a1 a2 a3")
19
20 val = bb.filter.apply_filters("", ["prefix(val, 'a')"])
21 self.assertEqual(val, "")
22
23 def test_suffix(self):
24 val = bb.filter.apply_filters("1 2 3", ["suffix(val, 'b')"])
25 self.assertEqual(val, "1b 2b 3b")
26
27 val = bb.filter.apply_filters("", ["suffix(val, 'b')"])
28 self.assertEqual(val, "")
29
30 def test_sort(self):
31 val = bb.filter.apply_filters("z y x", ["sort(val)"])
32 self.assertEqual(val, "x y z")
33
34 val = bb.filter.apply_filters("", ["sort(val)"])
35 self.assertEqual(val, "")
36
37 def test_identity(self):
38 val = bb.filter.apply_filters("1 2 3", ["val"])
39 self.assertEqual(val, "1 2 3")
40
41 val = bb.filter.apply_filters("123", ["val"])
42 self.assertEqual(val, "123")
43
44 def test_empty(self):
45 val = bb.filter.apply_filters("1 2 3", ["", "prefix(val, 'a')", ""])
46 self.assertEqual(val, "a1 a2 a3")
47
48 def test_nested(self):
49 val = bb.filter.apply_filters("1 2 3", ["prefix(prefix(val, 'a'), 'b')"])
50 self.assertEqual(val, "ba1 ba2 ba3")
51
52 val = bb.filter.apply_filters("1 2 3", ["prefix(prefix(val, 'b'), 'a')"])
53 self.assertEqual(val, "ab1 ab2 ab3")
54
55 def test_filter_order(self):
56 val = bb.filter.apply_filters("1 2 3", ["prefix(val, 'a')", "prefix(val, 'b')"])
57 self.assertEqual(val, "ba1 ba2 ba3")
58
59 val = bb.filter.apply_filters("1 2 3", ["prefix(val, 'b')", "prefix(val, 'a')"])
60 self.assertEqual(val, "ab1 ab2 ab3")
61
62 val = bb.filter.apply_filters("1 2 3", ["prefix(val, 'a')", "suffix(val, 'b')"])
63 self.assertEqual(val, "a1b a2b a3b")
64
65 val = bb.filter.apply_filters("1 2 3", ["suffix(val, 'b')", "prefix(val, 'a')"])
66 self.assertEqual(val, "a1b a2b a3b")
67
68 def test_remove(self):
69 val = bb.filter.apply_filters("1 2 3", ["remove(val, ['2'])"])
70 self.assertEqual(val, "1 3")
71
72 val = bb.filter.apply_filters("1,2,3", ["remove(val, ['2'], ',')"])
73 self.assertEqual(val, "1,3")
74
75 val = bb.filter.apply_filters("1 2 3", ["remove(val, ['4'])"])
76 self.assertEqual(val, "1 2 3")
77
78 val = bb.filter.apply_filters("1 2 3", ["remove(val, ['1', '2'])"])
79 self.assertEqual(val, "3")
80
81 val = bb.filter.apply_filters("1 2 3", ["remove(val, '2')"])
82 self.assertEqual(val, "1 3")
83
84 val = bb.filter.apply_filters("1 2 3", ["remove(val, '4')"])
85 self.assertEqual(val, "1 2 3")
86
87 val = bb.filter.apply_filters("1 2 3", ["remove(val, '1 2')"])
88 self.assertEqual(val, "3")
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 72d1962e7e..d3867ece98 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -39,7 +39,6 @@ C = "3"
39 os.chdir(self.origdir) 39 os.chdir(self.origdir)
40 40
41 def parsehelper(self, content, suffix = ".bb"): 41 def parsehelper(self, content, suffix = ".bb"):
42
43 f = tempfile.NamedTemporaryFile(suffix = suffix) 42 f = tempfile.NamedTemporaryFile(suffix = suffix)
44 f.write(bytes(content, "utf-8")) 43 f.write(bytes(content, "utf-8"))
45 f.flush() 44 f.flush()
@@ -47,17 +46,17 @@ C = "3"
47 return f 46 return f
48 47
49 def test_parse_simple(self): 48 def test_parse_simple(self):
50 f = self.parsehelper(self.testfile) 49 with self.parsehelper(self.testfile) as f:
51 d = bb.parse.handle(f.name, self.d)[''] 50 d = bb.parse.handle(f.name, self.d)['']
52 self.assertEqual(d.getVar("A"), "1") 51 self.assertEqual(d.getVar("A"), "1")
53 self.assertEqual(d.getVar("B"), "2") 52 self.assertEqual(d.getVar("B"), "2")
54 self.assertEqual(d.getVar("C"), "3") 53 self.assertEqual(d.getVar("C"), "3")
55 54
56 def test_parse_incomplete_function(self): 55 def test_parse_incomplete_function(self):
57 testfileB = self.testfile.replace("}", "") 56 testfileB = self.testfile.replace("}", "")
58 f = self.parsehelper(testfileB) 57 with self.parsehelper(testfileB) as f:
59 with self.assertRaises(bb.parse.ParseError): 58 with self.assertRaises(bb.parse.ParseError):
60 d = bb.parse.handle(f.name, self.d)[''] 59 d = bb.parse.handle(f.name, self.d)['']
61 60
62 unsettest = """ 61 unsettest = """
63A = "1" 62A = "1"
@@ -69,12 +68,65 @@ unset B[flag]
69""" 68"""
70 69
71 def test_parse_unset(self): 70 def test_parse_unset(self):
72 f = self.parsehelper(self.unsettest) 71 with self.parsehelper(self.unsettest) as f:
73 d = bb.parse.handle(f.name, self.d)[''] 72 d = bb.parse.handle(f.name, self.d)['']
74 self.assertEqual(d.getVar("A"), None) 73 self.assertEqual(d.getVar("A"), None)
75 self.assertEqual(d.getVarFlag("A","flag"), None) 74 self.assertEqual(d.getVarFlag("A","flag"), None)
76 self.assertEqual(d.getVar("B"), "2") 75 self.assertEqual(d.getVar("B"), "2")
77 76
77 defaulttest = """
78A = "set value"
79A ??= "default value"
80
81A[flag_set_vs_question] = "set flag"
82A[flag_set_vs_question] ?= "question flag"
83
84A[flag_set_vs_default] = "set flag"
85A[flag_set_vs_default] ??= "default flag"
86
87A[flag_question] ?= "question flag"
88
89A[flag_default] ??= "default flag"
90
91A[flag_question_vs_default] ?= "question flag"
92A[flag_question_vs_default] ??= "default flag"
93
94A[flag_default_vs_question] ??= "default flag"
95A[flag_default_vs_question] ?= "question flag"
96
97A[flag_set_question_default] = "set flag"
98A[flag_set_question_default] ?= "question flag"
99A[flag_set_question_default] ??= "default flag"
100
101A[flag_set_default_question] = "set flag"
102A[flag_set_default_question] ??= "default flag"
103A[flag_set_default_question] ?= "question flag"
104
105A[flag_set_twice] = "set flag first"
106A[flag_set_twice] = "set flag second"
107
108A[flag_question_twice] ?= "question flag first"
109A[flag_question_twice] ?= "question flag second"
110
111A[flag_default_twice] ??= "default flag first"
112A[flag_default_twice] ??= "default flag second"
113"""
114 def test_parse_defaulttest(self):
115 with self.parsehelper(self.defaulttest) as f:
116 d = bb.parse.handle(f.name, self.d)['']
117 self.assertEqual(d.getVar("A"), "set value")
118 self.assertEqual(d.getVarFlag("A","flag_set_vs_question"), "set flag")
119 self.assertEqual(d.getVarFlag("A","flag_set_vs_default"), "set flag")
120 self.assertEqual(d.getVarFlag("A","flag_question"), "question flag")
121 self.assertEqual(d.getVarFlag("A","flag_default"), "default flag")
122 self.assertEqual(d.getVarFlag("A","flag_question_vs_default"), "question flag")
123 self.assertEqual(d.getVarFlag("A","flag_default_vs_question"), "question flag")
124 self.assertEqual(d.getVarFlag("A","flag_set_question_default"), "set flag")
125 self.assertEqual(d.getVarFlag("A","flag_set_default_question"), "set flag")
126 self.assertEqual(d.getVarFlag("A","flag_set_twice"), "set flag second")
127 self.assertEqual(d.getVarFlag("A","flag_question_twice"), "question flag first")
128 self.assertEqual(d.getVarFlag("A","flag_default_twice"), "default flag second")
129
78 exporttest = """ 130 exporttest = """
79A = "a" 131A = "a"
80export B = "b" 132export B = "b"
@@ -83,8 +135,8 @@ exportD = "d"
83""" 135"""
84 136
85 def test_parse_exports(self): 137 def test_parse_exports(self):
86 f = self.parsehelper(self.exporttest) 138 with self.parsehelper(self.exporttest) as f:
87 d = bb.parse.handle(f.name, self.d)[''] 139 d = bb.parse.handle(f.name, self.d)['']
88 self.assertEqual(d.getVar("A"), "a") 140 self.assertEqual(d.getVar("A"), "a")
89 self.assertIsNone(d.getVarFlag("A", "export")) 141 self.assertIsNone(d.getVarFlag("A", "export"))
90 self.assertEqual(d.getVar("B"), "b") 142 self.assertEqual(d.getVar("B"), "b")
@@ -96,7 +148,6 @@ exportD = "d"
96 self.assertEqual(d.getVar("exportD"), "d") 148 self.assertEqual(d.getVar("exportD"), "d")
97 self.assertIsNone(d.getVarFlag("exportD", "export")) 149 self.assertIsNone(d.getVarFlag("exportD", "export"))
98 150
99
100 overridetest = """ 151 overridetest = """
101RRECOMMENDS:${PN} = "a" 152RRECOMMENDS:${PN} = "a"
102RRECOMMENDS:${PN}:libc = "b" 153RRECOMMENDS:${PN}:libc = "b"
@@ -105,8 +156,8 @@ PN = "gtk+"
105""" 156"""
106 157
107 def test_parse_overrides(self): 158 def test_parse_overrides(self):
108 f = self.parsehelper(self.overridetest) 159 with self.parsehelper(self.overridetest) as f:
109 d = bb.parse.handle(f.name, self.d)[''] 160 d = bb.parse.handle(f.name, self.d)['']
110 self.assertEqual(d.getVar("RRECOMMENDS"), "b") 161 self.assertEqual(d.getVar("RRECOMMENDS"), "b")
111 bb.data.expandKeys(d) 162 bb.data.expandKeys(d)
112 self.assertEqual(d.getVar("RRECOMMENDS"), "b") 163 self.assertEqual(d.getVar("RRECOMMENDS"), "b")
@@ -120,8 +171,8 @@ EXTRA_OECONF:append = " c"
120""" 171"""
121 172
122 def test_parse_overrides2(self): 173 def test_parse_overrides2(self):
123 f = self.parsehelper(self.overridetest2) 174 with self.parsehelper(self.overridetest2) as f:
124 d = bb.parse.handle(f.name, self.d)[''] 175 d = bb.parse.handle(f.name, self.d)['']
125 d.appendVar("EXTRA_OECONF", " d") 176 d.appendVar("EXTRA_OECONF", " d")
126 d.setVar("OVERRIDES", "class-target") 177 d.setVar("OVERRIDES", "class-target")
127 self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d") 178 self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d")
@@ -133,8 +184,8 @@ PN = "bc"
133""" 184"""
134 185
135 def test_parse_combinations(self): 186 def test_parse_combinations(self):
136 f = self.parsehelper(self.overridetest3) 187 with self.parsehelper(self.overridetest3) as f:
137 d = bb.parse.handle(f.name, self.d)[''] 188 d = bb.parse.handle(f.name, self.d)['']
138 bb.data.expandKeys(d) 189 bb.data.expandKeys(d)
139 self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B") 190 self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
140 d.setVar("DESCRIPTION", "E") 191 d.setVar("DESCRIPTION", "E")
@@ -142,7 +193,6 @@ PN = "bc"
142 d.setVar("OVERRIDES", "bc-dev") 193 d.setVar("OVERRIDES", "bc-dev")
143 self.assertEqual(d.getVar("DESCRIPTION"), "C D") 194 self.assertEqual(d.getVar("DESCRIPTION"), "C D")
144 195
145
146 classextend = """ 196 classextend = """
147VAR_var:override1 = "B" 197VAR_var:override1 = "B"
148EXTRA = ":override1" 198EXTRA = ":override1"
@@ -165,19 +215,31 @@ python () {
165 # 215 #
166 def test_parse_classextend_contamination(self): 216 def test_parse_classextend_contamination(self):
167 self.d.setVar("__bbclasstype", "recipe") 217 self.d.setVar("__bbclasstype", "recipe")
168 cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass") 218 with self.parsehelper(self.classextend_bbclass, suffix=".bbclass") as cls:
169 #clsname = os.path.basename(cls.name).replace(".bbclass", "") 219 #clsname = os.path.basename(cls.name).replace(".bbclass", "")
170 self.classextend = self.classextend.replace("###CLASS###", cls.name) 220 self.classextend = self.classextend.replace("###CLASS###", cls.name)
171 f = self.parsehelper(self.classextend) 221 with self.parsehelper(self.classextend) as f:
172 alldata = bb.parse.handle(f.name, self.d) 222 alldata = bb.parse.handle(f.name, self.d)
173 d1 = alldata[''] 223 d1 = alldata['']
174 d2 = alldata[cls.name] 224 d2 = alldata[cls.name]
175 self.assertEqual(d1.getVar("VAR_var"), "B") 225 self.assertEqual(d1.getVar("VAR_var"), "B")
176 self.assertEqual(d2.getVar("VAR_var"), None) 226 self.assertEqual(d2.getVar("VAR_var"), None)
177 227
178 addtask_deltask = """ 228 addtask_deltask = """
179addtask do_patch after do_foo after do_unpack before do_configure before do_compile 229addtask do_patch after do_foo after do_unpack before do_configure before do_compile
180addtask do_fetch do_patch 230addtask do_fetch2 do_patch2
231
232addtask do_myplaintask
233addtask do_myplaintask2
234deltask do_myplaintask2
235addtask do_mytask# comment
236addtask do_mytask2 # comment2
237addtask do_mytask3
238deltask do_mytask3# comment
239deltask do_mytask4 # comment2
240
241# Ensure a task name missing the do_ prefix works in an 'after' clause
242addtask do_mytask5 after mytask
181 243
182MYVAR = "do_patch" 244MYVAR = "do_patch"
183EMPTYVAR = "" 245EMPTYVAR = ""
@@ -185,17 +247,10 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR}
185deltask ${EMPTYVAR} 247deltask ${EMPTYVAR}
186""" 248"""
187 def test_parse_addtask_deltask(self): 249 def test_parse_addtask_deltask(self):
188 import sys 250 with self.parsehelper(self.addtask_deltask) as f:
189
190 with self.assertLogs() as logs:
191 f = self.parsehelper(self.addtask_deltask)
192 d = bb.parse.handle(f.name, self.d)[''] 251 d = bb.parse.handle(f.name, self.d)['']
193 252 self.assertSequenceEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], bb.build.listtasks(d))
194 output = "".join(logs.output) 253 self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps"))
195 self.assertTrue("addtask contained multiple 'before' keywords" in output)
196 self.assertTrue("addtask contained multiple 'after' keywords" in output)
197 self.assertTrue('addtask ignored: " do_patch"' in output)
198 #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
199 254
200 broken_multiline_comment = """ 255 broken_multiline_comment = """
201# First line of comment \\ 256# First line of comment \\
@@ -203,10 +258,9 @@ deltask ${EMPTYVAR}
203 258
204""" 259"""
205 def test_parse_broken_multiline_comment(self): 260 def test_parse_broken_multiline_comment(self):
206 f = self.parsehelper(self.broken_multiline_comment) 261 with self.parsehelper(self.broken_multiline_comment) as f:
207 with self.assertRaises(bb.BBHandledException): 262 with self.assertRaises(bb.BBHandledException):
208 d = bb.parse.handle(f.name, self.d)[''] 263 d = bb.parse.handle(f.name, self.d)['']
209
210 264
211 comment_in_var = """ 265 comment_in_var = """
212VAR = " \\ 266VAR = " \\
@@ -216,10 +270,9 @@ VAR = " \\
216" 270"
217""" 271"""
218 def test_parse_comment_in_var(self): 272 def test_parse_comment_in_var(self):
219 f = self.parsehelper(self.comment_in_var) 273 with self.parsehelper(self.comment_in_var) as f:
220 with self.assertRaises(bb.BBHandledException): 274 with self.assertRaises(bb.BBHandledException):
221 d = bb.parse.handle(f.name, self.d)[''] 275 d = bb.parse.handle(f.name, self.d)['']
222
223 276
224 at_sign_in_var_flag = """ 277 at_sign_in_var_flag = """
225A[flag@.service] = "nonet" 278A[flag@.service] = "nonet"
@@ -229,8 +282,8 @@ C[f] = "flag"
229unset A[flag@.service] 282unset A[flag@.service]
230""" 283"""
231 def test_parse_at_sign_in_var_flag(self): 284 def test_parse_at_sign_in_var_flag(self):
232 f = self.parsehelper(self.at_sign_in_var_flag) 285 with self.parsehelper(self.at_sign_in_var_flag) as f:
233 d = bb.parse.handle(f.name, self.d)[''] 286 d = bb.parse.handle(f.name, self.d)['']
234 self.assertEqual(d.getVar("A"), None) 287 self.assertEqual(d.getVar("A"), None)
235 self.assertEqual(d.getVar("B"), None) 288 self.assertEqual(d.getVar("B"), None)
236 self.assertEqual(d.getVarFlag("A","flag@.service"), None) 289 self.assertEqual(d.getVarFlag("A","flag@.service"), None)
@@ -239,9 +292,9 @@ unset A[flag@.service]
239 292
240 def test_parse_invalid_at_sign_in_var_flag(self): 293 def test_parse_invalid_at_sign_in_var_flag(self):
241 invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f") 294 invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
242 f = self.parsehelper(invalid_at_sign) 295 with self.parsehelper(invalid_at_sign) as f:
243 with self.assertRaises(bb.parse.ParseError): 296 with self.assertRaises(bb.parse.ParseError):
244 d = bb.parse.handle(f.name, self.d)[''] 297 d = bb.parse.handle(f.name, self.d)['']
245 298
246 export_function_recipe = """ 299 export_function_recipe = """
247inherit someclass 300inherit someclass
@@ -296,14 +349,11 @@ EXPORT_FUNCTIONS do_compile do_compilepython
296 os.makedirs(tempdir + "/classes") 349 os.makedirs(tempdir + "/classes")
297 with open(tempdir + "/classes/someclass.bbclass", "w") as f: 350 with open(tempdir + "/classes/someclass.bbclass", "w") as f:
298 f.write(self.export_function_class) 351 f.write(self.export_function_class)
299 f.flush()
300 with open(tempdir + "/classes/secondclass.bbclass", "w") as f: 352 with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
301 f.write(self.export_function_class2) 353 f.write(self.export_function_class2)
302 f.flush()
303 354
304 with open(recipename, "w") as f: 355 with open(recipename, "w") as f:
305 f.write(self.export_function_recipe) 356 f.write(self.export_function_recipe)
306 f.flush()
307 os.chdir(tempdir) 357 os.chdir(tempdir)
308 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))[''] 358 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
309 self.assertIn("someclass_do_compile", d.getVar("do_compile")) 359 self.assertIn("someclass_do_compile", d.getVar("do_compile"))
@@ -313,7 +363,6 @@ EXPORT_FUNCTIONS do_compile do_compilepython
313 recipename2 = tempdir + "/recipe2.bb" 363 recipename2 = tempdir + "/recipe2.bb"
314 with open(recipename2, "w") as f: 364 with open(recipename2, "w") as f:
315 f.write(self.export_function_recipe2) 365 f.write(self.export_function_recipe2)
316 f.flush()
317 366
318 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))[''] 367 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
319 self.assertNotIn("someclass_do_compile", d.getVar("do_compile")) 368 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
@@ -324,10 +373,8 @@ EXPORT_FUNCTIONS do_compile do_compilepython
324 373
325 with open(recipename, "a+") as f: 374 with open(recipename, "a+") as f:
326 f.write("\ninherit secondclass\n") 375 f.write("\ninherit secondclass\n")
327 f.flush()
328 with open(recipename2, "a+") as f: 376 with open(recipename2, "a+") as f:
329 f.write("\ninherit secondclass\n") 377 f.write("\ninherit secondclass\n")
330 f.flush()
331 378
332 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))[''] 379 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
333 self.assertIn("secondclass_do_compile", d.getVar("do_compile")) 380 self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
@@ -341,3 +388,123 @@ EXPORT_FUNCTIONS do_compile do_compilepython
341 self.assertIn("else", d.getVar("do_compilepython")) 388 self.assertIn("else", d.getVar("do_compilepython"))
342 check_function_flags(d) 389 check_function_flags(d)
343 390
391 export_function_unclosed_tab = """
392do_compile () {
393 bb.note("Something")
394\t}
395"""
396 export_function_unclosed_space = """
397do_compile () {
398 bb.note("Something")
399 }
400"""
401 export_function_residue = """
402do_compile () {
403 bb.note("Something")
404}
405
406include \\
407"""
408
409 def test_unclosed_functions(self):
410 def test_helper(content, expected_error):
411 with tempfile.TemporaryDirectory() as tempdir:
412 recipename = tempdir + "/recipe_unclosed.bb"
413 with open(recipename, "w") as f:
414 f.write(content)
415 os.chdir(tempdir)
416 with self.assertRaises(bb.parse.ParseError) as error:
417 bb.parse.handle(recipename, bb.data.createCopy(self.d))
418 self.assertIn(expected_error, str(error.exception))
419
420 with tempfile.TemporaryDirectory() as tempdir:
421 test_helper(self.export_function_unclosed_tab, "Unparsed lines from unclosed function")
422 test_helper(self.export_function_unclosed_space, "Unparsed lines from unclosed function")
423 test_helper(self.export_function_residue, "Unparsed lines")
424
425 recipename_closed = tempdir + "/recipe_closed.bb"
426 with open(recipename_closed, "w") as in_file:
427 lines = self.export_function_unclosed_tab.split("\n")
428 lines[3] = "}"
429 in_file.write("\n".join(lines))
430 bb.parse.handle(recipename_closed, bb.data.createCopy(self.d))
431
432 special_character_assignment = """
433A+="a"
434A+ = "b"
435+ = "c"
436"""
437 ambiguous_assignment = """
438+= "d"
439"""
440 def test_parse_special_character_assignment(self):
441 with self.parsehelper(self.special_character_assignment) as f:
442 d = bb.parse.handle(f.name, self.d)['']
443 self.assertEqual(d.getVar("A"), " a")
444 self.assertEqual(d.getVar("A+"), "b")
445 self.assertEqual(d.getVar("+"), "c")
446
 447 with self.parsehelper(self.ambiguous_assignment) as f:
448 with self.assertRaises(bb.parse.ParseError) as error:
449 bb.parse.handle(f.name, self.d)
450 self.assertIn("Empty variable name in assignment", str(error.exception))
451
452 someconf1 = """
453EXTRA_OECONF:append = " foo"
454"""
455
456 someconf2 = """
457EXTRA_OECONF:append = " bar"
458"""
459
460 someconf3 = """
461EXTRA_OECONF:append = " foobar"
462"""
463
464 def test_include_and_require(self):
465 def test_helper(content, result):
466 with self.parsehelper(content) as f:
467 if isinstance(result, type) and issubclass(result, Exception):
468 with self.assertRaises(result):
469 d = bb.parse.handle(f.name, bb.data.createCopy(self.d))['']
470 else:
471 d = bb.parse.handle(f.name, bb.data.createCopy(self.d))['']
472 self.assertEqual(d.getVar("EXTRA_OECONF"), result)
473
474 with tempfile.TemporaryDirectory() as tempdir:
475 os.makedirs(tempdir + "/conf1")
476 os.makedirs(tempdir + "/conf2")
477
478 with open(tempdir + "/conf1/some.conf", "w") as f:
479 f.write(self.someconf1)
480 with open(tempdir + "/conf2/some.conf", "w") as f:
481 f.write(self.someconf2)
482 with open(tempdir + "/conf2/some3.conf", "w") as f:
483 f.write(self.someconf3)
484
485 self.d.setVar("BBPATH", tempdir + "/conf1" + ":" + tempdir + "/conf2")
486
487 test_helper("include some.conf", " foo")
488 test_helper("include someother.conf", None)
489 test_helper("include some3.conf", " foobar")
490 test_helper("include ${@''}", None)
491 test_helper("include " + tempdir + "/conf2/some.conf", " bar")
492
493 test_helper("require some.conf", " foo")
494 test_helper("require someother.conf", bb.parse.ParseError)
495 test_helper("require some3.conf", " foobar")
496 test_helper("require ${@''}", None)
497 test_helper("require " + tempdir + "/conf2/some.conf", " bar")
498
499 test_helper("include_all some.conf", " foo bar")
500 test_helper("include_all someother.conf", None)
501 test_helper("include_all some3.conf", " foobar")
502
503 self.d.setVar("BBPATH", tempdir + "/conf2" + ":" + tempdir + "/conf1")
504
505 test_helper("include some.conf", " bar")
506 test_helper("include some3.conf", " foobar")
507 test_helper("require some.conf", " bar")
508 test_helper("require some3.conf", " foobar")
509 test_helper("include_all some.conf", " bar foo")
510 test_helper("include_all some3.conf", " foobar")
diff --git a/bitbake/lib/bb/tests/persist_data.py b/bitbake/lib/bb/tests/persist_data.py
deleted file mode 100644
index f641b5acbc..0000000000
--- a/bitbake/lib/bb/tests/persist_data.py
+++ /dev/null
@@ -1,129 +0,0 @@
1#
2# BitBake Test for lib/bb/persist_data/
3#
4# Copyright (C) 2018 Garmin Ltd.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import unittest
10import bb.data
11import bb.persist_data
12import tempfile
13import threading
14
15class PersistDataTest(unittest.TestCase):
16 def _create_data(self):
17 return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
18
19 def setUp(self):
20 self.d = bb.data.init()
21 self.tempdir = tempfile.TemporaryDirectory()
22 self.d['PERSISTENT_DIR'] = self.tempdir.name
23 self.data = self._create_data()
24 self.items = {
25 'A1': '1',
26 'B1': '2',
27 'C2': '3'
28 }
29 self.stress_count = 10000
30 self.thread_count = 5
31
32 for k,v in self.items.items():
33 self.data[k] = v
34
35 def tearDown(self):
36 self.tempdir.cleanup()
37
38 def _iter_helper(self, seen, iterator):
39 with iter(iterator):
40 for v in iterator:
41 self.assertTrue(v in seen)
42 seen.remove(v)
43 self.assertEqual(len(seen), 0, '%s not seen' % seen)
44
45 def test_get(self):
46 for k, v in self.items.items():
47 self.assertEqual(self.data[k], v)
48
49 self.assertIsNone(self.data.get('D'))
50 with self.assertRaises(KeyError):
51 self.data['D']
52
53 def test_set(self):
54 for k, v in self.items.items():
55 self.data[k] += '-foo'
56
57 for k, v in self.items.items():
58 self.assertEqual(self.data[k], v + '-foo')
59
60 def test_delete(self):
61 self.data['D'] = '4'
62 self.assertEqual(self.data['D'], '4')
63 del self.data['D']
64 self.assertIsNone(self.data.get('D'))
65 with self.assertRaises(KeyError):
66 self.data['D']
67
68 def test_contains(self):
69 for k in self.items:
70 self.assertTrue(k in self.data)
71 self.assertTrue(self.data.has_key(k))
72 self.assertFalse('NotFound' in self.data)
73 self.assertFalse(self.data.has_key('NotFound'))
74
75 def test_len(self):
76 self.assertEqual(len(self.data), len(self.items))
77
78 def test_iter(self):
79 self._iter_helper(set(self.items.keys()), self.data)
80
81 def test_itervalues(self):
82 self._iter_helper(set(self.items.values()), self.data.itervalues())
83
84 def test_iteritems(self):
85 self._iter_helper(set(self.items.items()), self.data.iteritems())
86
87 def test_get_by_pattern(self):
88 self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
89
90 def _stress_read(self, data):
91 for i in range(self.stress_count):
92 for k in self.items:
93 data[k]
94
95 def _stress_write(self, data):
96 for i in range(self.stress_count):
97 for k, v in self.items.items():
98 data[k] = v + str(i)
99
100 def _validate_stress(self):
101 for k, v in self.items.items():
102 self.assertEqual(self.data[k], v + str(self.stress_count - 1))
103
104 def test_stress(self):
105 self._stress_read(self.data)
106 self._stress_write(self.data)
107 self._validate_stress()
108
109 def test_stress_threads(self):
110 def read_thread():
111 data = self._create_data()
112 self._stress_read(data)
113
114 def write_thread():
115 data = self._create_data()
116 self._stress_write(data)
117
118 threads = []
119 for i in range(self.thread_count):
120 threads.append(threading.Thread(target=read_thread))
121 threads.append(threading.Thread(target=write_thread))
122
123 for t in threads:
124 t.start()
125 self._stress_read(self.data)
126 for t in threads:
127 t.join()
128 self._validate_stress()
129
diff --git a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
index b57650d591..3a3db55d2c 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
+++ b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
@@ -9,7 +9,7 @@ def stamptask(d):
9 with open(stampname, "a+") as f: 9 with open(stampname, "a+") as f:
10 f.write(d.getVar("BB_UNIHASH") + "\n") 10 f.write(d.getVar("BB_UNIHASH") + "\n")
11 11
12 if d.getVar("BB_CURRENT_MC") != "default": 12 if d.getVar("BB_CURRENT_MC") != "":
13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") 13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}")
14 if thistask in d.getVar("SLOWTASKS").split(): 14 if thistask in d.getVar("SLOWTASKS").split():
15 bb.note("Slowing task %s" % thistask) 15 bb.note("Slowing task %s" % thistask)
@@ -98,7 +98,7 @@ do_package_qa[rdeptask] = "do_packagedata"
98do_populate_lic_deploy[recrdeptask] += "do_populate_lic do_deploy" 98do_populate_lic_deploy[recrdeptask] += "do_populate_lic do_deploy"
99 99
100DEBIANRDEP = "do_packagedata" 100DEBIANRDEP = "do_packagedata"
101oo_package_write_ipk[rdeptask] = "${DEBIANRDEP}" 101do_package_write_ipk[rdeptask] = "${DEBIANRDEP}"
102do_package_write_rpm[rdeptask] = "${DEBIANRDEP}" 102do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"
103 103
104addtask fetch 104addtask fetch
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
new file mode 100644
index 0000000000..3c7dca0257
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
@@ -0,0 +1,2 @@
1do_build[mcdepends] = "mc::mc-1:h1:do_invalid"
2
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
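The two new recipes set up a deliberate failure: g1.bb declares an mcdepends on do_invalid, a task the empty h1.bb never defines, so the runqueue has to produce the "non-existent task" diagnostic exercised below. The flag value takes the form mc:<from-mc>:<to-mc>:<recipe>:<task>; a sketch of how such a value splits, illustrative only and not the runqueue's actual parser:

def parse_mcdepends(value):
    # "mc::mc-1:h1:do_invalid" -> from the default multiconfig ("")
    # to multiconfig "mc-1", recipe "h1", task "do_invalid"
    prefix, from_mc, to_mc, recipe, task = value.split(":")
    assert prefix == "mc"
    return from_mc, to_mc, recipe, task

print(parse_mcdepends("mc::mc-1:h1:do_invalid"))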
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index cc87e8d6a8..74f5ded2e6 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -26,7 +26,7 @@ class RunQueueTests(unittest.TestCase):
26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" 26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot"
27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" 27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot"
28 28
29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): 29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False, allowfailure=False):
30 env = os.environ.copy() 30 env = os.environ.copy()
31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) 31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR" 32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
@@ -41,6 +41,8 @@ class RunQueueTests(unittest.TestCase):
41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) 41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
42 print(output) 42 print(output)
43 except subprocess.CalledProcessError as e: 43 except subprocess.CalledProcessError as e:
44 if allowfailure:
45 return e.output
44 self.fail("Command %s failed with %s" % (cmd, e.output)) 46 self.fail("Command %s failed with %s" % (cmd, e.output))
45 tasks = [] 47 tasks = []
46 tasklog = builddir + "/task.log" 48 tasklog = builddir + "/task.log"
@@ -314,6 +316,13 @@ class RunQueueTests(unittest.TestCase):
314 ["mc_2:a1:%s" % t for t in rerun_tasks] 316 ["mc_2:a1:%s" % t for t in rerun_tasks]
315 self.assertEqual(set(tasks), set(expected)) 317 self.assertEqual(set(tasks), set(expected))
316 318
 319 # Check that a multiconfig dependency that doesn't exist raises a correct error message
320 error_output = self.run_bitbakecmd(["bitbake", "g1"], tempdir, "", extraenv=extraenv, cleanup=True, allowfailure=True)
321 self.assertIn("non-existent task", error_output)
322 # If the word 'Traceback' or 'KeyError' is in the output we've regressed
323 self.assertNotIn("Traceback", error_output)
324 self.assertNotIn("KeyError", error_output)
325
317 self.shutdown(tempdir) 326 self.shutdown(tempdir)
318 327
319 def test_hashserv_single(self): 328 def test_hashserv_single(self):
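The new allowfailure parameter turns an expected bitbake failure into test data: instead of failing the test, the helper hands back the captured output so the caller can assert on the error text. A condensed sketch of the pattern, paraphrasing run_bitbakecmd above:

import subprocess

def run_allowing_failure(cmd, env, cwd):
    try:
        return subprocess.check_output(cmd, env=env, cwd=cwd,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # The caller receives the failing output and can assert on it,
        # e.g. assertIn("non-existent task", output).
        return e.output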
diff --git a/bitbake/lib/bb/tests/setup.py b/bitbake/lib/bb/tests/setup.py
new file mode 100644
index 0000000000..e320cdf56f
--- /dev/null
+++ b/bitbake/lib/bb/tests/setup.py
@@ -0,0 +1,358 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from bb.tests.fetch import FetcherTest
 8import json, os, bb.process
9
10class BitbakeSetupTest(FetcherTest):
11 def setUp(self):
12 super(BitbakeSetupTest, self).setUp()
13
14 self.registrypath = os.path.join(self.tempdir, "bitbake-setup-configurations")
15
16 os.makedirs(self.registrypath)
17 self.git_init(cwd=self.registrypath)
18 self.git('commit --allow-empty -m "Initial commit"', cwd=self.registrypath)
19
20 self.testrepopath = os.path.join(self.tempdir, "test-repo")
21 os.makedirs(self.testrepopath)
22 self.git_init(cwd=self.testrepopath)
23 self.git('commit --allow-empty -m "Initial commit"', cwd=self.testrepopath)
24
25 oeinitbuildenv = """BBPATH=$1
26export BBPATH
27PATH={}:$PATH
28""".format(os.path.join(self.testrepopath, 'scripts'))
 29 self.add_file_to_testrepo('oe-init-build-env', oeinitbuildenv, script=True)
30
31 oesetupbuild = """#!/usr/bin/env python3
32import getopt
33import sys
34import os
35import shutil
36opts, args = getopt.getopt(sys.argv[2:], "c:b:", ["no-shell"])
37for option, value in opts:
38 if option == '-c':
39 template = value
40 if option == '-b':
41 builddir = value
42confdir = os.path.join(builddir, 'conf')
43os.makedirs(confdir, exist_ok=True)
44with open(os.path.join(confdir, 'conf-summary.txt'), 'w') as f:
45 f.write(template)
46shutil.copy(os.path.join(os.path.dirname(__file__), 'test-repo/test-file'), confdir)
47with open(os.path.join(builddir, 'init-build-env'), 'w') as f:
48 f.write("BBPATH={}\\nexport BBPATH\\nPATH={}:$PATH".format(builddir, os.path.join(os.path.dirname(__file__), 'test-repo/scripts')))
49"""
50 self.add_file_to_testrepo('scripts/oe-setup-build', oesetupbuild, script=True)
51
52 installbuildtools = """#!/usr/bin/env python3
53import getopt
54import sys
55import os
56
57opts, args = getopt.getopt(sys.argv[1:], "d:", ["downloads-directory="])
58for option, value in opts:
59 if option == '-d':
60 installdir = value
61
62print("Buildtools installed into {}".format(installdir))
63os.makedirs(installdir)
64"""
65 self.add_file_to_testrepo('scripts/install-buildtools', installbuildtools, script=True)
66
67 bitbakeconfigbuild = """#!/usr/bin/env python3
68import os
69import sys
70confdir = os.path.join(os.environ['BBPATH'], 'conf')
71fragment = sys.argv[2]
72with open(os.path.join(confdir, fragment), 'w') as f:
73 f.write('')
74"""
75 self.add_file_to_testrepo('scripts/bitbake-config-build', bitbakeconfigbuild, script=True)
76
77 sometargetexecutable_template = """#!/usr/bin/env python3
78import os
79print("This is {}")
80print("BBPATH is {{}}".format(os.environ["BBPATH"]))
81"""
82 for e_name in ("some-target-executable-1", "some-target-executable-2"):
83 sometargetexecutable = sometargetexecutable_template.format(e_name)
84 self.add_file_to_testrepo('scripts/{}'.format(e_name), sometargetexecutable, script=True)
85
86 def runbbsetup(self, cmd):
87 bbsetup = os.path.abspath(os.path.dirname(__file__) + "/../../../bin/bitbake-setup")
88 return bb.process.run("{} --global-settings {} {}".format(bbsetup, os.path.join(self.tempdir, 'global-config'), cmd))
89
90 def add_json_config_to_registry(self, name, rev, branch):
91 config = """
92{
93 "sources": {
94 "test-repo": {
95 "git-remote": {
96 "remotes": {
97 "origin": {
98 "uri": "file://%s"
99 }
100 },
101 "branch": "%s",
102 "rev": "%s"
103 },
104 "path": "test-repo"
105 }
106 },
107 "description": "Test configuration",
108 "bitbake-setup": {
109 "configurations": [
110 {
111 "name": "gadget",
112 "description": "Gadget build configuration",
113 "oe-template": "test-configuration-gadget",
114 "oe-fragments": ["test-fragment-1"]
115 },
116 {
117 "name": "gizmo",
118 "description": "Gizmo build configuration",
119 "oe-template": "test-configuration-gizmo",
120 "oe-fragments": ["test-fragment-2"]
121 },
122 {
123 "name": "gizmo-env-passthrough",
124 "description": "Gizmo build configuration with environment-passthrough",
125 "bb-layers": ["layerC","layerD/meta-layer"],
126 "oe-fragments": ["test-fragment-1"],
127 "bb-env-passthrough-additions": [
128 "BUILD_ID",
129 "BUILD_DATE",
130 "BUILD_SERVER"
131 ]
132 },
133 {
134 "name": "gizmo-no-fragment",
135 "description": "Gizmo no-fragment template-only build configuration",
136 "oe-template": "test-configuration-gizmo"
137 },
138 {
139 "name": "gadget-notemplate",
140 "description": "Gadget notemplate build configuration",
141 "bb-layers": ["layerA","layerB/meta-layer"],
142 "oe-fragments": ["test-fragment-1"]
143 },
144 {
145 "name": "gizmo-notemplate",
146 "description": "Gizmo notemplate build configuration",
147 "bb-layers": ["layerC","layerD/meta-layer"],
148 "oe-fragments": ["test-fragment-2"]
149 },
150 {
151 "name": "gizmo-notemplate-with-thisdir",
152 "description": "Gizmo notemplate build configuration using THISDIR",
153 "bb-layers": ["layerC","layerD/meta-layer","{THISDIR}/layerE/meta-layer"],
154 "oe-fragments": ["test-fragment-2"]
155 }
156 ]
157 },
158 "version": "1.0"
159}
160""" % (self.testrepopath, branch, rev)
161 os.makedirs(os.path.join(self.registrypath, os.path.dirname(name)), exist_ok=True)
162 with open(os.path.join(self.registrypath, name), 'w') as f:
163 f.write(config)
164 self.git('add {}'.format(name), cwd=self.registrypath)
165 self.git('commit -m "Adding {}"'.format(name), cwd=self.registrypath)
166 return json.loads(config)
167
168 def add_file_to_testrepo(self, name, content, script=False):
169 fullname = os.path.join(self.testrepopath, name)
170 os.makedirs(os.path.join(self.testrepopath, os.path.dirname(name)), exist_ok=True)
171 with open(fullname, 'w') as f:
172 f.write(content)
173 if script:
174 import stat
175 st = os.stat(fullname)
176 os.chmod(fullname, st.st_mode | stat.S_IEXEC)
177 self.git('add {}'.format(name), cwd=self.testrepopath)
178 self.git('commit -m "Adding {}"'.format(name), cwd=self.testrepopath)
179
180 def check_builddir_files(self, buildpath, test_file_content, json_config):
181 with open(os.path.join(buildpath, 'layers', 'test-repo', 'test-file')) as f:
182 self.assertEqual(f.read(), test_file_content)
183 bitbake_config = json_config["bitbake-config"]
184 bb_build_path = os.path.join(buildpath, 'build')
185 bb_conf_path = os.path.join(bb_build_path, 'conf')
186 self.assertTrue(os.path.exists(os.path.join(bb_build_path, 'init-build-env')))
187
188 if "oe-template" in bitbake_config:
189 with open(os.path.join(bb_conf_path, 'conf-summary.txt')) as f:
190 self.assertEqual(f.read(), bitbake_config["oe-template"])
191 with open(os.path.join(bb_conf_path, 'test-file')) as f:
192 self.assertEqual(f.read(), test_file_content)
193 else:
194 with open(os.path.join(bb_conf_path, 'conf-summary.txt')) as f:
195 self.assertIn(bitbake_config["description"], f.read())
196 with open(os.path.join(bb_conf_path, 'bblayers.conf')) as f:
197 bblayers = f.read()
198 for l in bitbake_config["bb-layers"]:
199 if l.startswith('{THISDIR}/'):
200 thisdir_layer = os.path.join(
201 os.path.dirname(json_config["path"]),
202 l.removeprefix("{THISDIR}/"),
203 )
204 self.assertIn(thisdir_layer, bblayers)
205 else:
206 self.assertIn(os.path.join(buildpath, "layers", l), bblayers)
207
 208 if 'oe-fragments' in bitbake_config:
209 for f in bitbake_config["oe-fragments"]:
210 self.assertTrue(os.path.exists(os.path.join(bb_conf_path, f)))
211
 212 if 'bb-env-passthrough-additions' in bitbake_config:
213 with open(os.path.join(bb_build_path, 'init-build-env'), 'r') as f:
214 init_build_env = f.read()
 215 self.assertIn('BB_ENV_PASSTHROUGH_ADDITIONS', init_build_env)
 216 self.assertIn('BUILD_ID', init_build_env)
 217 self.assertIn('BUILD_DATE', init_build_env)
 218 self.assertIn('BUILD_SERVER', init_build_env)
 219 # A more thorough test could initialize a bitbake build env, export FOO to the shell environment, set the env passthrough on it, and finally check against 'bitbake-getvar FOO'
220
221
222 def test_setup(self):
223 # unset BBPATH to ensure tests run in isolation from the existing bitbake environment
224 import os
225 if 'BBPATH' in os.environ:
226 del os.environ['BBPATH']
227
228 # check that no arguments works
229 self.runbbsetup("")
230
231 # check that --help works
232 self.runbbsetup("--help")
233
234 # set up global location for top-dir-prefix
235 out = self.runbbsetup("settings set --global default top-dir-prefix {}".format(self.tempdir))
236 settings_path = "{}/global-config".format(self.tempdir)
237 self.assertIn(settings_path, out[0])
238 self.assertIn("From section 'default' the setting 'top-dir-prefix' was changed to", out[0])
239 self.assertIn("Settings written to".format(settings_path), out[0])
240 out = self.runbbsetup("settings set --global default dl-dir {}".format(os.path.join(self.tempdir, 'downloads')))
241 self.assertIn("From section 'default' the setting 'dl-dir' was changed to", out[0])
242 self.assertIn("Settings written to".format(settings_path), out[0])
243
244 # check that writing settings works and then adjust them to point to
245 # test registry repo
246 out = self.runbbsetup("settings set default registry 'git://{};protocol=file;branch=master;rev=master'".format(self.registrypath))
247 settings_path = "{}/bitbake-builds/settings.conf".format(self.tempdir)
248 self.assertIn(settings_path, out[0])
249 self.assertIn("From section 'default' the setting 'registry' was changed to", out[0])
250 self.assertIn("Settings written to".format(settings_path), out[0])
251
252 # check that listing settings works
253 out = self.runbbsetup("settings list")
254 self.assertIn("default top-dir-prefix {}".format(self.tempdir), out[0])
255 self.assertIn("default dl-dir {}".format(os.path.join(self.tempdir, 'downloads')), out[0])
256 self.assertIn("default registry {}".format('git://{};protocol=file;branch=master;rev=master'.format(self.registrypath)), out[0])
257
258 # check that 'list' produces correct output with no configs, one config and two configs
259 out = self.runbbsetup("list")
260 self.assertNotIn("test-config-1", out[0])
261 self.assertNotIn("test-config-2", out[0])
262
263 json_1 = self.add_json_config_to_registry('test-config-1.conf.json', 'master', 'master')
264 out = self.runbbsetup("list")
265 self.assertIn("test-config-1", out[0])
266 self.assertNotIn("test-config-2", out[0])
267
268 json_2 = self.add_json_config_to_registry('config-2/test-config-2.conf.json', 'master', 'master')
269 out = self.runbbsetup("list --write-json={}".format(os.path.join(self.tempdir, "test-configs.json")))
270 self.assertIn("test-config-1", out[0])
271 self.assertIn("test-config-2", out[0])
272 with open(os.path.join(self.tempdir, "test-configs.json")) as f:
273 json_configs = json.load(f)
274 self.assertIn("test-config-1", json_configs)
275 self.assertIn("test-config-2", json_configs)
276
277 # check that init/status/update work
278 # (the latter two should do nothing and say that config hasn't changed)
279 test_file_content = 'initial\n'
280 self.add_file_to_testrepo('test-file', test_file_content)
281
282 # test-config-1 is tested as a registry config, test-config-2 as a local file
283 test_configurations = {'test-config-1': {'cmdline': 'test-config-1',
284 'buildconfigs':('gadget','gizmo',
285 'gizmo-env-passthrough',
286 'gizmo-no-fragment',
287 'gadget-notemplate','gizmo-notemplate')},
288 'test-config-2': {'cmdline': os.path.join(self.registrypath,'config-2/test-config-2.conf.json'),
289 'buildconfigs': ('gadget','gizmo',
290 'gizmo-env-passthrough',
291 'gizmo-no-fragment',
292 'gadget-notemplate','gizmo-notemplate',
293 'gizmo-notemplate-with-thisdir')}
294 }
295 for cf, v in test_configurations.items():
296 for c in v['buildconfigs']:
297 out = self.runbbsetup("init --non-interactive {} {}".format(v['cmdline'], c))
298 buildpath = os.path.join(self.tempdir, 'bitbake-builds', '{}-{}'.format(cf, c))
299 with open(os.path.join(buildpath, 'config', "config-upstream.json")) as f:
300 config_upstream = json.load(f)
301 self.check_builddir_files(buildpath, test_file_content, config_upstream)
302 os.environ['BBPATH'] = os.path.join(buildpath, 'build')
303 out = self.runbbsetup("status")
304 self.assertIn("Configuration in {} has not changed".format(buildpath), out[0])
305 out = self.runbbsetup("update")
306 self.assertIn("Configuration in {} has not changed".format(buildpath), out[0])
307
308 # install buildtools
309 out = self.runbbsetup("install-buildtools")
310 self.assertIn("Buildtools installed into", out[0])
311 self.assertTrue(os.path.exists(os.path.join(buildpath, 'buildtools')))
312
313 # change a file in the test layer repo, make a new commit and
314 # test that status/update correctly report the change and update the config
315 prev_test_file_content = test_file_content
316 test_file_content = 'modified\n'
317 self.add_file_to_testrepo('test-file', test_file_content)
318 for c in ('gadget', 'gizmo',
319 'gizmo-env-passthrough',
320 'gizmo-no-fragment',
321 'gadget-notemplate', 'gizmo-notemplate'):
322 buildpath = os.path.join(self.tempdir, 'bitbake-builds', 'test-config-1-{}'.format(c))
323 os.environ['BBPATH'] = os.path.join(buildpath, 'build')
324 out = self.runbbsetup("status")
325 self.assertIn("Layer repository file://{} checked out into {}/layers/test-repo updated revision master from".format(self.testrepopath, buildpath), out[0])
326 out = self.runbbsetup("update")
327 if c in ('gadget', 'gizmo'):
328 self.assertIn("Existing bitbake configuration directory renamed to {}/build/conf-backup.".format(buildpath), out[0])
329 self.assertIn('-{}+{}'.format(prev_test_file_content, test_file_content), out[0])
330 with open(os.path.join(buildpath, 'config', "config-upstream.json")) as f:
331 config_upstream = json.load(f)
332 self.check_builddir_files(buildpath, test_file_content, config_upstream)
333
334 # make a new branch in the test layer repo, change a file on that branch,
335 # make a new commit, update the top level json config to refer to that branch,
336 # and test that status/update correctly report the change and update the config
337 prev_test_file_content = test_file_content
338 test_file_content = 'modified-in-branch\n'
339 branch = "another-branch"
340 self.git('checkout -b {}'.format(branch), cwd=self.testrepopath)
341 self.add_file_to_testrepo('test-file', test_file_content)
342 json_1 = self.add_json_config_to_registry('test-config-1.conf.json', branch, branch)
343 for c in ('gadget', 'gizmo',
344 'gizmo-env-passthrough',
345 'gizmo-no-fragment',
346 'gadget-notemplate', 'gizmo-notemplate'):
347 buildpath = os.path.join(self.tempdir, 'bitbake-builds', 'test-config-1-{}'.format(c))
348 os.environ['BBPATH'] = os.path.join(buildpath, 'build')
349 out = self.runbbsetup("status")
350 self.assertIn("Configuration in {} has changed:".format(buildpath), out[0])
351 self.assertIn('- "rev": "master"\n+ "rev": "another-branch"', out[0])
352 out = self.runbbsetup("update")
353 if c in ('gadget', 'gizmo'):
354 self.assertIn("Existing bitbake configuration directory renamed to {}/build/conf-backup.".format(buildpath), out[0])
355 self.assertIn('-{}+{}'.format(prev_test_file_content, test_file_content), out[0])
356 with open(os.path.join(buildpath, 'config', "config-upstream.json")) as f:
357 config_upstream = json.load(f)
358 self.check_builddir_files(buildpath, test_file_content, config_upstream)
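The JSON written by add_json_config_to_registry() doubles as the contract that check_builddir_files() verifies: each entry under "bitbake-setup"/"configurations" either names an "oe-template" or lists "bb-layers", optionally with "oe-fragments". A minimal consumer sketch under that assumption (hypothetical helper, not part of bitbake-setup):

import json

def summarize(config_path):
    with open(config_path) as f:
        config = json.load(f)
    for entry in config["bitbake-setup"]["configurations"]:
        # Template-based and layer-based entries are distinguished the
        # same way check_builddir_files() does it.
        kind = "template" if "oe-template" in entry else "layers"
        print(entry["name"], kind, entry.get("oe-fragments", []))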
diff --git a/bitbake/lib/bb/tests/support/httpserver.py b/bitbake/lib/bb/tests/support/httpserver.py
index 78f7660053..03327e923b 100644
--- a/bitbake/lib/bb/tests/support/httpserver.py
+++ b/bitbake/lib/bb/tests/support/httpserver.py
@@ -3,7 +3,7 @@
3# 3#
4 4
5import http.server 5import http.server
6import multiprocessing 6from bb import multiprocessing
7import os 7import os
8import traceback 8import traceback
9import signal 9import signal
@@ -43,7 +43,7 @@ class HTTPService(object):
43 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger]) 43 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger])
44 44
45 # The signal handler from testimage.bbclass can cause deadlocks here 45 # The signal handler from testimage.bbclass can cause deadlocks here
46 # if the HTTPServer is terminated before it can restore the standard 46 # if the HTTPServer is terminated before it can restore the standard
47 #signal behaviour 47 #signal behaviour
48 orig = signal.getsignal(signal.SIGTERM) 48 orig = signal.getsignal(signal.SIGTERM)
49 signal.signal(signal.SIGTERM, signal.SIG_DFL) 49 signal.signal(signal.SIGTERM, signal.SIG_DFL)
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index c363f62d7d..52b7bf85bf 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -130,6 +130,14 @@ class Checksum(unittest.TestCase):
130 checksum = bb.utils.sha256_file(f.name) 130 checksum = bb.utils.sha256_file(f.name)
131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") 131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f")
132 132
133 def test_goh1(self):
134 import hashlib
135 with tempfile.NamedTemporaryFile() as f:
136 f.write(self.filler)
137 f.flush()
138 checksum = bb.utils.goh1_file(f.name)
139 self.assertEqual(checksum, "81191f04d4abf413e5badd234814e4202d9efa73e6f9437e9ddd6b8165b569ef")
140
133class EditMetadataFile(unittest.TestCase): 141class EditMetadataFile(unittest.TestCase):
134 _origfile = """ 142 _origfile = """
135# A comment 143# A comment
@@ -684,3 +692,14 @@ class EnvironmentTests(unittest.TestCase):
684 self.assertIn("A", os.environ) 692 self.assertIn("A", os.environ)
685 self.assertEqual(os.environ["A"], "this is A") 693 self.assertEqual(os.environ["A"], "this is A")
686 self.assertNotIn("B", os.environ) 694 self.assertNotIn("B", os.environ)
695
696class FilemodeTests(unittest.TestCase):
697 def test_filemode_convert(self):
698 self.assertEqual(0o775, bb.utils.to_filemode("0o775"))
699 self.assertEqual(0o775, bb.utils.to_filemode(0o775))
700 self.assertEqual(0o775, bb.utils.to_filemode("775"))
701 with self.assertRaises(ValueError):
702 bb.utils.to_filemode("xyz")
703 with self.assertRaises(ValueError):
704 bb.utils.to_filemode("999")
705
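The FilemodeTests assertions pin down the accepted inputs exactly: ints pass through, strings parse as octal with or without the "0o" prefix, and non-octal input raises ValueError. One way such a helper could look; a sketch only, bb.utils.to_filemode may well be implemented differently:

def to_filemode(value):
    if isinstance(value, int):
        return value
    # int(..., 8) accepts both "775" and "0o775" and raises ValueError
    # for "xyz" as well as for the non-octal digits in "999".
    return int(str(value), 8)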
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index dcd3910cc4..e7fbcbca0a 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -14,7 +14,8 @@ import time
14import atexit 14import atexit
15import re 15import re
16from collections import OrderedDict, defaultdict 16from collections import OrderedDict, defaultdict
17from functools import partial 17from functools import partial, wraps
18from contextlib import contextmanager
18 19
19import bb.cache 20import bb.cache
20import bb.cooker 21import bb.cooker
@@ -26,6 +27,135 @@ import bb.remotedata
26from bb.main import setup_bitbake, BitBakeConfigParameters 27from bb.main import setup_bitbake, BitBakeConfigParameters
27import bb.fetch2 28import bb.fetch2
28 29
30def wait_for(f):
31 """
32 Wrap a function that makes an asynchronous tinfoil call using
 33 self.run_command() and wait for events indicating that the call has
 34 completed successfully or an error has occurred.
35 """
36 @wraps(f)
37 def wrapper(self, *args, handle_events=True, extra_events=None, event_callback=None, **kwargs):
38 if handle_events:
39 # A reasonable set of default events matching up with those we handle below
40 eventmask = [
41 'bb.event.BuildStarted',
42 'bb.event.BuildCompleted',
43 'logging.LogRecord',
44 'bb.event.NoProvider',
45 'bb.command.CommandCompleted',
46 'bb.command.CommandFailed',
47 'bb.build.TaskStarted',
48 'bb.build.TaskFailed',
49 'bb.build.TaskSucceeded',
50 'bb.build.TaskFailedSilent',
51 'bb.build.TaskProgress',
52 'bb.runqueue.runQueueTaskStarted',
53 'bb.runqueue.sceneQueueTaskStarted',
54 'bb.event.ProcessStarted',
55 'bb.event.ProcessProgress',
56 'bb.event.ProcessFinished',
57 ]
58 if extra_events:
59 eventmask.extend(extra_events)
60 ret = self.set_event_mask(eventmask)
61
62 includelogs = self.config_data.getVar('BBINCLUDELOGS')
63 loglines = self.config_data.getVar('BBINCLUDELOGS_LINES')
64
65 # Call actual function
66 ret = f(self, *args, **kwargs)
67
68 if handle_events:
69 lastevent = time.time()
70 result = False
 71 # Borrowed from knotty; somewhat hackily, we use the helper
 72 # as the object to store "shutdown" on
73 helper = bb.ui.uihelper.BBUIHelper()
74 helper.shutdown = 0
75 parseprogress = None
76 termfilter = bb.ui.knotty.TerminalFilter(helper, helper, self.logger.handlers, quiet=self.quiet)
77 try:
78 while True:
79 try:
80 event = self.wait_event(0.25)
81 if event:
82 lastevent = time.time()
83 if event_callback and event_callback(event):
84 continue
85 if helper.eventHandler(event):
86 if isinstance(event, bb.build.TaskFailedSilent):
87 self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
88 elif isinstance(event, bb.build.TaskFailed):
89 bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
90 continue
91 if isinstance(event, bb.event.ProcessStarted):
92 if self.quiet > 1:
93 continue
94 parseprogress = bb.ui.knotty.new_progress(event.processname, event.total)
95 parseprogress.start(False)
96 continue
97 if isinstance(event, bb.event.ProcessProgress):
98 if self.quiet > 1:
99 continue
100 if parseprogress:
101 parseprogress.update(event.progress)
102 else:
103 bb.warn("Got ProcessProgress event for something that never started?")
104 continue
105 if isinstance(event, bb.event.ProcessFinished):
106 if self.quiet > 1:
107 continue
108 if parseprogress:
109 parseprogress.finish()
110 parseprogress = None
111 continue
112 if isinstance(event, bb.command.CommandCompleted):
113 result = True
114 break
115 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
116 self.logger.error(str(event))
117 result = False
118 break
119 if isinstance(event, logging.LogRecord):
120 if event.taskpid == 0 or event.levelno > logging.INFO:
121 self.logger.handle(event)
122 continue
123 if isinstance(event, bb.event.NoProvider):
124 self.logger.error(str(event))
125 result = False
126 break
127 elif helper.shutdown > 1:
128 break
129 termfilter.updateFooter()
130 if time.time() > (lastevent + (3*60)):
131 if not self.run_command('ping', handle_events=False):
132 print("\nUnable to ping server and no events, closing down...\n")
133 return False
134 except KeyboardInterrupt:
135 termfilter.clearFooter()
136 if helper.shutdown == 1:
137 print("\nSecond Keyboard Interrupt, stopping...\n")
138 ret = self.run_command("stateForceShutdown")
139 if ret and ret[2]:
140 self.logger.error("Unable to cleanly stop: %s" % ret[2])
141 elif helper.shutdown == 0:
142 print("\nKeyboard Interrupt, closing down...\n")
143 interrupted = True
144 ret = self.run_command("stateShutdown")
145 if ret and ret[2]:
146 self.logger.error("Unable to cleanly shutdown: %s" % ret[2])
147 helper.shutdown = helper.shutdown + 1
148 termfilter.clearFooter()
149 finally:
150 termfilter.finish()
151 if helper.failed_tasks:
152 result = False
153 return result
154 else:
155 return ret
156
157 return wrapper
158
29 159
30# We need this in order to shut down the connection to the bitbake server, 160# We need this in order to shut down the connection to the bitbake server,
31# otherwise the process will never properly exit 161# otherwise the process will never properly exit
@@ -188,11 +318,19 @@ class TinfoilCookerAdapter:
188 self._cache[name] = attrvalue 318 self._cache[name] = attrvalue
189 return attrvalue 319 return attrvalue
190 320
321 class TinfoilSkiplistByMcAdapter:
322 def __init__(self, tinfoil):
323 self.tinfoil = tinfoil
324
325 def __getitem__(self, mc):
326 return self.tinfoil.get_skipped_recipes(mc)
327
191 def __init__(self, tinfoil): 328 def __init__(self, tinfoil):
192 self.tinfoil = tinfoil 329 self.tinfoil = tinfoil
193 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() 330 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
194 self.collections = {} 331 self.collections = {}
195 self.recipecaches = {} 332 self.recipecaches = {}
333 self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil)
196 for mc in self.multiconfigs: 334 for mc in self.multiconfigs:
197 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) 335 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
198 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) 336 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
@@ -201,8 +339,6 @@ class TinfoilCookerAdapter:
201 # Grab these only when they are requested since they aren't always used 339 # Grab these only when they are requested since they aren't always used
202 if name in self._cache: 340 if name in self._cache:
203 return self._cache[name] 341 return self._cache[name]
204 elif name == 'skiplist':
205 attrvalue = self.tinfoil.get_skipped_recipes()
206 elif name == 'bbfile_config_priorities': 342 elif name == 'bbfile_config_priorities':
207 ret = self.tinfoil.run_command('getLayerPriorities') 343 ret = self.tinfoil.run_command('getLayerPriorities')
208 bbfile_config_priorities = [] 344 bbfile_config_priorities = []
@@ -514,12 +650,12 @@ class Tinfoil:
514 """ 650 """
515 return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) 651 return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
516 652
517 def get_skipped_recipes(self): 653 def get_skipped_recipes(self, mc=''):
518 """ 654 """
519 Find recipes which were skipped (i.e. SkipRecipe was raised 655 Find recipes which were skipped (i.e. SkipRecipe was raised
520 during parsing). 656 during parsing).
521 """ 657 """
522 return OrderedDict(self.run_command('getSkippedRecipes')) 658 return OrderedDict(self.run_command('getSkippedRecipes', mc))
523 659
524 def get_all_providers(self, mc=''): 660 def get_all_providers(self, mc=''):
525 return defaultdict(list, self.run_command('allProviders', mc)) 661 return defaultdict(list, self.run_command('allProviders', mc))
@@ -533,6 +669,7 @@ class Tinfoil:
533 def get_runtime_providers(self, rdep): 669 def get_runtime_providers(self, rdep):
534 return self.run_command('getRuntimeProviders', rdep) 670 return self.run_command('getRuntimeProviders', rdep)
535 671
672 # TODO: teach this method about mc
536 def get_recipe_file(self, pn): 673 def get_recipe_file(self, pn):
537 """ 674 """
538 Get the file name for the specified recipe/target. Raises 675 Get the file name for the specified recipe/target. Raises
@@ -541,6 +678,7 @@ class Tinfoil:
541 """ 678 """
542 best = self.find_best_provider(pn) 679 best = self.find_best_provider(pn)
543 if not best or (len(best) > 3 and not best[3]): 680 if not best or (len(best) > 3 and not best[3]):
681 # TODO: pass down mc
544 skiplist = self.get_skipped_recipes() 682 skiplist = self.get_skipped_recipes()
545 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) 683 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
546 skipreasons = taskdata.get_reasons(pn) 684 skipreasons = taskdata.get_reasons(pn)
@@ -633,6 +771,29 @@ class Tinfoil:
633 fn = self.get_recipe_file(pn) 771 fn = self.get_recipe_file(pn)
634 return self.parse_recipe_file(fn) 772 return self.parse_recipe_file(fn)
635 773
774 @contextmanager
775 def _data_tracked_if_enabled(self):
776 """
777 A context manager to enable data tracking for a code segment if data
778 tracking was enabled for this tinfoil instance.
779 """
780 if self.tracking:
781 # Enable history tracking just for the operation
782 self.run_command('enableDataTracking')
783
784 # Here goes the operation with the optional data tracking
785 yield
786
787 if self.tracking:
788 self.run_command('disableDataTracking')
789
790 def finalizeData(self):
791 """
792 Run anonymous functions and expand keys
793 """
794 with self._data_tracked_if_enabled():
795 return self._reconvert_type(self.run_command('finalizeData'), 'DataStoreConnectionHandle')
796
636 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): 797 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
637 """ 798 """
638 Parse the specified recipe file (with or without bbappends) 799 Parse the specified recipe file (with or without bbappends)
@@ -645,10 +806,7 @@ class Tinfoil:
645 appendlist: optional list of bbappend files to apply, if you 806 appendlist: optional list of bbappend files to apply, if you
646 want to filter them 807 want to filter them
647 """ 808 """
648 if self.tracking: 809 with self._data_tracked_if_enabled():
649 # Enable history tracking just for the parse operation
650 self.run_command('enableDataTracking')
651 try:
652 if appends and appendlist == []: 810 if appends and appendlist == []:
653 appends = False 811 appends = False
654 if config_data: 812 if config_data:
@@ -660,9 +818,6 @@ class Tinfoil:
660 return self._reconvert_type(dscon, 'DataStoreConnectionHandle') 818 return self._reconvert_type(dscon, 'DataStoreConnectionHandle')
661 else: 819 else:
662 return None 820 return None
663 finally:
664 if self.tracking:
665 self.run_command('disableDataTracking')
666 821
667 def build_file(self, buildfile, task, internal=True): 822 def build_file(self, buildfile, task, internal=True):
668 """ 823 """
@@ -674,6 +829,10 @@ class Tinfoil:
674 """ 829 """
675 return self.run_command('buildFile', buildfile, task, internal) 830 return self.run_command('buildFile', buildfile, task, internal)
676 831
832 @wait_for
833 def build_file_sync(self, *args):
834 self.build_file(*args)
835
677 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None): 836 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None):
678 """ 837 """
679 Builds the specified targets. This is equivalent to a normal invocation 838 Builds the specified targets. This is equivalent to a normal invocation
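With the wait_for decorator in place, any asynchronous tinfoil command can be turned into a blocking call the way build_file_sync does above: the wrapper returns True once CommandCompleted arrives and False on CommandFailed, NoProvider or failed tasks. A hedged usage sketch; the recipe path is a placeholder and a configured build directory is assumed:

import bb.tinfoil

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare(config_only=False)
    # Blocks until the command completes, mirroring build_targets().
    ok = tinfoil.build_file_sync("/path/to/recipe.bb", "compile")
    print("build succeeded" if ok else "build failed")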
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 8b212b7803..4ee45d67a2 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -559,7 +559,10 @@ class ORMWrapper(object):
559 # we might have an invalid link; no way to detect this. just set it to None 559 # we might have an invalid link; no way to detect this. just set it to None
560 filetarget_obj = None 560 filetarget_obj = None
561 561
562 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 562 try:
563 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
564 except Target_File.DoesNotExist:
565 parent_obj = None
563 566
564 Target_File.objects.create( 567 Target_File.objects.create(
565 target = target_obj, 568 target = target_obj,
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index f86999bb09..00258c80ff 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -10,6 +10,7 @@
10 10
11from __future__ import division 11from __future__ import division
12 12
13import io
13import os 14import os
14import sys 15import sys
15import logging 16import logging
@@ -24,6 +25,12 @@ import atexit
24from itertools import groupby 25from itertools import groupby
25 26
26from bb.ui import uihelper 27from bb.ui import uihelper
28import bb.build
29import bb.command
30import bb.cooker
31import bb.event
32import bb.runqueue
33import bb.utils
27 34
28featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] 35featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
29 36
@@ -103,7 +110,7 @@ def new_progress(msg, maxval):
103 return NonInteractiveProgress(msg, maxval) 110 return NonInteractiveProgress(msg, maxval)
104 111
105def pluralise(singular, plural, qty): 112def pluralise(singular, plural, qty):
106 if(qty == 1): 113 if qty == 1:
107 return singular % qty 114 return singular % qty
108 else: 115 else:
109 return plural % qty 116 return plural % qty
@@ -112,6 +119,7 @@ def pluralise(singular, plural, qty):
112class InteractConsoleLogFilter(logging.Filter): 119class InteractConsoleLogFilter(logging.Filter):
113 def __init__(self, tf): 120 def __init__(self, tf):
114 self.tf = tf 121 self.tf = tf
122 super().__init__()
115 123
116 def filter(self, record): 124 def filter(self, record):
117 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): 125 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
@@ -120,6 +128,10 @@ class InteractConsoleLogFilter(logging.Filter):
120 return True 128 return True
121 129
122class TerminalFilter(object): 130class TerminalFilter(object):
131
132 # 40 Hz (FPS) -> 0.025 secs
133 _DEFAULT_PRINT_INTERVAL = 0.025
134
123 rows = 25 135 rows = 25
124 columns = 80 136 columns = 80
125 137
@@ -157,10 +169,12 @@ class TerminalFilter(object):
157 self.stdinbackup = None 169 self.stdinbackup = None
158 self.interactive = sys.stdout.isatty() 170 self.interactive = sys.stdout.isatty()
159 self.footer_present = False 171 self.footer_present = False
160 self.lastpids = [] 172 self.lasttime = time.time()
161 self.lasttime = None
162 self.quiet = quiet 173 self.quiet = quiet
163 174
175 self._footer_buf = io.StringIO()
176 self._footer_lines = None
177
164 if not self.interactive: 178 if not self.interactive:
165 return 179 return
166 180
@@ -208,11 +222,11 @@ class TerminalFilter(object):
208 222
209 def clearFooter(self): 223 def clearFooter(self):
210 if self.footer_present: 224 if self.footer_present:
211 lines = self.footer_present 225 sys.stdout.buffer.write(self.curses.tparm(self.cuu, self._footer_lines))
212 sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines))
213 sys.stdout.buffer.write(self.curses.tparm(self.ed)) 226 sys.stdout.buffer.write(self.curses.tparm(self.ed))
214 sys.stdout.flush() 227 sys.stdout.flush()
215 self.footer_present = False 228 self.footer_present = False
229 self._footer_lines = None
216 230
217 def elapsed(self, sec): 231 def elapsed(self, sec):
218 hrs = int(sec / 3600.0) 232 hrs = int(sec / 3600.0)
@@ -228,9 +242,10 @@ class TerminalFilter(object):
228 242
229 def keepAlive(self, t): 243 def keepAlive(self, t):
230 if not self.cuu: 244 if not self.cuu:
231 print("Bitbake still alive (no events for %ds). Active tasks:" % t) 245 msgbuf = ["Bitbake still alive (no events for %ds). Active tasks:" % t]
232 for t in self.helper.running_tasks: 246 for t in self.helper.running_tasks:
233 print(t) 247 msgbuf.append(str(t))
248 print("\n".join(msgbuf))
234 sys.stdout.flush() 249 sys.stdout.flush()
235 250
236 def updateFooter(self): 251 def updateFooter(self):
@@ -238,20 +253,35 @@ class TerminalFilter(object):
238 return 253 return
239 activetasks = self.helper.running_tasks 254 activetasks = self.helper.running_tasks
240 failedtasks = self.helper.failed_tasks 255 failedtasks = self.helper.failed_tasks
241 runningpids = self.helper.running_pids
242 currenttime = time.time() 256 currenttime = time.time()
243 if not self.lasttime or (currenttime - self.lasttime > 5): 257 deltatime = currenttime - self.lasttime
258
259 if (deltatime > 5.0):
244 self.helper.needUpdate = True 260 self.helper.needUpdate = True
245 self.lasttime = currenttime 261 need_update = self.helper.needUpdate
246 if self.footer_present and not self.helper.needUpdate: 262 else:
 263 # Do not update faster than _DEFAULT_PRINT_INTERVAL
 264 # to avoid flooding the terminal with print() calls.
265 need_update = self.helper.needUpdate and (deltatime > self._DEFAULT_PRINT_INTERVAL)
266
267 if self.footer_present and (not need_update):
 268 # Footer update is not needed.
247 return 269 return
270 else:
 271 # Footer update is needed; record the new "lasttime" value.
272 self.lasttime = currenttime
273
248 self.helper.needUpdate = False 274 self.helper.needUpdate = False
249 if self.footer_present:
250 self.clearFooter()
251 if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks): 275 if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
276 self.clearFooter()
252 return 277 return
278
279 # Clear footer buffer.
280 self._footer_buf.truncate(0)
281 self._footer_buf.seek(0)
282
253 tasks = [] 283 tasks = []
254 for t in runningpids: 284 for t in activetasks.keys():
255 start_time = activetasks[t].get("starttime", None) 285 start_time = activetasks[t].get("starttime", None)
256 if start_time: 286 if start_time:
257 msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]) 287 msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])
@@ -268,6 +298,7 @@ class TerminalFilter(object):
268 else: 298 else:
269 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) 299 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
270 pbar.bouncing = False 300 pbar.bouncing = False
301 pbar.fd = self._footer_buf
271 activetasks[t]["progressbar"] = pbar 302 activetasks[t]["progressbar"] = pbar
272 tasks.append((pbar, msg, progress, rate, start_time)) 303 tasks.append((pbar, msg, progress, rate, start_time))
273 else: 304 else:
@@ -278,7 +309,7 @@ class TerminalFilter(object):
278 "Waiting for %s running tasks to finish", len(activetasks)) 309 "Waiting for %s running tasks to finish", len(activetasks))
279 if not self.quiet: 310 if not self.quiet:
280 content += ':' 311 content += ':'
281 print(content) 312 print(content, file=self._footer_buf)
282 else: 313 else:
283 scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total) 314 scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total)
284 cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 315 cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
@@ -287,7 +318,7 @@ class TerminalFilter(object):
287 if not self.quiet: 318 if not self.quiet:
288 msg = "Setscene tasks: %s" % scene_tasks 319 msg = "Setscene tasks: %s" % scene_tasks
289 content += msg + "\n" 320 content += msg + "\n"
290 print(msg) 321 print(msg, file=self._footer_buf)
291 322
292 if self.quiet: 323 if self.quiet:
293 msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks) 324 msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks)
@@ -299,11 +330,12 @@ class TerminalFilter(object):
299 if not self.main_progress or self.main_progress.maxval != maxtask: 330 if not self.main_progress or self.main_progress.maxval != maxtask:
300 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] 331 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
301 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) 332 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
333 self.main_progress.fd = self._footer_buf
302 self.main_progress.start(False) 334 self.main_progress.start(False)
303 self.main_progress.setmessage(msg) 335 self.main_progress.setmessage(msg)
304 progress = max(0, self.helper.tasknumber_current - 1) 336 progress = max(0, self.helper.tasknumber_current - 1)
305 content += self.main_progress.update(progress) 337 content += self.main_progress.update(progress)
306 print('') 338 print('', file=self._footer_buf)
307 lines = self.getlines(content) 339 lines = self.getlines(content)
308 if not self.quiet: 340 if not self.quiet:
309 for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]): 341 for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]):
@@ -319,15 +351,19 @@ class TerminalFilter(object):
319 content = pbar.update(progress) 351 content = pbar.update(progress)
320 else: 352 else:
321 content = pbar.update(1) 353 content = pbar.update(1)
322 print('') 354 print('', file=self._footer_buf)
323 else: 355 else:
324 content = "%s: %s" % (tasknum, task) 356 content = "%s: %s" % (tasknum, task)
325 print(content) 357 print(content, file=self._footer_buf)
326 lines = lines + self.getlines(content) 358 lines = lines + self.getlines(content)
327 self.footer_present = lines
328 self.lastpids = runningpids[:]
329 self.lastcount = self.helper.tasknumber_current 359 self.lastcount = self.helper.tasknumber_current
330 360
 361 # Clear the footer and print the buffered content.
362 self.clearFooter()
363 print(self._footer_buf.getvalue(), end='')
364 self._footer_lines = lines
365 self.footer_present = True
366
331 def getlines(self, content): 367 def getlines(self, content):
332 lines = 0 368 lines = 0
333 for line in content.split("\n"): 369 for line in content.split("\n"):
@@ -335,18 +371,18 @@ class TerminalFilter(object):
335 return lines 371 return lines
336 372
337 def finish(self): 373 def finish(self):
374 self._footer_buf.close()
338 if self.stdinbackup: 375 if self.stdinbackup:
339 fd = sys.stdin.fileno() 376 fd = sys.stdin.fileno()
340 self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup) 377 self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
341 378
342def print_event_log(event, includelogs, loglines, termfilter): 379def print_event_log(event, includelogs, loglines, termfilter):
343 # FIXME refactor this out further
344 logfile = event.logfile 380 logfile = event.logfile
345 if logfile and os.path.exists(logfile): 381 if logfile and os.path.exists(logfile):
346 termfilter.clearFooter() 382 termfilter.clearFooter()
347 bb.error("Logfile of failure stored in: %s" % logfile) 383 bb.error("Logfile of failure stored in: %s" % logfile)
348 if includelogs and not event.errprinted: 384 if includelogs and not event.errprinted:
349 print("Log data follows:") 385 msgbuf = ["Log data follows:"]
350 f = open(logfile, "r") 386 f = open(logfile, "r")
351 lines = [] 387 lines = []
352 while True: 388 while True:
@@ -359,11 +395,11 @@ def print_event_log(event, includelogs, loglines, termfilter):
359 if len(lines) > int(loglines): 395 if len(lines) > int(loglines):
360 lines.pop(0) 396 lines.pop(0)
361 else: 397 else:
362 print('| %s' % l) 398 msgbuf.append('| %s' % l)
363 f.close() 399 f.close()
364 if lines: 400 if lines:
365 for line in lines: 401 msgbuf.extend(lines)
366 print(line) 402 print("\n".join(msgbuf))
367 403
368def _log_settings_from_server(server, observe_only): 404def _log_settings_from_server(server, observe_only):
369 # Get values of variables which control our output 405 # Get values of variables which control our output
@@ -555,13 +591,23 @@ def main(server, eventHandler, params, tf = TerminalFilter):
555 } 591 }
556 }) 592 })
557 593
558 bb.utils.mkdirhier(os.path.dirname(consolelogfile)) 594 consolelogdirname = os.path.dirname(consolelogfile)
559 loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log') 595 # `bb.utils.mkdirhier` has this check, but it reports failure using bb.fatal, which logs
596 # to the very logger we are trying to set up.
597 if '${' in str(consolelogdirname):
598 print(
599 "FATAL: Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(
600 consolelogdirname))
601 if '${MACHINE}' in consolelogdirname:
602 print("HINT: It looks like you forgot to set MACHINE in local.conf.")
603
604 bb.utils.mkdirhier(consolelogdirname)
605 loglink = os.path.join(consolelogdirname, 'console-latest.log')
560 bb.utils.remove(loglink) 606 bb.utils.remove(loglink)
561 try: 607 try:
562 os.symlink(os.path.basename(consolelogfile), loglink) 608 os.symlink(os.path.basename(consolelogfile), loglink)
563 except OSError: 609 except OSError:
564 pass 610 pass
565 611
566 # Add the logging domains specified by the user on the command line 612 # Add the logging domains specified by the user on the command line
567 for (domainarg, iterator) in groupby(params.debug_domains): 613 for (domainarg, iterator) in groupby(params.debug_domains):
@@ -577,6 +623,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
577 else: 623 else:
578 log_exec_tty = False 624 log_exec_tty = False
579 625
626 should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
627
580 helper = uihelper.BBUIHelper() 628 helper = uihelper.BBUIHelper()
581 629
582 # Look for the specially designated handlers which need to be passed to the 630 # Look for the specially designated handlers which need to be passed to the
@@ -640,7 +688,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
640 return_value = 0 688 return_value = 0
641 errors = 0 689 errors = 0
642 warnings = 0 690 warnings = 0
643 taskfailures = [] 691 taskfailures = {}
644 692
645 printintervaldelta = 10 * 60 # 10 minutes 693 printintervaldelta = 10 * 60 # 10 minutes
646 printinterval = printintervaldelta 694 printinterval = printintervaldelta
@@ -726,6 +774,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
726 if isinstance(event, bb.build.TaskFailed): 774 if isinstance(event, bb.build.TaskFailed):
727 return_value = 1 775 return_value = 1
728 print_event_log(event, includelogs, loglines, termfilter) 776 print_event_log(event, includelogs, loglines, termfilter)
777 k = "{}:{}".format(event._fn, event._task)
778 taskfailures[k] = event.logfile
729 if isinstance(event, bb.build.TaskBase): 779 if isinstance(event, bb.build.TaskBase):
730 logger.info(event._message) 780 logger.info(event._message)
731 continue 781 continue
@@ -821,7 +871,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
821 871
822 if isinstance(event, bb.runqueue.runQueueTaskFailed): 872 if isinstance(event, bb.runqueue.runQueueTaskFailed):
823 return_value = 1 873 return_value = 1
824 taskfailures.append(event.taskstring) 874 taskfailures.setdefault(event.taskstring)
825 logger.error(str(event)) 875 logger.error(str(event))
826 continue 876 continue
827 877
@@ -942,11 +992,21 @@ def main(server, eventHandler, params, tf = TerminalFilter):
942 try: 992 try:
943 termfilter.clearFooter() 993 termfilter.clearFooter()
944 summary = "" 994 summary = ""
995 def format_hyperlink(url, link_text):
996 if should_print_hyperlinks:
997 start = f'\033]8;;{url}\033\\'
998 end = '\033]8;;\033\\'
999 return f'{start}{link_text}{end}'
1000 return link_text
1001
945 if taskfailures: 1002 if taskfailures:
946 summary += pluralise("\nSummary: %s task failed:", 1003 summary += pluralise("\nSummary: %s task failed:",
947 "\nSummary: %s tasks failed:", len(taskfailures)) 1004 "\nSummary: %s tasks failed:", len(taskfailures))
948 for failure in taskfailures: 1005 for (failure, log_file) in taskfailures.items():
949 summary += "\n %s" % failure 1006 summary += "\n %s" % failure
1007 if log_file:
1008 hyperlink = format_hyperlink(f"file://{log_file}", log_file)
1009 summary += "\n log: {}".format(hyperlink)
950 if warnings: 1010 if warnings:
951 summary += pluralise("\nSummary: There was %s WARNING message.", 1011 summary += pluralise("\nSummary: There was %s WARNING message.",
952 "\nSummary: There were %s WARNING messages.", warnings) 1012 "\nSummary: There were %s WARNING messages.", warnings)
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/bitbake/lib/bb/ui/teamcity.py
+++ b/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
30import bb.command 30import bb.command
31import bb.cooker 31import bb.cooker
32import bb.event 32import bb.event
33import bb.exceptions
34import bb.runqueue 33import bb.runqueue
35from bb.ui import uihelper 34from bb.ui import uihelper
36 35
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
102 details = "" 101 details = ""
103 if hasattr(record, 'bb_exc_formatted'): 102 if hasattr(record, 'bb_exc_formatted'):
104 details = ''.join(record.bb_exc_formatted) 103 details = ''.join(record.bb_exc_formatted)
105 elif hasattr(record, 'bb_exc_info'):
106 etype, value, tb = record.bb_exc_info
107 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
108 details = ''.join(formatted)
109 104
110 if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]: 105 if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
111 # ERROR gets a separate errorDetails field 106 # ERROR gets a separate errorDetails field
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 82913e0da8..a223632471 100644
--- a/bitbake/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -13,7 +13,6 @@ class BBUIHelper:
13 self.needUpdate = False 13 self.needUpdate = False
14 self.running_tasks = {} 14 self.running_tasks = {}
15 # Running PIDs preserves the order tasks were executed in 15 # Running PIDs preserves the order tasks were executed in
16 self.running_pids = []
17 self.failed_tasks = [] 16 self.failed_tasks = []
18 self.pidmap = {} 17 self.pidmap = {}
19 self.tasknumber_current = 0 18 self.tasknumber_current = 0
@@ -23,7 +22,6 @@ class BBUIHelper:
23 # PIDs are a bad idea as they can be reused before we process all UI events. 22 # PIDs are a bad idea as they can be reused before we process all UI events.
24 # We maintain a 'fuzzy' match for TaskProgress since there is no other way to match 23 # We maintain a 'fuzzy' match for TaskProgress since there is no other way to match
25 def removetid(pid, tid): 24 def removetid(pid, tid):
26 self.running_pids.remove(tid)
27 del self.running_tasks[tid] 25 del self.running_tasks[tid]
28 if self.pidmap[pid] == tid: 26 if self.pidmap[pid] == tid:
29 del self.pidmap[pid] 27 del self.pidmap[pid]
@@ -31,11 +29,10 @@ class BBUIHelper:
31 29
32 if isinstance(event, bb.build.TaskStarted): 30 if isinstance(event, bb.build.TaskStarted):
33 tid = event._fn + ":" + event._task 31 tid = event._fn + ":" + event._task
34 if event._mc != "default": 32 if event._mc != "":
35 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } 33 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
36 else: 34 else:
37 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } 35 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
38 self.running_pids.append(tid)
39 self.pidmap[event.pid] = tid 36 self.pidmap[event.pid] = tid
40 self.needUpdate = True 37 self.needUpdate = True
41 elif isinstance(event, bb.build.TaskSucceeded): 38 elif isinstance(event, bb.build.TaskSucceeded):
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index ebee65d3dd..366836bfc9 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,11 +11,8 @@ import re, fcntl, os, string, stat, shutil, time
11import sys 11import sys
12import errno 12import errno
13import logging 13import logging
14import bb
15import bb.msg
16import locale 14import locale
17import multiprocessing 15from bb import multiprocessing
18import fcntl
19import importlib 16import importlib
20import importlib.machinery 17import importlib.machinery
21import importlib.util 18import importlib.util
@@ -24,7 +21,6 @@ import subprocess
24import glob 21import glob
25import fnmatch 22import fnmatch
26import traceback 23import traceback
27import errno
28import signal 24import signal
29import collections 25import collections
30import copy 26import copy
@@ -36,6 +32,9 @@ import tempfile
36from subprocess import getstatusoutput 32from subprocess import getstatusoutput
37from contextlib import contextmanager 33from contextlib import contextmanager
38from ctypes import cdll 34from ctypes import cdll
35import bb
36import bb.msg
37import bb.filter
39 38
40logger = logging.getLogger("BitBake.Util") 39logger = logging.getLogger("BitBake.Util")
41python_extensions = importlib.machinery.all_suffixes() 40python_extensions = importlib.machinery.all_suffixes()
@@ -84,7 +83,16 @@ def explode_version(s):
84 return r 83 return r
85 84
86def split_version(s): 85def split_version(s):
87 """Split a version string into its constituent parts (PE, PV, PR)""" 86 """Split a version string into its constituent parts (PE, PV, PR).
87
88 Arguments:
89
90 - ``s``: version string. The format of the input string should be::
91
92 ${PE}:${PV}-${PR}
93
94 Returns a tuple ``(pe, pv, pr)``.
95 """
88 s = s.strip(" <>=") 96 s = s.strip(" <>=")
89 e = 0 97 e = 0
90 if s.count(':'): 98 if s.count(':'):
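
As an illustration of the format described above (a sketch, not part of the change; note that PE comes back as an integer)::

    >>> import bb.utils
    >>> bb.utils.split_version("1:2.0-r3")
    (1, '2.0', 'r3')
    >>> bb.utils.split_version("4.19")
    (0, '4.19', '')
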
@@ -136,16 +144,30 @@ def vercmp(ta, tb):
136 return r 144 return r
137 145
138def vercmp_string(a, b): 146def vercmp_string(a, b):
139 """ Split version strings and compare them """ 147 """ Split version strings using ``bb.utils.split_version()`` and compare
148 them with ``bb.utils.vercmp()``.
149
150 Arguments:
151
152 - ``a``: left version string operand.
153 - ``b``: right version string operand.
154
155 Returns what ``bb.utils.vercmp()`` returns."""
140 ta = split_version(a) 156 ta = split_version(a)
141 tb = split_version(b) 157 tb = split_version(b)
142 return vercmp(ta, tb) 158 return vercmp(ta, tb)
143 159
144def vercmp_string_op(a, b, op): 160def vercmp_string_op(a, b, op):
145 """ 161 """
146 Compare two versions and check if the specified comparison operator matches the result of the comparison. 162 Takes the return value of ``bb.utils.vercmp()`` and returns whether the operation
147 This function is fairly liberal about what operators it will accept since there are a variety of styles 163 defined by ``op`` holds between that value and 0.
148 depending on the context. 164
165 Arguments:
166
167 - ``a``: left version string operand.
168 - ``b``: right version string operand.
169 - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``,
170 ``>``, ``>>``, ``<``, ``<<`` or ``!=``.
149 """ 171 """
150 res = vercmp_string(a, b) 172 res = vercmp_string(a, b)
151 if op in ('=', '=='): 173 if op in ('=', '=='):
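
For example, all of the following comparisons hold (an illustrative sketch)::

    import bb.utils
    assert bb.utils.vercmp_string_op("1.0", "1.1", "<")
    assert bb.utils.vercmp_string_op("2.0-r1", "2.0-r1", "=")
    assert bb.utils.vercmp_string_op("3.0", "2.9", ">=")
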
@@ -163,11 +185,19 @@ def vercmp_string_op(a, b, op):
163 else: 185 else:
164 raise VersionStringException('Unsupported comparison operator "%s"' % op) 186 raise VersionStringException('Unsupported comparison operator "%s"' % op)
165 187
188@bb.filter.filter_proc(name="bb.utils.explode_deps")
166def explode_deps(s): 189def explode_deps(s):
167 """ 190 """
168 Take an RDEPENDS style string of format: 191 Takes an RDEPENDS style string of format::
169 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 192
170 and return a list of dependencies. 193 DEPEND1 (optional version) DEPEND2 (optional version) ...
194
195 Arguments:
196
197 - ``s``: input RDEPENDS style string
198
199 Returns a list of dependencies.
200
171 Version information is ignored. 201 Version information is ignored.
172 """ 202 """
173 r = [] 203 r = []
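
A sketch of the documented behaviour, with the version constraint dropped::

    >>> import bb.utils
    >>> bb.utils.explode_deps("foo (>= 1.2) bar update-rc.d")
    ['foo', 'bar', 'update-rc.d']
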
@@ -189,9 +219,17 @@ def explode_deps(s):
189 219
190def explode_dep_versions2(s, *, sort=True): 220def explode_dep_versions2(s, *, sort=True):
191 """ 221 """
192 Take an RDEPENDS style string of format: 222 Takes an RDEPENDS style string of format::
193 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 223
194 and return a dictionary of dependencies and versions. 224 DEPEND1 (optional version) DEPEND2 (optional version) ...
225
226 Arguments:
227
228 - ``s``: input RDEPENDS style string
229 - ``*``: marks the end of positional arguments; ``sort`` is keyword-only.
230 - ``sort``: whether to sort the output or not.
231
232 Returns a dictionary of dependencies and versions.
195 """ 233 """
196 r = collections.OrderedDict() 234 r = collections.OrderedDict()
197 l = s.replace(",", "").split() 235 l = s.replace(",", "").split()
@@ -256,10 +294,17 @@ def explode_dep_versions2(s, *, sort=True):
256 294
257def explode_dep_versions(s): 295def explode_dep_versions(s):
258 """ 296 """
259 Take an RDEPENDS style string of format: 297 Take an RDEPENDS style string of format::
260 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 298
261 skip null value and items appeared in dependency string multiple times 299 DEPEND1 (optional version) DEPEND2 (optional version) ...
262 and return a dictionary of dependencies and versions. 300
301 Skips null values and items that appear in the dependency string multiple times.
302
303 Arguments:
304
305 - ``s``: input RDEPENDS style string
306
307 Returns a dictionary of dependencies and versions.
263 """ 308 """
264 r = explode_dep_versions2(s) 309 r = explode_dep_versions2(s)
265 for d in r: 310 for d in r:
@@ -273,7 +318,17 @@ def explode_dep_versions(s):
273 318
274def join_deps(deps, commasep=True): 319def join_deps(deps, commasep=True):
275 """ 320 """
276 Take the result from explode_dep_versions and generate a dependency string 321 Take a result from ``bb.utils.explode_dep_versions()`` and generate a
322 dependency string.
323
324 Arguments:
325
326 - ``deps``: dictionary of dependencies and versions.
327 - ``commasep``: makes the return value separated by commas if ``True``,
328 separated by spaces otherwise.
329
330 Returns a comma-separated (space-separated if ``commasep`` is ``False``)
331 string of dependencies and versions.
277 """ 332 """
278 result = [] 333 result = []
279 for dep in deps: 334 for dep in deps:
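
The two helpers round-trip; a sketch (keys come back sorted because ``sort=True`` by default)::

    >>> import bb.utils
    >>> deps = bb.utils.explode_dep_versions2("foo (>= 1.2) bar")
    >>> dict(deps)
    {'bar': [], 'foo': ['>= 1.2']}
    >>> bb.utils.join_deps(deps)
    'bar, foo (>= 1.2)'
    >>> bb.utils.join_deps(deps, commasep=False)
    'bar foo (>= 1.2)'
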
@@ -435,7 +490,11 @@ def better_eval(source, locals, extraglobals = None):
435 490
436@contextmanager 491@contextmanager
437def fileslocked(files, *args, **kwargs): 492def fileslocked(files, *args, **kwargs):
438 """Context manager for locking and unlocking file locks.""" 493 """Context manager for locking and unlocking file locks. Uses
494 ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock
495 files.
496
497 No return value."""
439 locks = [] 498 locks = []
440 if files: 499 if files:
441 for lockfile in files: 500 for lockfile in files:
@@ -446,19 +505,29 @@ def fileslocked(files, *args, **kwargs):
446 try: 505 try:
447 yield 506 yield
448 finally: 507 finally:
508 locks.reverse()
449 for lock in locks: 509 for lock in locks:
450 bb.utils.unlockfile(lock) 510 bb.utils.unlockfile(lock)
451 511
452def lockfile(name, shared=False, retry=True, block=False): 512def lockfile(name, shared=False, retry=True, block=False):
453 """ 513 """
454 Use the specified file as a lock file, return when the lock has 514 Use the specified file (with filename ``name``) as a lock file, return when
455 been acquired. Returns a variable to pass to unlockfile(). 515 the lock has been acquired. Returns a variable to pass to unlockfile().
456 Parameters: 516
457 retry: True to re-try locking if it fails, False otherwise 517 Arguments:
458 block: True to block until the lock succeeds, False otherwise 518
519 - ``shared``: sets the lock as a shared lock instead of an
520 exclusive lock.
521 - ``retry``: ``True`` to re-try locking if it fails, ``False``
522 otherwise.
523 - ``block``: ``True`` to block until the lock succeeds,
524 ``False`` otherwise.
525
459 The retry and block parameters are kind of equivalent unless you 526 The retry and block parameters are kind of equivalent unless you
460 consider the possibility of sending a signal to the process to break 527 consider the possibility of sending a signal to the process to break
461 out - at which point you want block=True rather than retry=True. 528 out - at which point you want block=True rather than retry=True.
529
530 Returns the locked file descriptor in case of success, ``None`` otherwise.
462 """ 531 """
463 basename = os.path.basename(name) 532 basename = os.path.basename(name)
464 if len(basename) > 255: 533 if len(basename) > 255:
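
A minimal usage sketch (the lock file paths and workload are hypothetical); note that with the change above the locks are now released in reverse acquisition order::

    import bb.utils

    # Both locks are held for the duration of the block and released on exit.
    with bb.utils.fileslocked(["/tmp/a.lock", "/tmp/b.lock"]):
        pass  # critical section goes here
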
@@ -517,7 +586,13 @@ def lockfile(name, shared=False, retry=True, block=False):
517 586
518def unlockfile(lf): 587def unlockfile(lf):
519 """ 588 """
520 Unlock a file locked using lockfile() 589 Unlock a file locked using ``bb.utils.lockfile()``.
590
591 Arguments:
592
593 - ``lf``: the locked file descriptor.
594
595 No return value.
521 """ 596 """
522 try: 597 try:
523 # If we had a shared lock, we need to promote to exclusive before 598 # If we had a shared lock, we need to promote to exclusive before
@@ -545,7 +620,11 @@ def _hasher(method, filename):
545 620
546def md5_file(filename): 621def md5_file(filename):
547 """ 622 """
548 Return the hex string representation of the MD5 checksum of filename. 623 Arguments:
624
625 - ``filename``: path to the input file.
626
627 Returns the hexadecimal string representation of the MD5 checksum of filename.
549 """ 628 """
550 import hashlib 629 import hashlib
551 try: 630 try:
@@ -557,36 +636,81 @@ def md5_file(filename):
557 636
558def sha256_file(filename): 637def sha256_file(filename):
559 """ 638 """
560 Return the hex string representation of the 256-bit SHA checksum of 639 Returns the hexadecimal representation of the 256-bit SHA checksum of
561 filename. 640 filename.
641
642 Arguments:
643
644 - ``filename``: path to the file.
562 """ 645 """
563 import hashlib 646 import hashlib
564 return _hasher(hashlib.sha256(), filename) 647 return _hasher(hashlib.sha256(), filename)
565 648
566def sha1_file(filename): 649def sha1_file(filename):
567 """ 650 """
568 Return the hex string representation of the SHA1 checksum of the filename 651 Returns the hexadecimal representation of the SHA1 checksum of the filename.
652
653 Arguments:
654
655 - ``filename``: path to the file.
569 """ 656 """
570 import hashlib 657 import hashlib
571 return _hasher(hashlib.sha1(), filename) 658 return _hasher(hashlib.sha1(), filename)
572 659
573def sha384_file(filename): 660def sha384_file(filename):
574 """ 661 """
575 Return the hex string representation of the SHA384 checksum of the filename 662 Returns the hexadecimal representation of the SHA384 checksum of the filename.
663
664 Arguments:
665
666 - ``filename``: path to the file.
576 """ 667 """
577 import hashlib 668 import hashlib
578 return _hasher(hashlib.sha384(), filename) 669 return _hasher(hashlib.sha384(), filename)
579 670
580def sha512_file(filename): 671def sha512_file(filename):
581 """ 672 """
582 Return the hex string representation of the SHA512 checksum of the filename 673 Returns the hexadecimal representation of the SHA512 checksum of the filename.
674
675 Arguments:
676
677 - ``filename``: path to the file.
583 """ 678 """
584 import hashlib 679 import hashlib
585 return _hasher(hashlib.sha512(), filename) 680 return _hasher(hashlib.sha512(), filename)
586 681
682def goh1_file(filename):
683 """
684 Returns the hexadecimal string representation of the Go mod h1 checksum of the
685 filename. The Go mod h1 checksum uses the Go dirhash package. The package
686 defines hashes over directory trees and is used by go mod for mod files and
687 zip archives.
688
689 Arguments:
690
691 - ``filename``: path to the file.
692 """
693 import hashlib
694 import zipfile
695
696 lines = []
697 if zipfile.is_zipfile(filename):
698 with zipfile.ZipFile(filename) as archive:
699 for fn in sorted(archive.namelist()):
700 method = hashlib.sha256()
701 method.update(archive.read(fn))
702 hash = method.hexdigest()
703 lines.append("%s %s\n" % (hash, fn))
704 else:
705 hash = _hasher(hashlib.sha256(), filename)
706 lines.append("%s go.mod\n" % hash)
707 method = hashlib.sha256()
708 method.update("".join(lines).encode('utf-8'))
709 return method.hexdigest()
710
587def preserved_envvars_exported(): 711def preserved_envvars_exported():
588 """Variables which are taken from the environment and placed in and exported 712 """Returns the list of variables which are taken from the environment and
589 from the metadata""" 713 placed in and exported from the metadata."""
590 return [ 714 return [
591 'BB_TASKHASH', 715 'BB_TASKHASH',
592 'HOME', 716 'HOME',
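
A usage sketch for the new ``goh1_file()`` helper above (the path is hypothetical)::

    import bb.utils

    # For a zip archive, each member is SHA-256 hashed and a final digest is
    # taken over the sorted "<hash> <name>" listing; any other file is
    # hashed as if it were a go.mod.
    h1 = bb.utils.goh1_file("downloads/example-v1.0.0.zip")
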
@@ -600,7 +724,8 @@ def preserved_envvars_exported():
600 ] 724 ]
601 725
602def preserved_envvars(): 726def preserved_envvars():
603 """Variables which are taken from the environment and placed in the metadata""" 727 """Returns the list of variables which are taken from the environment and
728 placed in the metadata."""
604 v = [ 729 v = [
605 'BBPATH', 730 'BBPATH',
606 'BB_PRESERVE_ENV', 731 'BB_PRESERVE_ENV',
@@ -609,7 +734,9 @@ def preserved_envvars():
609 return v + preserved_envvars_exported() 734 return v + preserved_envvars_exported()
610 735
611def check_system_locale(): 736def check_system_locale():
612 """Make sure the required system locale are available and configured""" 737 """Make sure the required system locale are available and configured.
738
739 No return value."""
613 default_locale = locale.getlocale(locale.LC_CTYPE) 740 default_locale = locale.getlocale(locale.LC_CTYPE)
614 741
615 try: 742 try:
@@ -627,6 +754,12 @@ def filter_environment(good_vars):
627 """ 754 """
628 Create a pristine environment for bitbake. This will remove variables that 755 Create a pristine environment for bitbake. This will remove variables that
629 are not known and may influence the build in a negative way. 756 are not known and may influence the build in a negative way.
757
758 Arguments:
759
760 - ``good_vars``: list of variables to exclude from the filtering.
761
762 No return value.
630 """ 763 """
631 764
632 removed_vars = {} 765 removed_vars = {}
@@ -671,6 +804,8 @@ def clean_environment():
671 """ 804 """
672 Clean up any spurious environment variables. This will remove any 805 Clean up any spurious environment variables. This will remove any
673 variables the user hasn't chosen to preserve. 806 variables the user hasn't chosen to preserve.
807
808 No return value.
674 """ 809 """
675 if 'BB_PRESERVE_ENV' not in os.environ: 810 if 'BB_PRESERVE_ENV' not in os.environ:
676 good_vars = approved_variables() 811 good_vars = approved_variables()
@@ -681,6 +816,8 @@ def clean_environment():
681def empty_environment(): 816def empty_environment():
682 """ 817 """
683 Remove all variables from the environment. 818 Remove all variables from the environment.
819
820 No return value.
684 """ 821 """
685 for s in list(os.environ.keys()): 822 for s in list(os.environ.keys()):
686 os.unsetenv(s) 823 os.unsetenv(s)
@@ -689,6 +826,12 @@ def empty_environment():
689def build_environment(d): 826def build_environment(d):
690 """ 827 """
691 Build an environment from all exported variables. 828 Build an environment from all exported variables.
829
830 Arguments:
831
832 - ``d``: the data store.
833
834 No return value.
692 """ 835 """
693 import bb.data 836 import bb.data
694 for var in bb.data.keys(d): 837 for var in bb.data.keys(d):
@@ -713,7 +856,17 @@ def _check_unsafe_delete_path(path):
713 return False 856 return False
714 857
715def remove(path, recurse=False, ionice=False): 858def remove(path, recurse=False, ionice=False):
716 """Equivalent to rm -f or rm -rf""" 859 """Equivalent to rm -f or rm -rf.
860
861 Arguments:
862
863 - ``path``: path to file/directory to remove.
864 - ``recurse``: deletes recursively if ``True``.
865 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
866 ionice``.
867
868 No return value.
869 """
717 if not path: 870 if not path:
718 return 871 return
719 if recurse: 872 if recurse:
@@ -734,7 +887,17 @@ def remove(path, recurse=False, ionice=False):
734 raise 887 raise
735 888
736def prunedir(topdir, ionice=False): 889def prunedir(topdir, ionice=False):
737 """ Delete everything reachable from the directory named in 'topdir'. """ 890 """
891 Delete everything reachable from the directory named in ``topdir``.
892
893 Arguments:
894
895 - ``topdir``: directory path.
896 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
897 ionice``.
898
899 No return value.
900 """
738 # CAUTION: This is dangerous! 901 # CAUTION: This is dangerous!
739 if _check_unsafe_delete_path(topdir): 902 if _check_unsafe_delete_path(topdir):
740 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) 903 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -746,8 +909,15 @@ def prunedir(topdir, ionice=False):
746# 909#
747def prune_suffix(var, suffixes, d): 910def prune_suffix(var, suffixes, d):
748 """ 911 """
749 See if var ends with any of the suffixes listed and 912 Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and
750 remove it if found 913 remove it if found.
914
915 Arguments:
916
917 - ``var``: string to check for suffixes.
918 - ``suffixes``: list of strings representing suffixes to check for.
919
920 Returns the string ``var`` without the suffix.
751 """ 921 """
752 for suffix in suffixes: 922 for suffix in suffixes:
753 if suffix and var.endswith(suffix): 923 if suffix and var.endswith(suffix):
@@ -756,7 +926,13 @@ def prune_suffix(var, suffixes, d):
756 926
757def mkdirhier(directory): 927def mkdirhier(directory):
758 """Create a directory like 'mkdir -p', but does not complain if 928 """Create a directory like 'mkdir -p', but does not complain if
759 directory already exists like os.makedirs 929 directory already exists like ``os.makedirs()``.
930
931 Arguments:
932
933 - ``directory``: path to the directory.
934
935 No return value.
760 """ 936 """
761 if '${' in str(directory): 937 if '${' in str(directory):
762 bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory)) 938 bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
@@ -767,10 +943,24 @@ def mkdirhier(directory):
767 raise e 943 raise e
768 944
769def movefile(src, dest, newmtime = None, sstat = None): 945def movefile(src, dest, newmtime = None, sstat = None):
770 """Moves a file from src to dest, preserving all permissions and 946 """Moves a file from ``src`` to ``dest``, preserving all permissions and
771 attributes; mtime will be preserved even when moving across 947 attributes; mtime will be preserved even when moving across
772 filesystems. Returns true on success and false on failure. Move is 948 filesystems. Returns ``True`` on success and ``False`` on failure. Move is
773 atomic. 949 atomic.
950
951 Arguments:
952
953 - ``src``: Source file.
954 - ``dest``: Destination file.
955 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
956 - ``sstat``: os.stat_result to use for the destination file.
957
958 Returns an ``os.stat_result`` of the destination file if the
959 source file is a symbolic link or the ``sstat`` argument represents a
960 symbolic link - in which case the destination file will also be created as
961 a symbolic link.
962
963 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
774 """ 964 """
775 965
776 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 966 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
@@ -861,9 +1051,24 @@ def movefile(src, dest, newmtime = None, sstat = None):
861 1051
862def copyfile(src, dest, newmtime = None, sstat = None): 1052def copyfile(src, dest, newmtime = None, sstat = None):
863 """ 1053 """
864 Copies a file from src to dest, preserving all permissions and 1054 Copies a file from ``src`` to ``dest``, preserving all permissions and
865 attributes; mtime will be preserved even when moving across 1055 attributes; mtime will be preserved even when moving across
866 filesystems. Returns true on success and false on failure. 1056 filesystems.
1057
1058 Arguments:
1059
1060 - ``src``: Source file.
1061 - ``dest``: Destination file.
1062 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
1063 - ``sstat``: os.stat_result to use for the destination file.
1064
1065 Returns an ``os.stat_result`` of the destination file if the
1066 source file is a symbolic link or the ``sstat`` argument represents a
1067 symbolic link - in which case the destination file will also be created as
1068 a symbolic link.
1069
1070 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
1071
867 """ 1072 """
868 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 1073 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
869 try: 1074 try:
@@ -941,10 +1146,16 @@ def copyfile(src, dest, newmtime = None, sstat = None):
941 1146
942def break_hardlinks(src, sstat = None): 1147def break_hardlinks(src, sstat = None):
943 """ 1148 """
944 Ensures src is the only hardlink to this file. Other hardlinks, 1149 Ensures ``src`` is the only hardlink to this file. Other hardlinks,
945 if any, are not affected (other than in their st_nlink value, of 1150 if any, are not affected (other than in their st_nlink value, of
946 course). Returns true on success and false on failure. 1151 course).
1152
1153 Arguments:
947 1154
1155 - ``src``: source file path.
1156 - ``sstat``: os.stat_result to use when checking if the file is a link.
1157
1158 Returns ``True`` on success and ``False`` on failure.
948 """ 1159 """
949 try: 1160 try:
950 if not sstat: 1161 if not sstat:
@@ -958,11 +1169,24 @@ def break_hardlinks(src, sstat = None):
958 1169
959def which(path, item, direction = 0, history = False, executable=False): 1170def which(path, item, direction = 0, history = False, executable=False):
960 """ 1171 """
961 Locate `item` in the list of paths `path` (colon separated string like $PATH). 1172 Locate ``item`` in the list of paths ``path`` (colon separated string like
962 If `direction` is non-zero then the list is reversed. 1173 ``$PATH``).
963 If `history` is True then the list of candidates also returned as result,history. 1174
964 If `executable` is True then the candidate has to be an executable file, 1175 Arguments:
965 otherwise the candidate simply has to exist. 1176
1177 - ``path``: list of colon-separated paths.
1178 - ``item``: string to search for.
1179 - ``direction``: if non-zero then the list is reversed.
1180 - ``history``: if ``True`` then the list of candidate paths is also
1181 returned as ``result,history`` where ``history`` is the list of
1182 paths checked.
1183 - ``executable``: if ``True`` then the candidate has to be an
1184 executable file, otherwise the candidate simply
1185 has to exist.
1186
1187 Returns the item if found in the list of paths, otherwise an empty string.
1188 If ``history`` is ``True``, returns the list of paths checked in a
1189 tuple with the found (or not found) item as ``(item, history)``.
966 """ 1190 """
967 1191
968 if executable: 1192 if executable:
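
An illustrative sketch (actual results depend on the host filesystem)::

    import bb.utils

    bb.utils.which("/usr/bin:/bin", "sh")
    # -> '/usr/bin/sh' or '/bin/sh', depending on the host
    bb.utils.which("/usr/bin:/bin", "no-such-tool")
    # -> ''
    found, hist = bb.utils.which("/usr/bin:/bin", "sh", history=True)
    # hist lists every candidate path probed, in order
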
@@ -989,10 +1213,29 @@ def which(path, item, direction = 0, history = False, executable=False):
989 return "", hist 1213 return "", hist
990 return "" 1214 return ""
991 1215
1216def to_filemode(input):
1217 """
1218 Take the contents of a bitbake variable defining a file mode and return
1219 the proper Python representation of the number.
1220
1221 Arguments:
1222
1223 - ``input``: a string or number to convert, e.g. a bitbake variable
1224 string, assumed to be an octal representation
1225
1226 Returns the python file mode as a number
1227 """
1228 # umask might come in as a number or text string..
1229 if type(input) is int:
1230 return input
1231 return int(input, 8)
1232
992@contextmanager 1233@contextmanager
993def umask(new_mask): 1234def umask(new_mask):
994 """ 1235 """
995 Context manager to set the umask to a specific mask, and restore it afterwards. 1236 Context manager to set the umask to a specific mask, and restore it afterwards.
1237
1238 No return value.
996 """ 1239 """
997 current_mask = os.umask(new_mask) 1240 current_mask = os.umask(new_mask)
998 try: 1241 try:
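
A sketch of the new ``to_filemode()`` helper above; string input is assumed to be octal, numeric input is passed through::

    >>> import bb.utils
    >>> bb.utils.to_filemode("0755")
    493
    >>> oct(bb.utils.to_filemode("0755"))
    '0o755'
    >>> bb.utils.to_filemode(0o755)
    493
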
@@ -1003,7 +1246,17 @@ def umask(new_mask):
1003def to_boolean(string, default=None): 1246def to_boolean(string, default=None):
1004 """ 1247 """
1005 Check input string and return boolean value True/False/None 1248 Check input string and return boolean value True/False/None
1006 depending upon the checks 1249 depending upon the checks.
1250
1251 Arguments:
1252
1253 - ``string``: input string.
1254 - ``default``: default return value if the input ``string`` is ``None``,
1255 ``0``, ``False`` or an empty string.
1256
1257 Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False``
1258 if the string is one of "n", "no", "0", or "false". Return ``default`` if
1259 the input ``string`` is ``None``, ``0``, ``False`` or an empty string.
1007 """ 1260 """
1008 if not string: 1261 if not string:
1009 return default 1262 return default
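
For example (a sketch; matching is case-insensitive and any other non-empty string raises ``ValueError``)::

    >>> import bb.utils
    >>> bb.utils.to_boolean("Yes")
    True
    >>> bb.utils.to_boolean("0")
    False
    >>> bb.utils.to_boolean(None, default=True)
    True
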
@@ -1024,18 +1277,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
1024 1277
1025 Arguments: 1278 Arguments:
1026 1279
1027 variable -- the variable name. This will be fetched and expanded (using 1280 - ``variable``: the variable name. This will be fetched and expanded (using
1028 d.getVar(variable)) and then split into a set(). 1281 d.getVar(variable)) and then split into a set().
1029 1282 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1030 checkvalues -- if this is a string it is split on whitespace into a set(), 1283 otherwise coerced directly into a set().
1031 otherwise coerced directly into a set(). 1284 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1285 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1286 not a subset of variable.
1287 - ``d``: the data store.
1032 1288
1033 truevalue -- the value to return if checkvalues is a subset of variable. 1289 Returns ``truevalue`` if the variable contains the values specified, ``falsevalue``
1034 1290 otherwise.
1035 falsevalue -- the value to return if variable is empty or if checkvalues is
1036 not a subset of variable.
1037
1038 d -- the data store.
1039 """ 1291 """
1040 1292
1041 val = d.getVar(variable) 1293 val = d.getVar(variable)
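
For example (a sketch assuming a data store ``d`` whose DISTRO_FEATURES includes "systemd"); this pattern is typically used inside ``${@...}`` inline Python expansions::

    flag = bb.utils.contains("DISTRO_FEATURES", "systemd",
                             "--with-systemd", "--without-systemd", d)
    # -> "--with-systemd"
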
@@ -1055,18 +1307,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1055 1307
1056 Arguments: 1308 Arguments:
1057 1309
1058 variable -- the variable name. This will be fetched and expanded (using 1310 - ``variable``: the variable name. This will be fetched and expanded (using
1059 d.getVar(variable)) and then split into a set(). 1311 d.getVar(variable)) and then split into a set().
1060 1312 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1061 checkvalues -- if this is a string it is split on whitespace into a set(), 1313 otherwise coerced directly into a set().
1062 otherwise coerced directly into a set(). 1314 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1315 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1316 not a subset of variable.
1317 - ``d``: the data store.
1063 1318
1064 truevalue -- the value to return if checkvalues is a subset of variable. 1319 Returns ``truevalue`` if the variable contains any of the values specified,
1065 1320 ``falsevalue`` otherwise.
1066 falsevalue -- the value to return if variable is empty or if checkvalues is
1067 not a subset of variable.
1068
1069 d -- the data store.
1070 """ 1321 """
1071 val = d.getVar(variable) 1322 val = d.getVar(variable)
1072 if not val: 1323 if not val:
@@ -1081,17 +1332,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1081 return falsevalue 1332 return falsevalue
1082 1333
1083def filter(variable, checkvalues, d): 1334def filter(variable, checkvalues, d):
1084 """Return all words in the variable that are present in the checkvalues. 1335 """Return all words in the variable that are present in the ``checkvalues``.
1085 1336
1086 Arguments: 1337 Arguments:
1087 1338
1088 variable -- the variable name. This will be fetched and expanded (using 1339 - ``variable``: the variable name. This will be fetched and expanded (using
1089 d.getVar(variable)) and then split into a set(). 1340 d.getVar(variable)) and then split into a set().
1341 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1342 otherwise coerced directly into a set().
1343 - ``d``: the data store.
1090 1344
1091 checkvalues -- if this is a string it is split on whitespace into a set(), 1345 Returns a space-separated string of the words found.
1092 otherwise coerced directly into a set().
1093
1094 d -- the data store.
1095 """ 1346 """
1096 1347
1097 val = d.getVar(variable) 1348 val = d.getVar(variable)
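
Continuing the sketch above (assuming ``d`` holds DISTRO_FEATURES = "systemd x11 wayland"); note the return value is a string, not a list::

    >>> bb.utils.filter("DISTRO_FEATURES", "systemd sysvinit x11", d)
    'systemd x11'
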
@@ -1107,8 +1358,27 @@ def filter(variable, checkvalues, d):
1107 1358
1108def get_referenced_vars(start_expr, d): 1359def get_referenced_vars(start_expr, d):
1109 """ 1360 """
1110 :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level 1361 Get the names of the variables referenced in a given expression.
1111 are ordered arbitrarily) 1362
1363 Arguments:
1364
1365 - ``start_expr``: the expression in which to look for variable references.
1366
1367 For example::
1368
1369 ${VAR_A} string ${VAR_B}
1370
1371 Or::
1372
1373 ${@d.getVar('VAR')}
1374
1375 If a variable makes references to other variables, these are also
1376 returned recursively.
1377
1378 - ``d``: the data store.
1379
1380 Returns the names of vars referenced in ``start_expr`` (recursively), in
1381 quasi-BFS order (variables within the same level are ordered arbitrarily).
1112 """ 1382 """
1113 1383
1114 seen = set() 1384 seen = set()
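
A sketch of the recursion (assumes an existing data store ``d``)::

    d.setVar("VAR_A", "${VAR_B} fixed")
    d.setVar("VAR_B", "${VAR_C}")
    d.setVar("VAR_C", "value")
    bb.utils.get_referenced_vars("${VAR_A}", d)
    # -> ['VAR_A', 'VAR_B', 'VAR_C'] (quasi-BFS order)
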
@@ -1150,45 +1420,71 @@ def cpu_count():
1150def nonblockingfd(fd): 1420def nonblockingfd(fd):
1151 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) 1421 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
1152 1422
1153def process_profilelog(fn, pout = None): 1423def profile_function(profile, function, output_fn, process=True):
1424 """Common function to profile a code block and optionally process the
1425 output using the ``process_profilelog()`` processing function.
1426
1427 Arguments:
1428
1429 - ``profile``: a boolean saying whether to enable profiling or not
1430 - ``function``: the function call to profile/run
1431 - ``output_fn``: where to write the profiling data
1432 - ``process``: whether to process the profiling data and write a report
1433
1434 Returns the wrapped function's return value.
1435 """
1436 if profile:
1437 try:
1438 import cProfile as profile
1439 except:
1440 import profile
1441 prof = profile.Profile()
1442 ret = profile.Profile.runcall(prof, function)
1443 prof.dump_stats(output_fn)
1444 if process:
1445 process_profilelog(output_fn)
1446 serverlog("Raw profiling information saved to %s and processed statistics to %s.report*" % (output_fn, output_fn))
1447 return ret
1448 else:
1449 return function()
1450
1451def process_profilelog(fn, fn_out = None):
1154 # Either call with a list of filenames and set pout or a filename and optionally pout. 1452 # Either call with a list of filenames and set pout or a filename and optionally pout.
1155 if not pout: 1453 import pstats
1156 pout = fn + '.processed'
1157 1454
1158 with open(pout, 'w') as pout: 1455 if not fn_out:
1159 import pstats 1456 fn_out = fn + '.report'
1457
1458 def pstatopen():
1160 if isinstance(fn, list): 1459 if isinstance(fn, list):
1161 p = pstats.Stats(*fn, stream=pout) 1460 return pstats.Stats(*fn, stream=pout)
1162 else: 1461 return pstats.Stats(fn, stream=pout)
1163 p = pstats.Stats(fn, stream=pout) 1462
1463 with open(fn_out + '.time', 'w') as pout:
1464 p = pstatopen()
1164 p.sort_stats('time') 1465 p.sort_stats('time')
1165 p.print_stats() 1466 p.print_stats()
1467
1468 with open(fn_out + '.time-callers', 'w') as pout:
1469 p = pstatopen()
1470 p.sort_stats('time')
1166 p.print_callers() 1471 p.print_callers()
1472
1473 with open(fn_out + '.cumulative', 'w') as pout:
1474 p = pstatopen()
1167 p.sort_stats('cumulative') 1475 p.sort_stats('cumulative')
1168 p.print_stats() 1476 p.print_stats()
1169 1477
1170 pout.flush() 1478 with open(fn_out + '.cumulative-callers', 'w') as pout:
1171 1479 p = pstatopen()
1172# 1480 p.sort_stats('cumulative')
1173# Was present to work around multiprocessing pool bugs in python < 2.7.3 1481 p.print_callers()
1174#
1175def multiprocessingpool(*args, **kwargs):
1176
1177 import multiprocessing.pool
1178 #import multiprocessing.util
1179 #multiprocessing.util.log_to_stderr(10)
1180 # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
1181 # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
1182 def wrapper(func):
1183 def wrap(self, timeout=None):
1184 return func(self, timeout=timeout if timeout is not None else 1e100)
1185 return wrap
1186 multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
1187 1482
1188 return multiprocessing.Pool(*args, **kwargs)
1189 1483
1190def exec_flat_python_func(func, *args, **kwargs): 1484def exec_flat_python_func(func, *args, **kwargs):
1191 """Execute a flat python function (defined with def funcname(args):...)""" 1485 """Execute a flat python function (defined with ``def funcname(args): ...``)
1486
1487 Returns the return value of the function."""
1192 # Prepare a small piece of python code which calls the requested function 1488 # Prepare a small piece of python code which calls the requested function
1193 # To do this we need to prepare two things - a set of variables we can use to pass 1489 # To do this we need to prepare two things - a set of variables we can use to pass
1194 # the values of arguments into the calling function, and the list of arguments for 1490 # the values of arguments into the calling function, and the list of arguments for
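
A usage sketch of the new ``profile_function()`` helper above (``slow_func`` and the output path are stand-ins)::

    def slow_func():
        return sum(i * i for i in range(10 ** 6))

    # With profile=True the call runs under cProfile; raw stats go to
    # output_fn and process_profilelog() writes the *.report.* files.
    ret = bb.utils.profile_function(True, slow_func, "/tmp/prof.data")
    # With profile=False the function is simply called.
    ret = bb.utils.profile_function(False, slow_func, "/tmp/prof.data")
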
@@ -1214,48 +1510,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1214 """Edit lines from a recipe or config file and modify one or more 1510 """Edit lines from a recipe or config file and modify one or more
1215 specified variable values set in the file using a specified callback 1511 specified variable values set in the file using a specified callback
1216 function. Lines are expected to have trailing newlines. 1512 function. Lines are expected to have trailing newlines.
1217 Parameters: 1513
1218 meta_lines: lines from the file; can be a list or an iterable 1514 Arguments:
1219 (e.g. file pointer) 1515
1220 variables: a list of variable names to look for. Functions 1516 - ``meta_lines``: lines from the file; can be a list or an iterable
1221 may also be specified, but must be specified with '()' at 1517 (e.g. file pointer)
1222 the end of the name. Note that the function doesn't have 1518 - ``variables``: a list of variable names to look for. Functions
1223 any intrinsic understanding of :append, :prepend, :remove, 1519 may also be specified, but must be specified with ``()`` at
1224 or overrides, so these are considered as part of the name. 1520 the end of the name. Note that the function doesn't have
1225 These values go into a regular expression, so regular 1521 any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``,
1226 expression syntax is allowed. 1522 or overrides, so these are considered as part of the name.
1227 varfunc: callback function called for every variable matching 1523 These values go into a regular expression, so regular
1228 one of the entries in the variables parameter. The function 1524 expression syntax is allowed.
1229 should take four arguments: 1525 - ``varfunc``: callback function called for every variable matching
1230 varname: name of variable matched 1526 one of the entries in the variables parameter.
1231 origvalue: current value in file 1527
1232 op: the operator (e.g. '+=') 1528 The function should take four arguments:
1233 newlines: list of lines up to this point. You can use 1529
1234 this to prepend lines before this variable setting 1530 - ``varname``: name of variable matched
1235 if you wish. 1531 - ``origvalue``: current value in file
1236 and should return a four-element tuple: 1532 - ``op``: the operator (e.g. ``+=``)
1237 newvalue: new value to substitute in, or None to drop 1533 - ``newlines``: list of lines up to this point. You can use
1238 the variable setting entirely. (If the removal 1534 this to prepend lines before this variable setting
1239 results in two consecutive blank lines, one of the 1535 if you wish.
1240 blank lines will also be dropped). 1536
1241 newop: the operator to use - if you specify None here, 1537 And should return a four-element tuple:
1242 the original operation will be used. 1538
1243 indent: number of spaces to indent multi-line entries, 1539 - ``newvalue``: new value to substitute in, or ``None`` to drop
1244 or -1 to indent up to the level of the assignment 1540 the variable setting entirely. (If the removal
1245 and opening quote, or a string to use as the indent. 1541 results in two consecutive blank lines, one of the
1246 minbreak: True to allow the first element of a 1542 blank lines will also be dropped).
1247 multi-line value to continue on the same line as 1543 - ``newop``: the operator to use - if you specify ``None`` here,
1248 the assignment, False to indent before the first 1544 the original operation will be used.
1249 element. 1545 - ``indent``: number of spaces to indent multi-line entries,
1250 To clarify, if you wish not to change the value, then you 1546 or ``-1`` to indent up to the level of the assignment
1251 would return like this: return origvalue, None, 0, True 1547 and opening quote, or a string to use as the indent.
1252 match_overrides: True to match items with _overrides on the end, 1548 - ``minbreak``: ``True`` to allow the first element of a
1253 False otherwise 1549 multi-line value to continue on the same line as
1550 the assignment, ``False`` to indent before the first
1551 element.
1552
1553 To clarify, if you wish not to change the value, then you
1554 would return like this::
1555
1556 return origvalue, None, 0, True
1557 - ``match_overrides``: ``True`` to match items with ``_overrides`` on the end,
1558 ``False`` otherwise.
1559
1254 Returns a tuple: 1560 Returns a tuple:
1255 updated: 1561
1256 True if changes were made, False otherwise. 1562 - ``updated``: ``True`` if changes were made, ``False`` otherwise.
1257 newlines: 1563 - ``newlines``: Lines after processing.
1258 Lines after processing
1259 """ 1564 """
1260 1565
1261 var_res = {} 1566 var_res = {}
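
A minimal ``varfunc`` sketch against the documented four-tuple contract (names and values are made up); this should substitute a new PR value while keeping the original operator::

    def set_pr(varname, origvalue, op, newlines):
        # Substitute a new value; returning None here would drop the line.
        return "r1", None, 0, True

    lines = ['PN = "example"\n', 'PR = "r0"\n']
    updated, newlines = bb.utils.edit_metadata(lines, ["PR"], set_pr)
    # updated -> True; newlines[1] -> 'PR = "r1"\n'
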
@@ -1399,12 +1704,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1399 1704
1400 1705
1401def edit_metadata_file(meta_file, variables, varfunc): 1706def edit_metadata_file(meta_file, variables, varfunc):
1402 """Edit a recipe or config file and modify one or more specified 1707 """Edit a recipe or configuration file and modify one or more specified
1403 variable values set in the file using a specified callback function. 1708 variable values set in the file using a specified callback function.
1404 The file is only written to if the value(s) actually change. 1709 The file is only written to if the value(s) actually change.
1405 This is basically the file version of edit_metadata(), see that 1710 This is basically the file version of ``bb.utils.edit_metadata()``, see that
1406 function's description for parameter/usage information. 1711 function's description for parameter/usage information.
1407 Returns True if the file was written to, False otherwise. 1712
1713 Returns ``True`` if the file was written to, ``False`` otherwise.
1408 """ 1714 """
1409 with open(meta_file, 'r') as f: 1715 with open(meta_file, 'r') as f:
1410 (updated, newlines) = edit_metadata(f, variables, varfunc) 1716 (updated, newlines) = edit_metadata(f, variables, varfunc)
@@ -1415,23 +1721,25 @@ def edit_metadata_file(meta_file, variables, varfunc):
1415 1721
1416 1722
1417def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): 1723def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
1418 """Edit bblayers.conf, adding and/or removing layers 1724 """Edit ``bblayers.conf``, adding and/or removing layers.
1419 Parameters: 1725
1420 bblayers_conf: path to bblayers.conf file to edit 1726 Arguments:
1421 add: layer path (or list of layer paths) to add; None or empty 1727
1422 list to add nothing 1728 - ``bblayers_conf``: path to ``bblayers.conf`` file to edit
1423 remove: layer path (or list of layer paths) to remove; None or 1729 - ``add``: layer path (or list of layer paths) to add; ``None`` or empty
1424 empty list to remove nothing 1730 list to add nothing
1425 edit_cb: optional callback function that will be called after 1731 - ``remove``: layer path (or list of layer paths) to remove; ``None`` or
1426 processing adds/removes once per existing entry. 1732 empty list to remove nothing
1733 - ``edit_cb``: optional callback function that will be called
1734 after processing adds/removes once per existing entry.
1735
1427 Returns a tuple: 1736 Returns a tuple:
1428 notadded: list of layers specified to be added but weren't
1429 (because they were already in the list)
1430 notremoved: list of layers that were specified to be removed
1431 but weren't (because they weren't in the list)
1432 """
1433 1737
1434 import fnmatch 1738 - ``notadded``: list of layers specified to be added but weren't
1739 (because they were already in the list)
1740 - ``notremoved``: list of layers that were specified to be removed
1741 but weren't (because they weren't in the list)
1742 """
1435 1743
1436 def remove_trailing_sep(pth): 1744 def remove_trailing_sep(pth):
1437 if pth and pth[-1] == os.sep: 1745 if pth and pth[-1] == os.sep:
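
A usage sketch (the paths are hypothetical)::

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "build/conf/bblayers.conf",
        add="/srv/layers/meta-example",
        remove=None)
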
@@ -1550,7 +1858,22 @@ def get_collection_res(d):
1550 1858
1551 1859
1552def get_file_layer(filename, d, collection_res={}): 1860def get_file_layer(filename, d, collection_res={}):
1553 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" 1861 """Determine the collection (or layer name, as defined by a layer's
1862 ``layer.conf`` file) containing the specified file.
1863
1864 Arguments:
1865
1866 - ``filename``: the filename to look for.
1867 - ``d``: the data store.
1868 - ``collection_res``: dictionary mapping layer names, as defined by
1869 BBFILE_COLLECTIONS, to the file patterns to match, as defined by
1870 BBFILE_PATTERN. The return value of
1871 ``bb.utils.get_collection_res()`` is the default if this argument is
1872 not specified.
1873
1874 Returns the layer name containing the file. If multiple layers contain the
1875 file, the last matching layer name from collection_res is returned.
1876 """
1554 if not collection_res: 1877 if not collection_res:
1555 collection_res = get_collection_res(d) 1878 collection_res = get_collection_res(d)
1556 1879
@@ -1588,7 +1911,13 @@ class PrCtlError(Exception):
1588 1911
1589def signal_on_parent_exit(signame): 1912def signal_on_parent_exit(signame):
1590 """ 1913 """
1591 Trigger signame to be sent when the parent process dies 1914 Trigger ``signame`` to be sent when the parent process dies.
1915
1916 Arguments:
1917
1918 - ``signame``: name of the signal. See ``man signal``.
1919
1920 No return value.
1592 """ 1921 """
1593 signum = getattr(signal, signame) 1922 signum = getattr(signal, signame)
1594 # http://linux.die.net/man/2/prctl 1923 # http://linux.die.net/man/2/prctl
@@ -1623,7 +1952,7 @@ def ioprio_set(who, cls, value):
1623 bb.warn("Unable to set IO Prio for arch %s" % _unamearch) 1952 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1624 1953
1625def set_process_name(name): 1954def set_process_name(name):
1626 from ctypes import cdll, byref, create_string_buffer 1955 from ctypes import byref, create_string_buffer
1627 # This is nice to have for debugging, not essential 1956 # This is nice to have for debugging, not essential
1628 try: 1957 try:
1629 libc = cdll.LoadLibrary('libc.so.6') 1958 libc = cdll.LoadLibrary('libc.so.6')
@@ -1675,6 +2004,13 @@ def disable_network(uid=None, gid=None):
1675 Disable networking in the current process if the kernel supports it, else 2004 Disable networking in the current process if the kernel supports it, else
1676 just return after logging to debug. To do this we need to create a new user 2005 just return after logging to debug. To do this we need to create a new user
1677 namespace, then map back to the original uid/gid. 2006 namespace, then map back to the original uid/gid.
2007
2008 Arguments:
2009
2010 - ``uid``: original user id.
2011 - ``gid``: original user group id.
2012
2013 No return value.
1678 """ 2014 """
1679 libc = ctypes.CDLL('libc.so.6') 2015 libc = ctypes.CDLL('libc.so.6')
1680 2016
@@ -1744,9 +2080,14 @@ class LogCatcher(logging.Handler):
1744 2080
1745def is_semver(version): 2081def is_semver(version):
1746 """ 2082 """
1747 Is the version string following the semver semantic? 2083 Arguments:
2084
2085 - ``version``: the version string.
1748 2086
1749 https://semver.org/spec/v2.0.0.html 2087 Returns ``True`` if the version string follows semantic versioning, ``False``
2088 otherwise.
2089
2090 See https://semver.org/spec/v2.0.0.html.
1750 """ 2091 """
1751 regex = re.compile( 2092 regex = re.compile(
1752 r""" 2093 r"""
@@ -1784,6 +2125,8 @@ def rename(src, dst):
1784def environment(**envvars): 2125def environment(**envvars):
1785 """ 2126 """
1786 Context manager to selectively update the environment with the specified mapping. 2127 Context manager to selectively update the environment with the specified mapping.
2128
2129 No return value.
1787 """ 2130 """
1788 backup = dict(os.environ) 2131 backup = dict(os.environ)
1789 try: 2132 try:
@@ -1800,6 +2143,13 @@ def is_local_uid(uid=''):
1800 """ 2143 """
1801 Check whether uid is a local one or not. 2144 Check whether uid is a local one or not.
1802 Can't use pwd module since it gets all UIDs, not local ones only. 2145 Can't use pwd module since it gets all UIDs, not local ones only.
2146
2147 Arguments:
2148
2149 - ``uid``: user id. If not specified the user id is determined from
2150 ``os.getuid()``.
2151
2152 Returns ``True`` if the user id is local, ``False`` otherwise.
1803 """ 2153 """
1804 if not uid: 2154 if not uid:
1805 uid = os.getuid() 2155 uid = os.getuid()
@@ -1814,7 +2164,7 @@ def is_local_uid(uid=''):
1814 2164
1815def mkstemp(suffix=None, prefix=None, dir=None, text=False): 2165def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1816 """ 2166 """
1817 Generates a unique filename, independent of time. 2167 Generates a unique temporary file, independent of time.
1818 2168
1819 mkstemp() in glibc (at least) generates unique file names based on the 2169 mkstemp() in glibc (at least) generates unique file names based on the
1820 current system time. When combined with highly parallel builds, and 2170 current system time. When combined with highly parallel builds, and
@@ -1823,6 +2173,18 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1823 2173
1824 This function adds additional entropy to the file name so that a collision 2174 This function adds additional entropy to the file name so that a collision
1825 is independent of time and thus extremely unlikely. 2175 is independent of time and thus extremely unlikely.
2176
2177 Arguments:
2178
2179 - ``suffix``: filename suffix.
2180 - ``prefix``: filename prefix.
2181 - ``dir``: directory where the file will be created.
2182 - ``text``: if ``True``, the file is opened in text mode.
2183
2184 Returns a tuple containing:
2185
2186 - the file descriptor for the created file
2187 - the name of the file.
1826 """ 2188 """
1827 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20)) 2189 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
1828 if prefix: 2190 if prefix:
@@ -1833,12 +2195,20 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1833 2195
1834def path_is_descendant(descendant, ancestor): 2196def path_is_descendant(descendant, ancestor):
1835 """ 2197 """
1836 Returns True if the path `descendant` is a descendant of `ancestor` 2198 Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor``
1837 (including being equivalent to `ancestor` itself). Otherwise returns False. 2199 (including being equivalent to ``ancestor`` itself). Otherwise returns
2200 ``False``.
2201
1838 Correctly accounts for symlinks, bind mounts, etc. by using 2202 Correctly accounts for symlinks, bind mounts, etc. by using
1839 os.path.samestat() to compare paths 2203 ``os.path.samestat()`` to compare paths.
2204
2205 May raise any exception that ``os.stat()`` raises.
1840 2206
1841 May raise any exception that os.stat() raises 2207 Arguments:
2208
2209 - ``descendant``: path to check for being a descendant of ``ancestor``.
2210 - ``ancestor``: path to the ancestor ``descendant`` will be checked
2211 against.
1842 """ 2212 """
1843 2213
1844 ancestor_stat = os.stat(ancestor) 2214 ancestor_stat = os.stat(ancestor)
@@ -1854,15 +2224,43 @@ def path_is_descendant(descendant, ancestor):
1854 2224
1855 return False 2225 return False
1856 2226
2227# Recomputing the sets in signal.py is expensive (bitbake -pP idle)
2228# so try and use _signal directly to avoid it
2229valid_signals = signal.valid_signals()
2230try:
2231 import _signal
2232 sigmask = _signal.pthread_sigmask
2233except ImportError:
2234 sigmask = signal.pthread_sigmask
2235
1857# If we don't have a timeout of some kind and a process/thread exits badly (for example 2236# If we don't have a timeout of some kind and a process/thread exits badly (for example
1858# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better 2237# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
1859# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked. 2238# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
2239# This function can still deadlock python since it can't signal the other threads to exit
2240# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
2241# to exit.
1860@contextmanager 2242@contextmanager
1861def lock_timeout(lock): 2243def lock_timeout(lock):
1862 held = lock.acquire(timeout=5*60)
1863 try: 2244 try:
2245 s = sigmask(signal.SIG_BLOCK, valid_signals)
2246 held = lock.acquire(timeout=5*60)
1864 if not held: 2247 if not held:
2248 bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
1865 os._exit(1) 2249 os._exit(1)
1866 yield held 2250 yield held
1867 finally: 2251 finally:
1868 lock.release() 2252 lock.release()
2253 sigmask(signal.SIG_SETMASK, s)
2254
2255# A version of lock_timeout without the check that the lock was locked and a shorter timeout
2256@contextmanager
2257def lock_timeout_nocheck(lock):
2258 l = False
2259 try:
2260 s = sigmask(signal.SIG_BLOCK, valid_signals)
2261 l = lock.acquire(timeout=10)
2262 yield l
2263 finally:
2264 if l:
2265 lock.release()
2266 sigmask(signal.SIG_SETMASK, s)
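
A usage sketch of ``lock_timeout()`` (the critical section is hypothetical)::

    import threading
    import bb.utils

    lock = threading.Lock()
    # Signals are blocked while waiting; if the lock cannot be taken within
    # 5 minutes, a traceback is logged and the process exits rather than
    # deadlocking.
    with bb.utils.lock_timeout(lock):
        pass  # serialised work goes here
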